swamp_code_gen/
intr.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::code_bld::CodeBuilder;
use crate::ctx::Context;

use crate::transformer::{Collection, Transformer};
use source_map_node::Node;
use swamp_semantic::intr::IntrinsicFunction;
use swamp_semantic::{ArgumentExpression, Expression, ExpressionKind, VariableRef};
use swamp_vm_isa::{
    MemoryOffset, COLLECTION_CAPACITY_OFFSET, COLLECTION_ELEMENT_COUNT_OFFSET,
    GRID_HEADER_HEIGHT_OFFSET, GRID_HEADER_WIDTH_OFFSET,
};
use swamp_vm_types::types::{
    float_type, int_type, pointer_type, u16_type, u32_type, u8_type, Place, TypedRegister, VmType,
};
use swamp_vm_types::{AggregateMemoryLocation, MemoryLocation, PointerLocation};

impl CodeBuilder<'_> {
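    /// Emits a single intrinsic call. The first argument, if present, is treated as
    /// `self` and materialized into a scalar register before delegating to
    /// `emit_single_intrinsic_call_with_self` with the remaining arguments.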
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::single_match_else)]
    pub fn emit_single_intrinsic_call(
        &mut self,
        target_reg: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        // For primitive intrinsics, materialize the self argument to a register early
        let self_reg = if arguments.is_empty() {
            None
        } else {
            let ArgumentExpression::Expression(self_expr) = &arguments[0] else {
                panic!("Expected expression for self argument");
            };
            Some(self.emit_scalar_rvalue(self_expr, ctx))
        };

        let rest_args = if arguments.len() > 1 {
            &arguments[1..]
        } else {
            &[]
        };
        self.emit_single_intrinsic_call_with_self(
            target_reg,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            rest_args,
            ctx,
            "single intrinsic call",
        );
    }

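    /// Emits code for map intrinsics (`MapHas`, `MapRemove`) operating on the map
    /// that `self_ptr_reg` points to.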
    pub fn emit_intrinsic_map(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::MapHas => {
                let key_argument = &arguments[0];
                // The key has to be materialized in temporary storage so the map can compute its hash.
                let key_temp_storage_reg =
                    self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_argument, ctx);

                self.builder.add_map_has(
                    output_destination.register().unwrap(),
                    self_ptr_reg,
                    &key_temp_storage_reg,
                    node,
                    "map_has",
                );
            }
            IntrinsicFunction::MapRemove => {
                let key_argument = &arguments[0];
                self.emit_intrinsic_map_remove(self_ptr_reg, key_argument, ctx);
            }
            _ => todo!("missing intrinsic_map {intrinsic_fn}"),
        }
    }

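    /// Emits code for sparse collection intrinsics (`SparseAdd`, `SparseRemove`,
    /// `SparseIsAlive`) operating on the collection that `self_ptr_reg` points to.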
    pub fn emit_intrinsic_sparse(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::SparseAdd => {
                let element_to_add_expression = &arguments[0];
                self.emit_sparse_add(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    element_to_add_expression,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::SparseRemove => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_remove(self_ptr_reg, sparse_id_int_expression, node, ctx);
            }

            IntrinsicFunction::SparseIsAlive => {
                let sparse_id_int_expression = &arguments[0];
                self.emit_sparse_is_alive(
                    &output_destination.register().unwrap().clone(),
                    self_ptr_reg,
                    sparse_id_int_expression,
                    node,
                    ctx,
                );
            }
            _ => todo!("unknown sparse {intrinsic_fn}"),
        }
    }
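    /// Emits code for grid intrinsics (`GridSet`, `GridGet`, `GridWidth`, `GridHeight`)
    /// operating on the grid that `self_ptr_reg` points to.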
    pub fn emit_intrinsic_grid(
        &mut self,
        target_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        comment: &str,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::GridSet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];
                let value_expr = &arguments[2];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);
                let element_gen_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(element_gen_type.clone()),
                    "temporary scalar",
                );

                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_gen_type.total_size,
                    node,
                    comment,
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize grid set allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    value_expr,
                    "grid set",
                    ctx,
                );
            }
            IntrinsicFunction::GridGet => {
                let x_expr = &arguments[0];
                let y_expr = &arguments[1];

                let x_reg = self.emit_scalar_rvalue(x_expr, ctx);
                let y_reg = self.emit_scalar_rvalue(y_expr, ctx);

                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Allocate a temporary register to hold the address of the grid element
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for grid element address",
                );

                // Get the address of the grid element using the opcode
                self.builder.add_grid_get_entry_addr(
                    &temp_element_ptr.register,
                    self_ptr_reg,
                    &x_reg,
                    &y_reg,
                    element_type.total_size,
                    node,
                    comment,
                );

                // Create a memory location from the element address with correct type information
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Use emit_copy_value_from_memory_location to handle both register and memory destinations
                // This will properly handle aggregates (like optionals) vs scalars
                self.emit_copy_value_from_memory_location(
                    target_destination,
                    &element_memory_location,
                    node,
                    "copy grid element value to destination",
                );
            }

            IntrinsicFunction::GridWidth => {
                // Allocate a temporary register for the width value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid width",
                );

                // Get the memory location of the width field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let width_location =
                    self_memory_location.offset(GRID_HEADER_WIDTH_OFFSET, int_type());

                // Load the width value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &width_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Place::Register(temp.register);

                // Use emit_copy_value_between_places to handle both register and memory destinations
                self.emit_copy_value_between_places(
                    target_destination,
                    &value_source,
                    node,
                    "store grid width to destination",
                );
            }
            IntrinsicFunction::GridHeight => {
                // Allocate a temporary register for the height value
                let temp = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "temp for grid height",
                );

                // Get the memory location of the height field in the grid header
                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );
                let height_location =
                    self_memory_location.offset(GRID_HEADER_HEIGHT_OFFSET, int_type());

                // Load the height value from the grid header into the temporary register
                self.builder.add_ld16_from_pointer_from_memory_location(
                    &temp.register,
                    &height_location.location,
                    node,
                    comment,
                );

                // Create a source destination from the temporary register
                let value_source = Place::Register(temp.register);

                // Use emit_copy_value_between_places to handle both register and memory destinations
                self.emit_copy_value_between_places(
                    target_destination,
                    &value_source,
                    node,
                    "store grid height to destination",
                );
            }
            _ => todo!("unhandled grid intrinsic {intrinsic_fn}"),
        }
    }

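    /// Emits code for vector intrinsics (push, pop, extend, copy, slice, remove,
    /// clear, get) operating on the vector that `self_ptr_reg` points to.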
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_vec(
        &mut self,
        output_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_ptr_reg: &PointerLocation,
        arguments: &[Expression],
        node: &Node,
        ctx: &Context,
    ) {
        let self_basic_type = &self_ptr_reg.ptr_reg.ty.basic_type;
        match intrinsic_fn {
            IntrinsicFunction::VecPush => {
                let element_expr = &arguments[0];

                let element_gen_type = self.state.layout_cache.layout(&element_expr.ty);

                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "pointer to new element",
                );

                self.builder.add_vec_push_addr(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    node,
                    "set pointer to new element",
                );

                let location = AggregateMemoryLocation {
                    location: MemoryLocation {
                        base_ptr_reg: temp_element_ptr.register,
                        offset: MemoryOffset(0),
                        ty: VmType::new_unknown_placement(element_gen_type.clone()),
                    },
                };

                // Initialize the allocated space first (like variable definition)
                if element_gen_type.is_aggregate() {
                    self.emit_initialize_memory_for_any_type(
                        &location.location,
                        node,
                        "initialize vec.push allocated space",
                    );
                }

                self.emit_expression_into_target_memory(
                    &location.location,
                    element_expr,
                    "vec push",
                    ctx,
                );
            }

            IntrinsicFunction::VecExtend => {
                let element_expr = &arguments[0];
                let other_vec_reg = self.emit_scalar_rvalue(element_expr, ctx);

                self.builder.add_vec_extend(
                    &self_ptr_reg.ptr_reg,
                    &other_vec_reg,
                    node,
                    "extend vec",
                );
            }

            IntrinsicFunction::VecCopy => {
                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec copy destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy(
                    &output_pointer_location,
                    self_ptr_reg,
                    node,
                    "copy vector",
                );
            }

            IntrinsicFunction::VecPop => {
                let element_type = self_basic_type.element().unwrap();
                let pop_target_reg = if let Some(found_target_reg) = output_destination.register() {
                    found_target_reg.clone()
                } else {
                    let temp = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type.clone()),
                        "temp for vec pop",
                    );
                    temp.register
                };
                self.builder.add_vec_pop(
                    &pop_target_reg,
                    &self_ptr_reg.ptr_reg, // mut self
                    element_type.total_size,
                    node,
                    "vec pop",
                );
                let source_memory_location = MemoryLocation {
                    base_ptr_reg: pop_target_reg,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "copy from vec pop",
                );
            }

            IntrinsicFunction::VecSlice => {
                let range_expr = &arguments[0];
                let range_region = self.emit_scalar_rvalue(range_expr, ctx);

                let output_pointer = self.emit_compute_effective_address_to_register(
                    output_destination,
                    node,
                    "get absolute pointer for vec slice destination",
                );
                let output_pointer_location = PointerLocation::new(output_pointer);

                self.builder.add_vec_copy_range(
                    &output_pointer_location,
                    self_ptr_reg,
                    &range_region,
                    node,
                    "vec slice",
                );
            }

            IntrinsicFunction::VecRemoveIndex => {
                let index_region_expr = &arguments[0];
                let index_region = self.emit_scalar_rvalue(index_region_expr, ctx);

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg,
                    &index_region,
                    node,
                    "remove index",
                );
            }
            IntrinsicFunction::VecRemoveIndexGetValue => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_basic_type.element().unwrap();

                // Handle both register and memory destinations
                if let Some(target_reg) = output_destination.register() {
                    // Direct register destination
                    self.builder.add_vec_remove_index_get_value(
                        target_reg,
                        &self_ptr_reg.ptr_reg, // mut self
                        &key_region,
                        node,
                        "vec remove index get value to register",
                    );
                } else {
                    // Memory destination or other
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(element_type),
                        "temp for vec remove index get value",
                    );

                    self.builder.add_vec_remove_index_get_value(
                        &temp_reg.register,
                        &self_ptr_reg.ptr_reg,
                        &key_region,
                        node,
                        "vec remove index get value to temp",
                    );

                    // Copy from temporary register to destination
                    let source = Place::Register(temp_reg.register);
                    self.emit_copy_value_between_places(
                        output_destination,
                        &source,
                        node,
                        "copy vec element to destination",
                    );
                }
            }
            IntrinsicFunction::VecRemoveFirstIndexGetValue => {
                let zero_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "vec remove first. set index 0",
                );
                self.builder
                    .add_mov8_immediate(zero_reg.register(), 0, node, "zero index");
                let value_addr_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u32_type()),
                    "vec entry addr to copy from",
                );
                let element_type = self_basic_type.element().unwrap();
                self.builder.add_vec_subscript(
                    value_addr_reg.register(),
                    &self_ptr_reg.ptr_reg,
                    zero_reg.register(),
                    node,
                    "lookup first entry in vec",
                );

                let source_memory_location = MemoryLocation {
                    base_ptr_reg: value_addr_reg.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &source_memory_location,
                    node,
                    "load the vec entry to target register",
                );

                self.builder.add_vec_remove_index(
                    &self_ptr_reg.ptr_reg, // mut self
                    zero_reg.register(),
                    node,
                    "vec remove first index",
                );
            }
            IntrinsicFunction::VecClear => {
                let temp_element_count_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u16_type()),
                    "vec_clear zero",
                );
                self.builder.add_mov_16_immediate_value(
                    temp_element_count_reg.register(),
                    0,
                    node,
                    "set to zero",
                );

                let self_memory_location = AggregateMemoryLocation::new(
                    MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        self_ptr_reg.ptr_reg.clone(),
                    ),
                );

                self.builder.add_st16_using_ptr_with_offset(
                    &self_memory_location
                        .offset(COLLECTION_ELEMENT_COUNT_OFFSET, u16_type())
                        .location,
                    temp_element_count_reg.register(),
                    node,
                    "set element_count to zero",
                );
            }

            IntrinsicFunction::VecGet => {
                let key_expr = &arguments[0];
                let key_region = self.emit_scalar_rvalue(key_expr, ctx);
                let element_type = self_ptr_reg.ptr_reg.ty.basic_type.element().unwrap();

                // Similar approach as GridGet - get pointer to element and use copy helpers
                let temp_element_ptr = self.temp_registers.allocate(
                    VmType::new_contained_in_register(pointer_type()),
                    "temp for vec element address",
                );

                // Get the address of the vector element
                self.builder.add_vec_subscript(
                    temp_element_ptr.register(),
                    &self_ptr_reg.ptr_reg,
                    &key_region,
                    node,
                    "get vec element address",
                );

                // Create a memory location for the element
                let element_memory_location = MemoryLocation {
                    base_ptr_reg: temp_element_ptr.register,
                    offset: MemoryOffset(0),
                    ty: VmType::new_unknown_placement(element_type),
                };

                // Copy from memory location to destination (works for both register and memory)
                self.emit_copy_value_from_memory_location(
                    output_destination,
                    &element_memory_location,
                    node,
                    "copy vec element to destination",
                );
            }
            _ => todo!("Vec {intrinsic_fn}"),
        }

        /*
            IntrinsicFunction::VecSwap => {
                let index_a = self
                    .emit_for_access_or_location(&arguments[0], ctx)
                    .grab_rvalue()
                    .clone();
                let index_b = self
                    .emit_for_access_or_location(&arguments[1], ctx)
                    .grab_rvalue()
                    .clone();
                self.builder
                    .add_vec_swap(self_addr.unwrap(), &index_a, &index_b, node, "vec swap");
            }

            IntrinsicFunction::VecInsert => { // Low prio
            }
            IntrinsicFunction::VecFirst => { // Low prio
            }
            IntrinsicFunction::VecLast => { // Low prio
            }
        */
    }

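    /// Emits code for scalar integer intrinsics. `arguments[0]` holds the materialized
    /// `self` value; any further registers are the remaining operands.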
    fn emit_intrinsic_call_int(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        let first_argument = &arguments[0];

        // Intrinsics can operate on any register directly, no need for register protection
        match intrinsic_fn {
            IntrinsicFunction::IntAbs => {
                self.builder
                    .add_int_abs(target_reg, first_argument, node, "int abs");
            }

            IntrinsicFunction::IntRnd => {
                self.builder
                    .add_int_rnd(target_reg, first_argument, node, "int pseudo random");
            }
            IntrinsicFunction::IntMax => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_max(target_reg, first_argument, int_register, node, "int max");
            }
            IntrinsicFunction::IntMin => {
                let int_register = &arguments[1];

                self.builder
                    .add_int_min(target_reg, first_argument, int_register, node, "int min");
            }
            IntrinsicFunction::IntClamp => {
                let min_reg = &arguments[1];
                let max_reg = &arguments[2];
                self.builder.add_int_clamp(
                    target_reg,
                    first_argument,
                    min_reg,
                    max_reg,
                    node,
                    "int clamp",
                );
            }
            IntrinsicFunction::IntToFloat => {
                self.builder.add_int_to_float(
                    target_reg,
                    first_argument,
                    node,
                    &format!("int to float {}", first_argument.comment()),
                );
            }
            IntrinsicFunction::IntToString => {
                self.builder
                    .add_int_to_string(target_reg, first_argument, node, "int_to_string");
            }
            _ => {}
        }
        // No need to copy from a temporary register as we're using target_reg directly
    }

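    /// Emits code for fixed-point (float) intrinsics. `arguments[0]` holds the
    /// materialized `self` value; any further registers are the remaining operands.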
    #[allow(clippy::too_many_lines)]
    fn emit_intrinsic_call_fixed(
        &mut self,
        target_reg: &TypedRegister,
        intrinsic_fn: &IntrinsicFunction,
        arguments: &[TypedRegister],
        node: &Node,
    ) {
        // Intrinsics can operate directly on any register, no need for temporary registers
        let first_argument_reg = &arguments[0];
        match intrinsic_fn {
            IntrinsicFunction::FloatRound => {
                self.builder
                    .add_float_round(target_reg, first_argument_reg, node, "float round");
            }
            IntrinsicFunction::FloatFloor => {
                self.builder
                    .add_float_floor(target_reg, first_argument_reg, node, "float floor");
            }
            IntrinsicFunction::FloatSign => {
                self.builder
                    .add_float_sign(target_reg, first_argument_reg, node, "float sign");
            }
            IntrinsicFunction::FloatAbs => {
                self.builder
                    .add_float_abs(target_reg, first_argument_reg, node, "float abs");
            }
            IntrinsicFunction::FloatRnd => {
                self.builder.add_float_prnd(
                    target_reg,
                    first_argument_reg,
                    node,
                    "float pseudo random",
                );
            }
            IntrinsicFunction::FloatCos => {
                self.builder
                    .add_float_cos(target_reg, first_argument_reg, node, "float cos");
            }
            IntrinsicFunction::FloatSin => {
                self.builder
                    .add_float_sin(target_reg, first_argument_reg, node, "float sin");
            }
            IntrinsicFunction::FloatAcos => {
                self.builder
                    .add_float_acos(target_reg, first_argument_reg, node, "float acos");
            }
            IntrinsicFunction::FloatAsin => {
                self.builder
                    .add_float_asin(target_reg, first_argument_reg, node, "float asin");
            }
            IntrinsicFunction::FloatAtan2 => {
                self.builder
                    .add_float_atan2(target_reg, first_argument_reg, node, "float atan2");
            }
            IntrinsicFunction::FloatMin => {
                let float_region = &arguments[1];
                self.builder.add_float_min(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float min",
                );
            }
            IntrinsicFunction::FloatMax => {
                let float_region = &arguments[1];
                self.builder.add_float_max(
                    target_reg,
                    first_argument_reg,
                    float_region,
                    node,
                    "float max",
                );
            }
            IntrinsicFunction::FloatClamp => {
                let float_region = &arguments[1];
                let float_b_region = &arguments[2];

                self.builder.add_float_clamp(
                    target_reg,
                    float_region,
                    first_argument_reg,
                    float_b_region,
                    node,
782                    "float round",
                );
            }
            IntrinsicFunction::FloatToString => self.builder.add_float_to_string(
                target_reg,
                first_argument_reg,
                node,
                "float_to_string",
            ),
            _ => panic!("wasn't a fixed operation"),
        }
        // No need to copy from temp register to target as we're using target_reg directly
    }

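    /// Emits code for transformer intrinsics (filter, for, while, find) by iterating
    /// over the collection at `self_addr` and applying the provided lambda.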
    pub fn emit_intrinsic_transformer(
        &mut self,
        target_destination: &Place,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: &PointerLocation,
        lambda: (Vec<VariableRef>, &Expression),
        node: &Node,
        ctx: &Context,
    ) {
        match intrinsic_fn {
            IntrinsicFunction::TransformerFold => { // Low prio
            }
            IntrinsicFunction::TransformerFilter => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Filter,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFilterInPlace => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::FilterMut,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::For,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            IntrinsicFunction::TransformerWhile => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::While,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFind => {
                self.emit_iterate_over_collection_with_lambda(
                    target_destination,
                    node,
                    Collection::Vec,
                    Transformer::Find,
                    &self_addr.ptr_reg,
                    lambda,
                    ctx,
                );
            }
            _ => todo!("{intrinsic_fn}"),
        }
    }

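    /// Variant of `emit_single_intrinsic_call_with_self` that takes the `self`
    /// argument as a `Place` and materializes it before delegating.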
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self_destination(
        &mut self,
        target_destination: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_destination: Option<&Place>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        // Use the helper function to properly materialize the self argument
        let self_reg = if let Some(self_dest) = self_destination {
            self.emit_load_scalar_or_absolute_aggregate_pointer(self_dest, node, comment)
        } else {
            None
        };

        // Delegate to the existing function
        self.emit_single_intrinsic_call_with_self(
            target_destination,
            node,
            intrinsic_fn,
            self_reg.as_ref(),
            arguments,
            ctx,
            comment,
        );
    }

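    /// Dispatches a single intrinsic call to the matching emitter, given an already
    /// materialized `self` register (if any) and the remaining arguments.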
    #[allow(clippy::too_many_lines)]
    #[allow(clippy::too_many_arguments)]
    pub fn emit_single_intrinsic_call_with_self(
        &mut self,
        target_destination: &Place,
        node: &Node,
        intrinsic_fn: &IntrinsicFunction,
        self_reg: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        ctx: &Context,
        comment: &str,
    ) {
        let maybe_target = target_destination.register();

        match intrinsic_fn {
            IntrinsicFunction::Float2Magnitude
            | IntrinsicFunction::FloatAbs
            | IntrinsicFunction::FloatRound
            | IntrinsicFunction::FloatFloor
            | IntrinsicFunction::FloatSign
            | IntrinsicFunction::FloatRnd
            | IntrinsicFunction::FloatCos
            | IntrinsicFunction::FloatSin
            | IntrinsicFunction::FloatAcos
            | IntrinsicFunction::FloatAsin
            | IntrinsicFunction::FloatAtan2
            | IntrinsicFunction::FloatMin
            | IntrinsicFunction::FloatMax
            | IntrinsicFunction::FloatClamp
            | IntrinsicFunction::FloatToString => {
                // Float
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize self to ensure we have the actual scalar value
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_fixed(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the fixed point value into memory",
                    );
                }
            }

            IntrinsicFunction::IntToFloat => {
                // IntToFloat - special case because it returns a float, not an int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    (None, target_destination.register().unwrap().clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(float_type()),
                        "temporary destination for int to float intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Self is already materialized as a register
                let int_value_reg = self_reg.unwrap();

                // Now convert the materialized integer value to float
                self.builder.add_int_to_float(
                    &dest_reg,
                    int_value_reg,
                    node,
                    &format!("int to float {}", int_value_reg.comment()),
                );

                if let Some(temp_reg) = temp_reg {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &temp_reg,
                        node,
                        "store the float result from int to float conversion",
                    );
                }
            }

            IntrinsicFunction::IntAbs
            | IntrinsicFunction::IntRnd
            | IntrinsicFunction::IntMax
            | IntrinsicFunction::IntMin
            | IntrinsicFunction::IntClamp
            | IntrinsicFunction::IntToString => {
                // Int
                let (temp_reg, dest_reg) = if target_destination.is_register() {
                    let target_reg = target_destination.register().unwrap();
                    // Intrinsics can operate on any register directly, no special treatment needed
                    (None, target_reg.clone())
                } else {
                    let temp_reg = self.temp_registers.allocate(
                        VmType::new_contained_in_register(u32_type()),
                        "temporary destination for low level intrinsic",
                    );

                    (Some(temp_reg.register.clone()), temp_reg.register)
                };

                // Materialize additional arguments (self is already materialized)
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                self.emit_intrinsic_call_int(&dest_reg, intrinsic_fn, &converted_regs, node);

                if let Some(temp_reg) = temp_reg {
                    if target_destination.is_register() {
                        // Copy from temp to target register
                        self.builder.add_mov_reg(
                            target_destination.register().unwrap(),
                            &temp_reg,
                            node,
                            "copy intrinsic result from temp to target register",
                        );
                    } else {
                        // Store to memory location
                        self.emit_store_scalar_to_memory_offset_instruction(
                            target_destination.grab_memory_location(),
                            &temp_reg,
                            node,
                            "put the low level intrinsic fixed (int) back to memory",
                        );
                    }
                }
            }

            IntrinsicFunction::EnumFromDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                assert_eq!(arguments.len(), 1, "expected exactly one discriminant argument");
                // Materialize additional arguments (self is already materialized)
                let mut converted_regs = vec![self_reg.unwrap().clone()];
                for arg in arguments {
                    let (ArgumentExpression::Expression(found_expression)
                    | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                    else {
                        panic!("must be expression");
                    };
                    let materialized_arg = self.emit_scalar_rvalue(found_expression, ctx);
                    converted_regs.push(materialized_arg);
                }

                // TODO: @important: Support different sizes of discriminants
                self.builder.add_st8_using_ptr_with_offset(
                    &MemoryLocation::new_copy_over_whole_type_with_zero_offset(
                        enum_pointer.ptr_reg,
                    ),
                    &converted_regs[1], // the discriminant to set
                    node,
                    "overwrite the discriminant in the enum pointer",
                );
            }

            IntrinsicFunction::EnumDiscriminant => {
                let enum_pointer = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let discriminant_temp_reg = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "temp register for fetching discriminant",
                );

                // TODO: @important: Support different sizes of discriminants
                self.builder.add_ld8_from_pointer_with_offset(
                    discriminant_temp_reg.register(),
                    &enum_pointer.ptr_reg,
                    MemoryOffset(0), // TODO: support proper tag offsets
                    node,
                    "get the discriminant from the enum pointer",
                );

                if target_destination.is_register() {
                    self.builder.add_mov_reg(
                        target_destination.register().unwrap(),
                        &discriminant_temp_reg.register,
                        node,
                        "copy discriminant in register to target register",
                    );
                } else {
                    self.emit_store_scalar_to_memory_offset_instruction(
                        target_destination.grab_memory_location(),
                        &discriminant_temp_reg.register,
                        node,
                        "store discriminant in register to target memory location",
                    );
                }
            }

            IntrinsicFunction::VecPush
            | IntrinsicFunction::VecPop
            | IntrinsicFunction::VecExtend
            | IntrinsicFunction::VecRemoveIndex
            | IntrinsicFunction::VecRemoveIndexGetValue
            | IntrinsicFunction::VecRemoveFirstIndexGetValue
            | IntrinsicFunction::VecClear
            | IntrinsicFunction::VecSlice
            | IntrinsicFunction::VecSwap
            | IntrinsicFunction::VecInsert
            | IntrinsicFunction::VecFirst
            | IntrinsicFunction::VecGet
            | IntrinsicFunction::VecCopy
            | IntrinsicFunction::VecLast => {
                // Vec
                // Self is assumed to be a flattened pointer:
                let vec_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();

                self.emit_intrinsic_call_vec(
                    target_destination,
                    intrinsic_fn,
                    &vec_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::GridGet
            | IntrinsicFunction::GridSet
            | IntrinsicFunction::GridWidth
            | IntrinsicFunction::GridHeight => {
                // Grid
                // Self is assumed to be a flattened pointer:
                let grid_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_grid(
                    target_destination,
                    intrinsic_fn,
                    &grid_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::SparseIsAlive
            | IntrinsicFunction::SparseRemove
            | IntrinsicFunction::SparseAdd => {
                // Sparse
                // Self is assumed to be a flattened pointer:
                let sparse_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };
                let converted_to_expressions: Vec<_> = arguments
                    .iter()
                    .map(|arg| {
                        let (ArgumentExpression::Expression(found_expression)
                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
                        else {
                            panic!("must be expression");
                        };
                        found_expression.clone()
                    })
                    .collect();
                self.emit_intrinsic_sparse(
                    target_destination,
                    intrinsic_fn,
                    &sparse_self_ptr_reg,
                    &converted_to_expressions,
                    node,
                    comment,
                    ctx,
                );
            }

            IntrinsicFunction::TransformerFor
            | IntrinsicFunction::TransformerWhile
            | IntrinsicFunction::TransformerFindMap
            | IntrinsicFunction::TransformerAny
            | IntrinsicFunction::TransformerAll
            | IntrinsicFunction::TransformerMap
            | IntrinsicFunction::TransformerFilter
            | IntrinsicFunction::TransformerFilterInPlace
            | IntrinsicFunction::TransformerFilterMap
            | IntrinsicFunction::TransformerFind
            | IntrinsicFunction::TransformerFold => {
                // Self is assumed to be a flattened pointer:
                let collection_self_ptr_reg = PointerLocation {
                    ptr_reg: self_reg.unwrap().clone(),
                };

                let lambda_expression = &arguments[0];

                // Take out lambda and other lookups before generating the code
                let ArgumentExpression::Expression(expr) = lambda_expression else {
                    panic!("transformer argument must be a lambda expression");
                };

                let ExpressionKind::Lambda(lambda_variables, lambda_expr) = &expr.kind else {
                    panic!("must have lambda for transformers");
                };

                self.emit_intrinsic_transformer(
                    target_destination,
                    intrinsic_fn,
                    &collection_self_ptr_reg,
                    (lambda_variables.clone(), lambda_expr),
                    node,
                    ctx,
                );
            }

            IntrinsicFunction::RuntimePanic => {
                self.builder
                    .add_panic(self_reg.unwrap(), node, "intrinsic panic");
            }

            IntrinsicFunction::RuntimeHalt => {
                self.builder.add_halt(node, "intrinsic halt");
            }

            IntrinsicFunction::RuntimeStep => {
                self.builder.add_step(node, "intrinsic step");
            }

            IntrinsicFunction::RangeInit => {
                let start_reg = self_reg.unwrap();

                let end_arg = &arguments[0];
                let ArgumentExpression::Expression(end_arg_expr) = end_arg else {
                    panic!("range end must be an expression");
                };
                let end_reg = self.emit_scalar_rvalue(end_arg_expr, ctx);

                let is_inclusive = &arguments[1];
                let ArgumentExpression::Expression(is_inclusive_expr) = is_inclusive else {
                    panic!("range inclusive flag must be an expression");
                };
                let is_inclusive_reg = self.emit_scalar_rvalue(is_inclusive_expr, ctx);
                let absolute_range_pointer = self.emit_compute_effective_address_to_register(
                    target_destination,
                    node,
                    "create range target pointer",
                );
                self.builder.add_range_init(
                    &absolute_range_pointer,
                    start_reg,
                    &end_reg,
                    &is_inclusive_reg,
                    node,
                    "create a range",
                );
            }

            // Codepoint
1313            IntrinsicFunction::CodepointToString => {
1314                if target_destination.is_register() {
1315                    self.builder.add_codepoint_to_string(
1316                        target_destination.register().unwrap(),
1317                        self_reg.unwrap(),
1318                        node,
1319                        "char_to_string",
1320                    );
1321                } else {
1322                    let temp_reg = self.temp_registers.allocate(
1323                        VmType::new_contained_in_register(u32_type()),
1324                        "temporary for codepoint to string intrinsic",
1325                    );
1326
1327                    self.builder.add_codepoint_to_string(
1328                        &temp_reg.register,
1329                        self_reg.unwrap(),
1330                        node,
1331                        "char_to_string",
1332                    );
1333
1334                    self.emit_store_scalar_to_memory_offset_instruction(
1335                        target_destination.grab_memory_location(),
1336                        &temp_reg.register,
1337                        node,
1338                        "store codepoint to string result to memory",
1339                    );
1340                }
1341            }
1342
1343            IntrinsicFunction::CodepointToInt => {
1344                if target_destination.is_register() {
1345                    self.builder.add_mov_reg(
1346                        target_destination.register().unwrap(),
1347                        self_reg.unwrap(),
1348                        node,
1349                        "char_to_int",
1350                    );
1351                } else {
1352                    self.emit_store_scalar_to_memory_offset_instruction(
1353                        target_destination.grab_memory_location(),
1354                        self_reg.unwrap(),
1355                        node,
1356                        "store codepoint to int result to memory",
1357                    );
1358                }
1359            }
1360
1361            IntrinsicFunction::CodepointToByte => {
1362                if target_destination.is_register() {
1363                    self.builder.add_mov_reg(
1364                        target_destination.register().unwrap(),
1365                        self_reg.unwrap(),
1366                        node,
1367                        "char_to_int",
1368                    );
1369                } else {
1370                    self.emit_store_scalar_to_memory_offset_instruction(
1371                        target_destination.grab_memory_location(),
1372                        self_reg.unwrap(),
1373                        node,
1374                        "store codepoint to int result to memory",
1375                    );
1376                }
1377            }
1378
1379            // Byte
1380            IntrinsicFunction::ByteToString => {
1381                if target_destination.is_register() {
1382                    self.builder.byte_to_string(
1383                        target_destination.register().unwrap(),
1384                        self_reg.unwrap(),
1385                        node,
1386                        "byte_to_string",
1387                    );
1388                } else {
1389                    let temp_reg = self.temp_registers.allocate(
1390                        VmType::new_contained_in_register(u32_type()),
1391                        "temporary for byte to string intrinsic",
1392                    );
1393
1394                    self.builder.byte_to_string(
1395                        &temp_reg.register,
1396                        self_reg.unwrap(),
1397                        node,
1398                        "byte_to_string",
1399                    );
1400
1401                    self.emit_store_scalar_to_memory_offset_instruction(
1402                        target_destination.grab_memory_location(),
1403                        &temp_reg.register,
1404                        node,
1405                        "store byte to string result to memory",
1406                    );
1407                }
1408            }
1409
1410            IntrinsicFunction::ByteToInt => {
1411                if target_destination.is_register() {
1412                    // It is safe to "upcast" to an i32 from a u8, so just copy the register
1413                    // TODO: Make something smarter so we don't have to copy
1414                    self.builder.add_mov_reg(
1415                        target_destination.register().unwrap(),
1416                        self_reg.unwrap(),
1417                        node,
1418                        "byte_to_int",
1419                    );
1420                } else {
1421                    self.emit_store_scalar_to_memory_offset_instruction(
1422                        target_destination.grab_memory_location(),
1423                        self_reg.unwrap(),
1424                        node,
1425                        "store byte to int result to memory",
1426                    );
1427                }
1428            }
1429
1430            IntrinsicFunction::ByteToFloat => {
1431                if target_destination.is_register() {
1432                    // Use existing int-to-float conversion since byte is just a small integer
1433                    self.builder.add_int_to_float(
1434                        target_destination.register().unwrap(),
1435                        self_reg.unwrap(),
1436                        node,
1437                        "byte_to_float",
1438                    );
1439                } else {
1440                    let temp_reg = self.temp_registers.allocate(
1441                        VmType::new_contained_in_register(float_type()),
1442                        "temporary for byte to float intrinsic",
1443                    );
1444
1445                    // Use existing int-to-float conversion since byte is just a small integer
1446                    self.builder.add_int_to_float(
1447                        &temp_reg.register,
1448                        self_reg.unwrap(),
1449                        node,
1450                        "byte_to_float",
1451                    );
1452
1453                    self.emit_store_scalar_to_memory_offset_instruction(
1454                        target_destination.grab_memory_location(),
1455                        &temp_reg.register,
1456                        node,
1457                        "store byte to float result to memory",
1458                    );
1459                }
1460            }
1461
1462            IntrinsicFunction::ByteToCodepoint => {
1463                if target_destination.is_register() {
1464                    // It is safe to "upcast" to a codepoint from a u8, so just copy the register
1465                    // TODO: Make something smarter so we don't have to copy
1466                    self.builder.add_mov_reg(
1467                        target_destination.register().unwrap(),
1468                        self_reg.unwrap(),
1469                        node,
1470                        "byte_to_codepoint",
1471                    );
1472                } else {
1473                    self.emit_store_scalar_to_memory_offset_instruction(
1474                        target_destination.grab_memory_location(),
1475                        self_reg.unwrap(),
1476                        node,
1477                        "store byte to codepoint result to memory",
1478                    );
1479                }
1480            }
1481
1482            // Bool
1483            IntrinsicFunction::BoolToString => {
1484                if target_destination.is_register() {
1485                    self.builder.bool_to_string(
1486                        target_destination.register().unwrap(),
1487                        self_reg.unwrap(),
1488                        node,
1489                        "bool_to_string",
1490                    );
1491                } else {
1492                    let temp_reg = self.temp_registers.allocate(
1493                        VmType::new_contained_in_register(u32_type()),
1494                        "temporary for bool to string intrinsic",
1495                    );
1496
1497                    self.builder.bool_to_string(
1498                        &temp_reg.register,
1499                        self_reg.unwrap(),
1500                        node,
1501                        "bool_to_string",
1502                    );
1503
1504                    self.emit_store_scalar_to_memory_offset_instruction(
1505                        target_destination.grab_memory_location(),
1506                        &temp_reg.register,
1507                        node,
1508                        "store bool to string result to memory",
1509                    );
1510                }
1511            }
1512
1513            IntrinsicFunction::ByteVectorToString => {
1514                if target_destination.is_register() {
1515                    self.builder.add_bytes_to_string(
1516                        target_destination.register().unwrap(),
1517                        self_reg.unwrap(),
1518                        node,
1519                        "bytes_to_string",
1520                    );
1521                } else {
1522                    let temp_reg = self.temp_registers.allocate(
1523                        VmType::new_contained_in_register(u32_type()),
1524                        "temporary for string duplicate intrinsic",
1525                    );
1526
1527                    self.builder.add_bytes_to_string(
1528                        &temp_reg.register,
1529                        self_reg.unwrap(),
1530                        node,
1531                        "bytes_to_string",
1532                    );
1533
1534                    self.emit_store_scalar_to_memory_offset_instruction(
1535                        target_destination.grab_memory_location(),
1536                        &temp_reg.register,
1537                        node,
1538                        "store converted utf8 string to memory",
1539                    );
1540                }
1541            }
1542
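            // Unlike the conversions above, string storage is filled in place: when the
            // destination lives in memory, its effective address is computed and the
            // instruction writes into that storage directly instead of spilling through a
            // temporary register.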
1543            IntrinsicFunction::ByteVectorToStringStorage => {
1544                if target_destination.is_register() {
1545                    self.builder.add_bytes_to_string_storage(
1546                        target_destination.register().unwrap(),
1547                        self_reg.unwrap(),
1548                        node,
1549                        "bytes_to_string_storage",
1550                    );
1551                } else {
1552                    let target_pointer_reg = self.emit_compute_effective_address_to_register(
1553                        target_destination,
1554                        node,
1555                        "get pointer to target string storage",
1556                    );
1557
1558                    self.builder.add_bytes_to_string_storage(
1559                        &target_pointer_reg,
1560                        self_reg.unwrap(),
1561                        node,
1562                        "bytes_to_string_storage",
1563                    );
1564                }
1565            }
1566
1567            IntrinsicFunction::StringDuplicate => {
1568                if target_destination.is_register() {
1569                    self.builder.add_string_duplicate(
1570                        target_destination.register().unwrap(),
1571                        self_reg.unwrap(),
1572                        node,
1573                        "string_to_string",
1574                    );
1575                } else {
1576                    let temp_reg = self.temp_registers.allocate(
1577                        VmType::new_contained_in_register(u32_type()),
1578                        "temporary for string duplicate intrinsic",
1579                    );
1580
1581                    self.builder.add_string_duplicate(
1582                        &temp_reg.register,
1583                        self_reg.unwrap(),
1584                        node,
1585                        "string_duplicate",
1586                    );
1587
1588                    self.emit_store_scalar_to_memory_offset_instruction(
1589                        target_destination.grab_memory_location(),
1590                        &temp_reg.register,
1591                        node,
1592                        "store string duplicate result to memory",
1593                    );
1594                }
1595            }
1596
1597            IntrinsicFunction::StringToString => {
1598                if target_destination.is_register() {
1599                    self.builder.add_string_to_string(
1600                        target_destination.register().unwrap(),
1601                        self_reg.unwrap(),
1602                        node,
1603                        "string_to_string",
1604                    );
1605                } else {
1606                    let temp_reg = self.temp_registers.allocate(
1607                        VmType::new_contained_in_register(u32_type()),
1608                        "temporary for string to string intrinsic",
1609                    );
1610
1611                    self.builder.add_string_to_string(
1612                        &temp_reg.register,
1613                        self_reg.unwrap(),
1614                        node,
1615                        "string_to_string",
1616                    );
1617
1618                    self.emit_store_scalar_to_memory_offset_instruction(
1619                        target_destination.grab_memory_location(),
1620                        &temp_reg.register,
1621                        node,
1622                        "store string to string result to memory",
1623                    );
1624                }
1625            }
1626
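            // Binary string predicates receive their extra operand as an ArgumentExpression;
            // unwrap it to a plain expression and materialize the other string in a register
            // before emitting the comparison.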
1627            IntrinsicFunction::StringStartsWith => {
1628                let converted_to_expressions: Vec<_> = arguments
1629                    .iter()
1630                    .map(|arg| {
1631                        let (ArgumentExpression::Expression(found_expression)
1632                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1633                        else {
1634                            panic!("must be expression");
1635                        };
1636                        found_expression.clone()
1637                    })
1638                    .collect();
1639                let other_str = self.emit_scalar_rvalue(&converted_to_expressions[0], ctx);
1640                if target_destination.is_register() {
1641                    self.builder.add_string_starts_with(
1642                        target_destination.register().unwrap(),
1643                        self_reg.unwrap(),
1644                        &other_str,
1645                        node,
1646                        "string_starts_with",
1647                    );
1648                } else {
1649                    let temp_reg = self.temp_registers.allocate(
1650                        VmType::new_contained_in_register(u8_type()),
1651                        "temporary for string starts with intrinsic",
1652                    );
1653
1654                    self.builder.add_string_starts_with(
1655                        &temp_reg.register,
1656                        self_reg.unwrap(),
1657                        &other_str,
1658                        node,
1659                        "string_starts_with",
1660                    );
1661
1662                    self.emit_store_scalar_to_memory_offset_instruction(
1663                        target_destination.grab_memory_location(),
1664                        &temp_reg.register,
1665                        node,
1666                        "store string starts with result to memory",
1667                    );
1668                }
1669            }
1670
1671            IntrinsicFunction::StringToInt => {
1672                let pointer = self.emit_compute_effective_address_to_register(
1673                    target_destination,
1674                    node,
1675                    "need pointer to tuple",
1676                );
1677                self.builder
1678                    .add_string_to_int(&pointer, self_reg.unwrap(), node, "string to int");
1679            }
1680            IntrinsicFunction::StringToFloat => {
1681                let pointer = self.emit_compute_effective_address_to_register(
1682                    target_destination,
1683                    node,
1684                    "need pointer to tuple",
1685                );
1686                self.builder.add_string_to_float(
1687                    &pointer,
1688                    self_reg.unwrap(),
1689                    node,
1690                    "string to float",
1691                );
1692            }
1693
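            // Vec, Map and String share a common collection header, so len/capacity/is_empty
            // are plain header-field reads (see the emit_collection_* helpers at the bottom of
            // this impl).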
1694            // Common Collection
1695            IntrinsicFunction::MapIsEmpty | IntrinsicFunction::VecIsEmpty => {
1696                let collection_pointer = PointerLocation {
1697                    ptr_reg: self_reg.unwrap().clone(),
1698                };
1699                if target_destination.is_register() {
1700                    self.emit_collection_is_empty(
1701                        target_destination.register().unwrap().clone(),
1702                        &collection_pointer,
1703                        node,
1704                        "vec empty",
1705                    );
1706                } else {
1707                    let temp_reg = self.temp_registers.allocate(
1708                        VmType::new_contained_in_register(u8_type()),
1709                        "temporary for collection is empty intrinsic",
1710                    );
1711
1712                    self.emit_collection_is_empty(
1713                        temp_reg.register.clone(),
1714                        &collection_pointer,
1715                        node,
1716                        "vec empty",
1717                    );
1718
1719                    self.emit_store_scalar_to_memory_offset_instruction(
1720                        target_destination.grab_memory_location(),
1721                        &temp_reg.register,
1722                        node,
1723                        "store collection is empty result to memory",
1724                    );
1725                }
1726            }
1727
1728            IntrinsicFunction::StringLen
1729            | IntrinsicFunction::MapLen
1730            | IntrinsicFunction::VecLen => {
1731                let collection_pointer = PointerLocation {
1732                    ptr_reg: self_reg.unwrap().clone(),
1733                };
1734                if target_destination.is_register() {
1735                    self.emit_collection_len(
1736                        target_destination.register().unwrap(),
1737                        &collection_pointer,
1738                        node,
1739                        "get the collection element_count",
1740                    );
1741                } else {
1742                    let temp_reg = self.temp_registers.allocate(
1743                        VmType::new_contained_in_register(u16_type()),
1744                        "temporary for collection len intrinsic",
1745                    );
1746
1747                    self.emit_collection_len(
1748                        &temp_reg.register,
1749                        &collection_pointer,
1750                        node,
1751                        "get the collection element_count",
1752                    );
1753
1754                    self.emit_store_scalar_to_memory_offset_instruction(
1755                        target_destination.grab_memory_location(),
1756                        &temp_reg.register,
1757                        node,
1758                        "store collection len result to memory",
1759                    );
1760                }
1761            }
1762            IntrinsicFunction::MapCapacity | IntrinsicFunction::VecCapacity => {
1763                let collection_pointer = PointerLocation {
1764                    ptr_reg: self_reg.unwrap().clone(),
1765                };
1766                if target_destination.is_register() {
1767                    self.emit_collection_capacity(
1768                        target_destination.register().unwrap(),
1769                        &collection_pointer,
1770                        node,
1771                        "get the collection element_count",
1772                    );
1773                } else {
1774                    let temp_reg = self.temp_registers.allocate(
1775                        VmType::new_contained_in_register(u16_type()),
1776                        "temporary for collection capacity intrinsic",
1777                    );
1778
1779                    self.emit_collection_capacity(
1780                        &temp_reg.register,
1781                        &collection_pointer,
1782                        node,
1783                        "get the collection element_count",
1784                    );
1785
1786                    self.emit_store_scalar_to_memory_offset_instruction(
1787                        target_destination.grab_memory_location(),
1788                        &temp_reg.register,
1789                        node,
1790                        "store collection capacity result to memory",
1791                    );
1792                }
1793            }
1794
1795            IntrinsicFunction::MapRemove | IntrinsicFunction::MapHas => {
1796                // Map
1797                // Self is assumed to be a flattened pointer to the map header:
1798                let map_self_ptr_reg = PointerLocation {
1799                    ptr_reg: self_reg.unwrap().clone(),
1800                };
1801                let converted_to_expressions: Vec<_> = arguments
1802                    .iter()
1803                    .map(|arg| {
1804                        let (ArgumentExpression::Expression(found_expression)
1805                        | ArgumentExpression::MaterializedExpression(found_expression)) = arg
1806                        else {
1807                            panic!("must be expression");
1808                        };
1809                        found_expression.clone()
1810                    })
1811                    .collect();
1812                self.emit_intrinsic_map(
1813                    target_destination,
1814                    intrinsic_fn,
1815                    &grid_self_ptr_reg,
1816                    &converted_to_expressions,
1817                    node,
1818                    comment,
1819                    ctx,
1820                );
1821            }
1822
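            // The pointer intrinsics below are currently no-ops: no code is emitted for them.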
1823            IntrinsicFunction::PtrFromU32 => {}
1824            IntrinsicFunction::PtrWriteU32 => {}
1825            IntrinsicFunction::PtrWriteU16 => {}
1826            IntrinsicFunction::PtrWriteU8 => {}
1827            IntrinsicFunction::PtrReadU16 => {}
1828        }
1829    }
1830
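    /// Emits a map remove: the key expression is materialized to addressable memory and a
    /// `map_remove` instruction is issued against the map header pointer.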
1831    fn emit_intrinsic_map_remove(
1832        &mut self,
1833        map_header_reg: &PointerLocation,
1834        key_expression: &Expression,
1835        ctx: &Context,
1836    ) {
1837        let key_register =
1838            self.emit_aggregate_pointer_or_pointer_to_scalar_memory(key_expression, ctx);
1839
1840        self.builder
1841            .add_map_remove(map_header_reg, &key_register, &key_expression.node, "");
1842    }
1843
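    /// Reads the u16 capacity field of a collection header (at `COLLECTION_CAPACITY_OFFSET`)
    /// into `output_reg`.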
1844    fn emit_collection_capacity(
1845        &mut self,
1846        output_reg: &TypedRegister,
1847        collection_addr: &PointerLocation,
1848        node: &Node,
1849        comment: &str,
1850    ) {
1851        self.builder.add_ld16_from_pointer_with_offset_u16(
1852            output_reg,
1853            &collection_addr.ptr_reg,
1854            COLLECTION_CAPACITY_OFFSET,
1855            node,
1856            comment,
1857        );
1858    }
1859
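    /// Reads the u16 element count of a collection header (at
    /// `COLLECTION_ELEMENT_COUNT_OFFSET`) into `output_reg`.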
1860    fn emit_collection_len(
1861        &mut self,
1862        output_reg: &TypedRegister,
1863        collection_addr: &PointerLocation,
1864        node: &Node,
1865        comment: &str,
1866    ) {
1867        self.builder.add_ld16_from_pointer_with_offset_u16(
1868            output_reg,
1869            &collection_addr.ptr_reg,
1870            COLLECTION_ELEMENT_COUNT_OFFSET,
1871            node,
1872            &format!("{comment} - collection element_count"),
1873        );
1874    }
1875
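    /// Loads the element count and applies `meqz`, so `output_reg` ends up as a bool that is
    /// true exactly when the collection has no elements.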
1876    fn emit_collection_is_empty(
1877        &mut self,
1878        output_reg: TypedRegister,
1879        collection_addr: &PointerLocation,
1880        node: &Node,
1881        _comment: &str,
1882    ) {
1883        self.builder.add_ld16_from_pointer_with_offset_u16(
1884            &output_reg,
1885            &collection_addr.ptr_reg,
1886            COLLECTION_ELEMENT_COUNT_OFFSET,
1887            node,
1888            "get the map length for testing if it is empty",
1889        );
1890        self.builder.add_meqz(
1891            &output_reg,
1892            &output_reg,
1893            node,
1894            "convert the map length to inverted bool",
1895        );
1896    }
1897}