swamp_code_gen/
code_bld.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::err;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_types::aligner::{align, SAFE_ALIGNMENT};
use swamp_vm_types::types::BasicTypeKind;
use swamp_vm_types::types::{
    b8_type, u32_type, u8_type, BasicTypeRef, Destination, FramePlacedType, TypedRegister, VmType,
};
use swamp_vm_types::{
    AggregateMemoryLocation, FrameMemoryRegion, FrameMemorySize, MemoryLocation, MemoryOffset,
    MemorySize, PointerLocation, ANY_HEADER_HASH_OFFSET, ANY_HEADER_PTR_OFFSET,
    ANY_HEADER_SIZE_OFFSET, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use tracing::info;

#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
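
/// Generates VM instructions for a single function body, tracking variable registers,
/// temporary registers, stack frame allocations, and source-line debug information.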
pub(crate) struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub(crate) builder: &'a mut InstructionBuilder<'a>,
    pub(crate) variable_registers: SeqMap<usize, TypedRegister>,
    pub(crate) temp_registers: HwmTempRegisterPool,
    pub(crate) frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    //pub spilled_registers: SpilledRegisterScopes,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
    pub errors: Vec<err::Error>,
}

impl<'a> CodeBuilder<'a> {
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            //frame_memory_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
            errors: Vec::new(),
        }
    }
}

impl CodeBuilder<'_> {
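    /// Copies the value in `source_reg` into `target_reg`.
    ///
    /// Sources with mutable-reference semantics are either moved pointer-to-pointer or
    /// block-copied into the target aggregate; scalars are moved register-to-register,
    /// except `StringStorage` -> `StringView`, which emits a string duplicate.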
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else {
            // Special case: StringStorage to StringView should create a string duplicate
            if matches!(
                source_reg.ty.basic_type.kind,
                BasicTypeKind::StringStorage {
                    element_type: _,
                    char: _,
                    capacity: _
                }
            ) && matches!(
                target_reg.ty.basic_type.kind,
                BasicTypeKind::StringView { byte: _, char: _ }
            ) {
                self.builder.add_string_duplicate(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. string storage to view (duplicate). {comment}"),
                );
            } else {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. primitive to primitive. {comment}"),
                );
            }
        }
    }

    pub(crate) fn add_ld_regs_from_frame(
        &mut self,
        start_reg: &TypedRegister,
        start_address: FrameMemoryRegion,
        count: u8,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld_contiguous_regs_from_frame(
            start_reg.index,
            start_address,
            count,
            node,
            comment,
        );
    }

    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
        //info!(?source_code_line, "generating");
    }

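    /// Emits a unary operator: logical `!` on `Bool`, and negation on `Int` / `Float`,
    /// writing the result into `target_reg`.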
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }

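    /// Emits an `if` (with optional `else`) expression.
    ///
    /// The condition leaves a patchable jump that skips the true branch; both branches
    /// write directly into `output_destination`, and when an else branch exists the true
    /// branch ends with a jump over it.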
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Destination,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        // The true expression just takes over our target,
        // both to reuse the current target and because there may be no else branch
        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            // we need to help the true expression to jump over the false section
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            // If the condition was false, execution should continue here
            self.builder.patch_jump_here(jump_on_false_condition);

            // The else expression can also just take over our if target
            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }

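    /// Reserves a contiguous frame memory region large enough to hold `count`
    /// registers and returns it.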
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }

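    /// Writes an `Option` value into the target memory location.
    ///
    /// For `Some`, stores tag byte `1` at the union's tag offset and emits the payload
    /// expression into the payload offset; for `None`, stores tag byte `0`.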
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Overwrite the tag with 1 (`Some`)
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // for options, we know that the tag size is one byte
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag"); // 0 signals `None`

            // For `none` we simply overwrite the tag with zero
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }

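    /// Emits a block: all but the last expression are emitted as statements, and the
    /// last expression is emitted into `target_reg` unless its type is `Unit`.
    /// Empty blocks emit nothing.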
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Destination,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        } else {
            // empty blocks are allowed for side effects
        }
    }

    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        //info!(unique_id=?variable.unique_id_within_function, name=?variable.assigned_name, "trying to fetch");
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

    fn get_variable_frame_placed(&self, variable: &VariableRef) -> FramePlacedType {
        let frame_address = self
            .variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap();

        frame_address.frame_placed()
    }

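    /// Allocates frame space for `ty` and returns a temporary register holding the
    /// absolute address of the allocated region (materialized with a frame `lea`).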
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
        MemoryLocation {
            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> Destination {
        let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
        Destination::new_location(location)
    }

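    /// Materializes a constant into `output`.
    ///
    /// Aggregates are copied from the constant area via a temporary base pointer;
    /// scalars are loaded from the constant's absolute address, either straight into
    /// the output register or via a temporary register when the output is a memory
    /// location.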
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Destination,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        // TODO: Bring this back // assert_eq!(target_reg.size(), constant_region.size());

        if constant_region.ty().is_aggregate() {
            // load the known constant address into a temp register to use as a base for the block copy
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }

    pub(crate) fn emit_coerce_int_to_char(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let destination = Destination::Register(target_reg.clone());

        // Since Char (u32) is the same size as Int (i32), we can just use it directly
        self.emit_expression(&destination, expr, ctx);
    }

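    /// Coerces an `Int` expression to a byte: the expression is emitted into `output`
    /// and then range-checked with `add_check_u8`, loading through a temporary register
    /// when the output is a memory location.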
    pub(crate) fn emit_coerce_int_to_byte(
        &mut self,
        output: &Destination,
        expr: &Expression,
        ctx: &Context,
    ) {
        // Since a byte occupies a full register just like an Int, we can emit the
        // expression directly and then range-check the result
        self.emit_expression(output, expr, ctx);

        match output {
            Destination::Unit => {}
            Destination::Register(dest_reg) => {
                self.builder
                    .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
            }
            Destination::Memory(mem) => {
                let hwm = self.temp_registers.save_mark();
                let temp_u8 = self
                    .temp_registers
                    .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
                self.builder.add_ld8_from_pointer_with_offset(
                    temp_u8.register(),
                    &mem.base_ptr_reg,
                    mem.offset,
                    &expr.node,
                    "load it to check it",
                );
                self.builder
                    .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
                self.temp_registers.restore_to_mark(hwm);
            }
        }
    }

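    /// Coerces an option to `Bool` by loading the tag byte (offset 0) of the tagged
    /// union into `target_reg`, so the result is non-zero exactly when the option is `Some`.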
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        //info!(?target_reg.ty, "it wants to coerce this to bool");

        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        /* TODO: Bring this back // let (tag_offset, tag_size, ..) = base_pointer_of_tagged_union_reg
            .underlying()
            .unwrap_info()
            .unwrap();
        assert_eq!(tag_size.0, 1);
        */

        // Move the tag portion to the target variable
        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }

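    /// Coerces an aggregate value to `Any` by filling in the Any header at the output
    /// location: the source aggregate pointer, its total size, and its type hash are
    /// stored at their respective header offsets.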
    pub(crate) fn emit_coerce_to_any(
        &mut self,
        output: &Destination,
        expr: &Expression,
        ctx: &Context,
    ) {
        let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);

        let pointer_register = self.emit_compute_effective_address_to_register(
            output,
            &expr.node,
            "get starting ptr to output",
        );
        let output_aggregate_location = AggregateMemoryLocation::new(
            MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
        );

        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_PTR_OFFSET, u32_type())
                .location,
            &source_aggregate_pointer,
            &expr.node,
            "store aggregate pointer into Any Header",
        );
        let temp_size = self.temp_registers.allocate(
            VmType::new_contained_in_register(u32_type()),
            "Any header size temp",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.total_size.0,
            &expr.node,
            "fixed size",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy size into Any Header",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
            &expr.node,
            "reuse temp register for the hash",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_HASH_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy hash into Any Header",
        );
    }

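    /// Merges the outer call arguments with any extra trailing intrinsic arguments,
    /// relying on the assumptions spelled out in the HACK comment below.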
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &Vec<ArgumentExpression>,
        intrinsic_args: &Vec<ArgumentExpression>,
    ) -> Vec<ArgumentExpression> {
        // HACK: we assume that the parameters are in the same order.
        // If one has more arguments, we assume that the extra arguments are at the end.
        // We also assume that the first argument is `self`.
        let mut all_args = outer_args.clone();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }

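    /// Emits a mutable borrow: computes the absolute address of the lvalue and copies
    /// that address into `target_register`.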
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }

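    /// Logs file, line, column, and source text for `expr` (with `description` as the
    /// message), but only when the expression starts on a new source line compared to
    /// the last line tracked.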
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((_start, _end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
}