swamp_code_gen/code_bld.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::err;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_isa::aligner::{align, SAFE_ALIGNMENT};
use swamp_vm_isa::{
    FrameMemorySize, MemoryOffset, MemorySize, ANY_HEADER_HASH_OFFSET,
    ANY_HEADER_PTR_OFFSET, ANY_HEADER_SIZE_OFFSET, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use swamp_vm_types::types::{
    b8_type, u32_type, u8_type, BasicTypeRef, Place, TypedRegister, VmType,
};
use swamp_vm_types::{AggregateMemoryLocation, FrameMemoryRegion, MemoryLocation, PointerLocation};
use tracing::info;

#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
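/// Emits VM instructions for one function body: tracks the variable-to-register
/// mapping, temporary registers, frame memory, and source-line debug information,
/// and collects code generation errors in `errors`.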
pub struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub builder: &'a mut InstructionBuilder<'a>,
    pub variable_registers: SeqMap<usize, TypedRegister>,
    pub temp_registers: HwmTempRegisterPool,
    pub frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    //pub spilled_registers: SpilledRegisterScopes,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
    pub errors: Vec<err::Error>,
}

impl<'a> CodeBuilder<'a> {
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            //frame_memory_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
            errors: Vec::new(),
        }
    }
}
impl CodeBuilder<'_> {
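    /// Copies a value from `source_reg` to `target_reg`.
    ///
    /// If the source has mutable-reference (pointer) semantics, the copy is either a
    /// plain pointer move (when the target is also a mutable reference) or a block
    /// copy of the pointed-to aggregate; otherwise it is a scalar register move.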
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else {
            self.builder.add_mov_reg(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. primitive to primitive. {comment}"),
            );
        }
    }

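    /// Returns the frame space used so far, rounded up to `SAFE_ALIGNMENT`;
    /// this is the size later patched into the enter instruction.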
    #[must_use]
    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
        //info!(?source_code_line, "generating");
    }

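    /// Emits a unary operator: `!` is handled for `Bool`, negation for `Int` and
    /// `Float`; any other operand type is a lowering bug and panics.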
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }

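    /// Emits an `if`/`else` as a conditional forward jump:
    ///
    /// ```text
    ///   <condition>              ; jump to `false:` when the condition is false
    ///   <true branch>  -> output
    ///   jmp end                  ; emitted only when there is an else branch
    /// false:
    ///   <false branch> -> output ; only when there is an else branch
    /// end:
    /// ```
    ///
    /// Both branches write into the same `output_destination`, so no extra copy is needed.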
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Place,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        // The true branch writes straight into our target, both to reuse it
        // and to handle the case where there is no else branch
        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            // the true branch needs to jump over the false section
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            // If the condition was false, execution should continue here
            self.builder.patch_jump_here(jump_on_false_condition);

            // The else branch can also write straight into our if target
            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }

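    /// Reserves frame memory large enough to hold `count` registers, using the
    /// register-on-frame size and alignment from the VM ISA, and returns the region.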
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }

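    /// Writes an `Option` value into the target memory location. The one-byte tag at
    /// `tag_offset` is set to 1 for `Some` and 0 for `None`; for `Some`, the payload
    /// expression is evaluated directly into the payload area of the tagged union.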
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Place,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Overwrite the tag with 1 (`Some`)
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // for options, we know that the tag size is one byte
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(
                        union_information.payload_offset,
                        union_information.get_variant_by_index(1).ty.clone(),
                    )
                    .location;
                self.emit_initialize_memory_for_any_type(
                    payload_location,
                    node,
                    "initialize Some payload memory",
                );
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag"); // 0 signals `None`

            // For `none` we simply overwrite the tag with zero
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }

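    /// Emits a block expression: all but the last expression are emitted as
    /// statements, and the last one is emitted into `target_reg` unless its type
    /// is `Unit`. An empty block emits nothing.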
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Place,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        } else {
            // empty blocks are allowed; there is nothing to emit
        }
    }

    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        //info!(unique_id=?variable.unique_id_within_function, name=?variable.assigned_name, "trying to fetch");
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

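    /// Allocates frame memory for `ty`, optionally clears it, and returns a temporary
    /// register holding the absolute address of the allocation (via `lea`), ready to
    /// be used as a base pointer.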
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        if clear_it {
            self.builder.add_frame_memory_clear(
                temp.register.region(),
                node,
                &format!("{comment}: clear temporary memory"),
            );
        }

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, clear_it, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_pointer_location(ty, clear_it, node, comment);
        MemoryLocation {
            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> Place {
        let location =
            self.allocate_frame_space_and_return_memory_location(ty, clear_it, node, comment);
        Place::new_location(location)
    }

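    /// Loads a constant into `output`. Aggregate constants are copied from the
    /// constant memory area through a temporary base-pointer register; scalar
    /// constants are loaded from their absolute address, either straight into the
    /// output register or via a temporary when the output is a memory location.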
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Place,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        // TODO: Bring this back // assert_eq!(target_reg.size(), constant_region.size());

        if constant_region.ty().is_aggregate() {
            // load the known constant address into a temp register to use as a base for the block copy
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }

    pub(crate) fn emit_coerce_int_to_char(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let destination = Place::Register(target_reg.clone());

        // Since Char (u32) is the same size as Int (i32), we can use the value directly
        self.emit_expression(&destination, expr, ctx);
    }

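    /// Coerces an `Int` to a `Byte`: the value is emitted as-is (both occupy a full
    /// register) and then range-checked with `check_u8`, loading it back through a
    /// temporary when the output landed in memory.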
    pub(crate) fn emit_coerce_int_to_byte(
        &mut self,
        output: &Place,
        expr: &Expression,
        ctx: &Context,
    ) {
        // A Byte occupies a full register just like an Int, so emit the value directly
        // and then range-check that it fits in a u8
        self.emit_expression(output, expr, ctx);

        match output {
            Place::Discard => {}
            Place::Register(dest_reg) => {
                self.builder
                    .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
            }
            Place::Memory(mem) => {
                let hwm = self.temp_registers.save_mark();
                let temp_u8 = self
                    .temp_registers
                    .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
                self.builder.add_ld8_from_pointer_with_offset(
                    temp_u8.register(),
                    &mem.base_ptr_reg,
                    mem.offset,
                    &expr.node,
                    "load it to check it",
                );
                self.builder
                    .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
                self.temp_registers.restore_to_mark(hwm);
            }
        }
    }

    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        //info!(?target_reg.ty, "it wants to coerce this to bool");

        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        /* TODO: Bring this back // let (tag_offset, tag_size, ..) = base_pointer_of_tagged_union_reg
            .underlying()
            .unwrap_info()
            .unwrap();
        assert_eq!(tag_size.0, 1);
        */

        // Move the tag portion to the target variable
        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }

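    /// Boxes an aggregate value behind an `Any` header written into `output`.
    /// The header receives, at the offsets defined by the VM ISA:
    /// - `ANY_HEADER_PTR_OFFSET`: the address of the aggregate
    /// - `ANY_HEADER_SIZE_OFFSET`: its total size in bytes
    /// - `ANY_HEADER_HASH_OFFSET`: its universal type hash, truncated to `u32`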
    pub(crate) fn emit_coerce_to_any(&mut self, output: &Place, expr: &Expression, ctx: &Context) {
        let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);

        let pointer_register = self.emit_compute_effective_address_to_register(
            output,
            &expr.node,
            "get starting ptr to output",
        );
        let output_aggregate_location = AggregateMemoryLocation::new(
            MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
        );

        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_PTR_OFFSET, u32_type())
                .location,
            &source_aggregate_pointer,
            &expr.node,
            "store aggregate pointer into Any Header",
        );
        let temp_size = self.temp_registers.allocate(
            VmType::new_contained_in_register(u32_type()),
            "Any header size temp",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.total_size.0,
            &expr.node,
            "fixed size",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy size into Any Header",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
            &expr.node,
            "reuse temp register for the type hash",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_HASH_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy hash into Any Header",
        );
    }

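    /// Merges the outer call's arguments with any extra trailing intrinsic arguments
    /// (see the HACK note below for the ordering assumptions).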
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &[ArgumentExpression],
        intrinsic_args: &[ArgumentExpression],
    ) -> Vec<ArgumentExpression> {
        // HACK: we assume that the parameters are in the same order.
        // If one list has more arguments, we assume the extra arguments are at the end.
        // We also assume that the first intrinsic argument is self
        let mut all_args = outer_args.to_vec();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }

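    /// Materializes a mutable borrow: computes the absolute address of the lvalue
    /// and copies that address into `target_register`.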
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }

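    /// Logs the source file, line, column, and source text for `expr` via `tracing`,
    /// but only the first time a new source line is reached (tracked by
    /// `debug_line_tracker`) to avoid duplicate output.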
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((start, end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
692}