// swamp_code_gen/code_bld.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::err;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_isa::aligner::{SAFE_ALIGNMENT, align};
use swamp_vm_isa::{
    ANY_HEADER_HASH_OFFSET, ANY_HEADER_PTR_OFFSET, ANY_HEADER_SIZE_OFFSET, FrameMemorySize,
    MemoryOffset, MemorySize, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use swamp_vm_types::types::{
    BasicTypeRef, Place, TypedRegister, VmType, b8_type, u8_type, u32_type,
};
use swamp_vm_types::{AggregateMemoryLocation, FrameMemoryRegion, MemoryLocation, PointerLocation};
use tracing::info;

/// Options controlling code-generation output.
#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    /// When true, emit per-node debug output while generating code.
    pub should_show_debug: bool,
}
/// Emits VM instructions for one function body, tracking variable registers,
/// temporary registers, frame memory, and source-line debug information.
pub struct CodeBuilder<'a> {
    /// Shared code-generation state (e.g. constant layout offsets).
    pub state: &'a mut CodeGenState,
    /// Low-level instruction emitter being appended to.
    pub builder: &'a mut InstructionBuilder<'a>,
    /// Register assigned to each variable, keyed by its unique id within the function.
    pub variable_registers: SeqMap<usize, TypedRegister>,
    /// Pool of temporary registers with high-water-mark save/restore.
    pub temp_registers: HwmTempRegisterPool,
    /// Allocator for this function's stack-frame memory.
    pub frame_allocator: StackFrameAllocator,
    /// Remembers the last emitted source line, for debug logging.
    pub debug_line_tracker: KeepTrackOfSourceLine,
    //pub spilled_registers: SpilledRegisterScopes,
    /// Resolves spans to files, lines, and source text.
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
    /// Errors collected during generation instead of aborting immediately.
    pub errors: Vec<err::Error>,
}
48
impl<'a> CodeBuilder<'a> {
    /// Creates a builder for one function body from pre-allocated state.
    ///
    /// `variable_registers` must already map every variable id used by the
    /// body to its register; `temp_allocator` provides the frame memory
    /// (stored as `frame_allocator`).
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
            errors: Vec::new(),
        }
    }
}
73impl CodeBuilder<'_> {
74    pub(crate) fn emit_copy_register(
75        &mut self,
76        target_reg: &TypedRegister,
77        source_reg: &TypedRegister,
78        node: &Node,
79        comment: &str,
80    ) {
81        if source_reg.ty.is_mutable_reference_semantic() {
82            if target_reg.ty().is_mutable_reference() {
83                self.builder.add_mov_reg(
84                    target_reg,
85                    source_reg,
86                    node,
87                    &format!("emit_copy_register. ptr to ptr. {comment}"),
88                );
89            } else {
90                let size = source_reg.size();
91
92                let target_pointer_location = PointerLocation {
93                    ptr_reg: target_reg.clone(),
94                };
95                let source_pointer_location = PointerLocation {
96                    ptr_reg: source_reg.clone(),
97                };
98                self.builder.add_block_copy_with_immediate_size(
99                    &target_pointer_location,
100                    &source_pointer_location,
101                    size,
102                    node,
103                    &format!("emit_copy_register.copy struct. {comment}"),
104                );
105            }
106        } else {
107            {
108                self.builder.add_mov_reg(
109                    target_reg,
110                    source_reg,
111                    node,
112                    &format!("emit_copy_register. primitive to primitive. {comment}"),
113                );
114            }
115        }
116    }
117
118    #[must_use]
119    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
120        let aligned = align(
121            self.frame_allocator.addr().as_size().0 as usize,
122            SAFE_ALIGNMENT,
123        );
124        FrameMemorySize(aligned as u32)
125    }
126
127    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
128        self.builder
129            .patch_enter(self.total_aligned_frame_size(), patch_position);
130    }
131
    /// Prints `file:row:col> text` for `node` to stderr; debugging aid only.
    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
        //info!(?source_code_line, "generating");
    }
141
142    pub(crate) fn emit_unary_operator(
143        &mut self,
144        target_reg: &TypedRegister,
145        unary_operator: &UnaryOperator,
146        ctx: &Context,
147    ) {
148        let node = &unary_operator.node;
149        match &unary_operator.kind {
150            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
151                TypeKind::Bool => {
152                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
153                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
154                }
155                _ => panic!("unknown not op"),
156            },
157
158            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
159                TypeKind::Int => {
160                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
161                    self.builder
162                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
163                }
164
165                TypeKind::Float => {
166                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
167                    self.builder
168                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
169                }
170                _ => panic!("negate should only be possible on Int and Float"),
171            },
172        }
173    }
174
175    pub(crate) fn emit_if(
176        &mut self,
177        output_destination: &Place,
178        condition: &BooleanExpression,
179        true_expr: &Expression,
180        maybe_false_expr: Option<&Expression>,
181        ctx: &Context,
182    ) {
183        let jump_on_false_condition = self.emit_condition_context(condition, ctx);
184
185        // True expression just takes over our target
186        // Both to reuse the current target, and for the fact when there is no else
187        self.emit_expression(output_destination, true_expr, ctx);
188
189        if let Some(false_expr) = maybe_false_expr {
190            // we need to help the true expression to jump over false
191            let skip_false_if_true = self.builder.add_jump_placeholder(
192                &condition.expression.node,
193                "since it was true, skip over false section",
194            );
195
196            // If the expression was false, it should continue here
197            self.builder.patch_jump_here(jump_on_false_condition);
198
199            // Else expression also can just take over our if target
200            self.emit_expression(output_destination, false_expr, ctx);
201
202            self.builder.patch_jump_here(skip_false_if_true);
203        } else {
204            self.builder.patch_jump_here(jump_on_false_condition);
205        }
206    }
207
208    pub(crate) fn temp_frame_space_for_register(
209        &mut self,
210        count: u8,
211        comment: &str,
212    ) -> FrameMemoryRegion {
213        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
214        let start = self
215            .frame_allocator
216            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);
217
218        FrameMemoryRegion {
219            addr: start,
220            size: total_size,
221        }
222    }
223
    /// Writes an `Option` value (tag byte + optional payload) into the memory
    /// that `output` designates.
    ///
    /// `maybe_option = Some(expr)` stores tag 1 and then the evaluated
    /// payload; `None` stores tag 0 only.
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Place,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        // Temp registers allocated below are released at the end of the fn.
        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Overwrite the tag with 1 (`Some`)
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // for options, we know that the tag size is one byte
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                // NOTE(review): variant index 1 is presumably the `Some`
                // payload variant — confirm against the union layout.
                let payload_location = &memory_lvalue_location
                    .offset(
                        union_information.payload_offset,
                        union_information.get_variant_by_index(1).ty.clone(),
                    )
                    .location;
                self.emit_initialize_memory_for_any_type(
                    payload_location,
                    node,
                    "initialize error",
                );
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag"); // 0 signals `None`

            // For `none` we simply overwrite the tag with zero
            // NOTE(review): unlike the `Some` path, this stores at offset 0
            // rather than `tag_offset` — fine only if the tag is always
            // first in the layout; confirm.
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }
301
302    pub(crate) fn emit_block(
303        &mut self,
304        target_reg: &Place,
305        expressions: &[Expression],
306        ctx: &Context,
307    ) {
308        if let Some((last, others)) = expressions.split_last() {
309            for expr in others {
310                self.emit_statement(expr, ctx);
311            }
312            if matches!(&*last.ty.kind, TypeKind::Unit) {
313                self.emit_statement(last, ctx);
314            } else {
315                self.emit_expression(target_reg, last, ctx);
316            }
317        } else {
318            // empty blocks are allowed for side effects
319        }
320    }
321
322    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
323        //info!(unique_id=?variable.unique_id_within_function, name=?variable.assigned_name, "trying to fetch");
324        self.variable_registers
325            .get(&variable.unique_id_within_function)
326            .unwrap()
327    }
328
329    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
330        &mut self,
331        ty: &BasicTypeRef,
332        node: &Node,
333        comment: &str,
334    ) -> TypedRegister {
335        let frame_placed_type = self.frame_allocator.allocate_type(ty);
336
337        let temp = self.temp_registers.allocate(
338            VmType::new_frame_placed(frame_placed_type),
339            &format!("{comment}: allocate frame space"),
340        );
341
342        self.builder.add_lea_from_frame_region(
343            &temp.register,
344            temp.register.region(),
345            node,
346            &format!("{comment}: set the allocated memory to pointer reg"),
347        );
348
349        temp.register
350    }
351
352    pub fn allocate_frame_space_and_return_pointer_location(
353        &mut self,
354        ty: &BasicTypeRef,
355        node: &Node,
356        comment: &str,
357    ) -> PointerLocation {
358        let absolute_base_ptr_reg =
359            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
360        PointerLocation {
361            ptr_reg: absolute_base_ptr_reg,
362        }
363    }
364
365    pub fn allocate_frame_space_and_return_memory_location(
366        &mut self,
367        ty: &BasicTypeRef,
368        node: &Node,
369        comment: &str,
370    ) -> MemoryLocation {
371        let absolute_base_ptr_reg =
372            self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
373        MemoryLocation {
374            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
375            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
376            offset: MemoryOffset(0),
377        }
378    }
379
380    pub fn allocate_frame_space_and_return_destination_to_it(
381        &mut self,
382        ty: &BasicTypeRef,
383        node: &Node,
384        comment: &str,
385    ) -> Place {
386        let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
387        Place::new_location(location)
388    }
389
390    pub(crate) fn emit_constant_access(
391        &mut self,
392        output: &Place,
393        constant_reference: &ConstantRef,
394        node: &Node,
395        ctx: &Context,
396    ) {
397        let constant_region = self
398            .state
399            .constant_offsets
400            .get(&constant_reference.id)
401            .unwrap();
402        // TODO: Bring this back// assert_eq!(target_reg.size(), constant_region.size());
403
404        if constant_region.ty().is_aggregate() {
405            // load the known constant address into a temp register to use as a base for the block copy
406            let source_base_ptr = self.temp_registers.allocate(
407                VmType::new_contained_in_register(u32_type()),
408                "temp register for the base pointer to the constant",
409            );
410            self.builder.add_mov_32_immediate_value(
411                source_base_ptr.register(),
412                constant_region.addr().0,
413                node,
414                &format!(
415                    "load constant pointer '{}' type:{}",
416                    constant_reference.assigned_name,
417                    constant_region.ty()
418                ),
419            );
420
421            let source_memory_location = MemoryLocation {
422                base_ptr_reg: source_base_ptr.register,
423                offset: MemoryOffset(0),
424                ty: VmType::new_heap_placement(
425                    constant_region.ty().clone(),
426                    constant_region.region(),
427                ),
428            };
429
430            let output_reg = output.memory_location_or_pointer_reg();
431
432            self.emit_copy_value_from_memory_location(
433                output,
434                &source_memory_location,
435                node,
436                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
437            );
438        } else if let Some(output_memory_location) = output.memory_location() {
439            let hwm = self.temp_registers.save_mark();
440            let temp_reg = self.temp_registers.allocate(
441                VmType::new_contained_in_register(constant_region.ty().clone()),
442                "temporary for constant",
443            );
444
445            self.emit_load_scalar_from_absolute_address_instruction(
446                temp_reg.register(),
447                constant_region.addr(),
448                &VmType::new_unknown_placement(constant_region.ty().clone()),
449                node,
450                &format!(
451                    "load constant primitive '{}' {:?}",
452                    constant_reference.assigned_name,
453                    constant_region.ty()
454                ),
455            );
456
457            self.emit_store_scalar_to_memory_offset_instruction(
458                output_memory_location,
459                temp_reg.register(),
460                node,
461                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
462            );
463
464            self.temp_registers.restore_to_mark(hwm);
465        } else {
466            self.emit_load_scalar_from_absolute_address_instruction(
467                output.grab_register(),
468                constant_region.addr(),
469                &VmType::new_unknown_placement(constant_region.ty().clone()),
470                node,
471                &format!(
472                    "load constant primitive '{}' {:?}",
473                    constant_reference.assigned_name,
474                    constant_region.ty()
475                ),
476            );
477        }
478    }
479
480    pub(crate) fn emit_coerce_int_to_char(
481        &mut self,
482        target_reg: &TypedRegister,
483        expr: &Expression,
484        ctx: &Context,
485    ) {
486        let destination = Place::Register(target_reg.clone());
487
488        // Since Char (u32) is same size as Int(i32), we can just use it directly
489        self.emit_expression(&destination, expr, ctx);
490    }
491
492    pub(crate) fn emit_coerce_int_to_byte(
493        &mut self,
494        output: &Place,
495        expr: &Expression,
496        ctx: &Context,
497    ) {
498        // Since u32 is same size as byte (a register), we can just use it directly
499        self.emit_expression(output, expr, ctx);
500
501        match output {
502            Place::Discard => {}
503            Place::Register(dest_reg) => {
504                self.builder
505                    .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
506            }
507            Place::Memory(mem) => {
508                let hwm = self.temp_registers.save_mark();
509                let temp_u8 = self
510                    .temp_registers
511                    .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
512                self.builder.add_ld8_from_pointer_with_offset(
513                    temp_u8.register(),
514                    &mem.base_ptr_reg,
515                    mem.offset,
516                    &expr.node,
517                    "load it to check it",
518                );
519                self.builder
520                    .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
521                self.temp_registers.restore_to_mark(hwm);
522            }
523        }
524    }
525
    /// Coerces an Option value to Bool by loading its one-byte tag
    /// (0 = `None`, nonzero = `Some`) into `target_reg`.
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        // Evaluate the option; the scalar result is the pointer to the
        // tagged union.
        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        /* TODO: Bring this back // let (tag_offset, tag_size, ..) = base_pointer_of_tagged_union_reg
            .underlying()
            .unwrap_info()
            .unwrap();
        assert_eq!(tag_size.0, 1);
        */

        // Move the tag portion to the target variable.
        // NOTE(review): assumes the tag lives at offset 0 of the union —
        // confirm against the layout (see the disabled assert above).
        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }
552
553    pub(crate) fn emit_coerce_to_any(&mut self, output: &Place, expr: &Expression, ctx: &Context) {
554        //info!(?target_reg.ty, "it wants to coerce this to bool");
555
556        let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);
557
558        let pointer_register = self.emit_compute_effective_address_to_register(
559            output,
560            &expr.node,
561            "get starting ptr to output",
562        );
563        let output_aggregate_location = AggregateMemoryLocation::new(
564            MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
565        );
566
567        self.builder.add_st32_using_ptr_with_offset(
568            &output_aggregate_location
569                .offset(ANY_HEADER_PTR_OFFSET, u32_type())
570                .location,
571            &source_aggregate_pointer,
572            &expr.node,
573            "store aggregate pointer into Any Header",
574        );
575        let temp_size = self.temp_registers.allocate(
576            VmType::new_contained_in_register(u32_type()),
577            "Any header size temp",
578        );
579
580        self.builder.add_mov_32_immediate_value(
581            temp_size.register(),
582            source_aggregate_pointer.ty.basic_type.total_size.0,
583            &expr.node,
584            "fixed size",
585        );
586        self.builder.add_st32_using_ptr_with_offset(
587            &output_aggregate_location
588                .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
589                .location,
590            temp_size.register(),
591            &expr.node,
592            "copy size into Any Header",
593        );
594
595        self.builder.add_mov_32_immediate_value(
596            temp_size.register(),
597            source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
598            &expr.node,
599            "reuse for hash",
600        );
601        self.builder.add_st32_using_ptr_with_offset(
602            &output_aggregate_location
603                .offset(ANY_HEADER_HASH_OFFSET, u32_type())
604                .location,
605            temp_size.register(),
606            &expr.node,
607            "copy size into Any Header",
608        );
609    }
610
611    pub(crate) fn merge_arguments_keep_literals(
612        outer_args: &Vec<ArgumentExpression>,
613        intrinsic_args: &Vec<ArgumentExpression>,
614    ) -> Vec<ArgumentExpression> {
615        // HACK: we assume that the parameters are in the same order.
616        // If one has more arguments, we assume that those extra arguments are in the end
617        // We also assume that the first is self
618        let mut all_args = outer_args.clone();
619
620        if intrinsic_args.len() > outer_args.len() + 1 {
621            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
622        }
623
624        all_args
625    }
626
627    pub(crate) fn emit_borrow_mutable_reference(
628        &mut self,
629        target_register: &TypedRegister,
630        node: &Node,
631        expr: &SingleLocationExpression,
632        ctx: &Context,
633    ) {
634        let location = self.emit_lvalue_address(expr, ctx);
635
636        let abs_pointer = self.emit_compute_effective_address_to_register(
637            &location,
638            node,
639            "calculate absolute address for reference",
640        );
641
642        self.builder.add_mov_reg(
643            target_register,
644            &abs_pointer,
645            node,
646            "copy calculated address for borrow",
647        );
648    }
649
650    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
651        let node = &expr.node;
652        let (line, _column) = self
653            .source_map_lookup
654            .source_map
655            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
656        let source_line_info = SourceFileLineInfo {
657            row: line,
658            file_id: node.span.file_id as usize,
659        };
660
661        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
662        if let Some((start, end)) = answer {
663            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
664            let (line, col) = self
665                .source_map_lookup
666                .source_map
667                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
668            let source_line = self
669                .source_map_lookup
670                .source_map
671                .get_source_line(node.span.file_id, line)
672                .unwrap_or("<source line not found>");
673
674            info!(
675                file=%relative_file_name,
676                line=%line,
677                col=%col,
678                source=%source_line,
679                "{}",
680                description
681            );
682        }
683    }
684}