swamp_code_gen/code_bld.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::err;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_isa::aligner::{align, SAFE_ALIGNMENT};
use swamp_vm_isa::{
    FrameMemorySize, MemoryOffset, MemorySize, ANY_HEADER_HASH_OFFSET,
    ANY_HEADER_PTR_OFFSET, ANY_HEADER_SIZE_OFFSET, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use swamp_vm_types::types::{
    b8_type, u16_type, u32_type, u8_type, BasicTypeRef, Place, TypedRegister, VmType,
};
use swamp_vm_types::{AggregateMemoryLocation, FrameMemoryRegion, MemoryLocation, PointerLocation};
use tracing::info;

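/// Options that control how the code builder emits debug output.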
#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
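
/// Emits VM instructions for one function body, tracking variable registers,
/// temporary registers, frame memory, and source-line debug information.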
pub struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub builder: &'a mut InstructionBuilder<'a>,
    pub variable_registers: SeqMap<usize, TypedRegister>,
    pub temp_registers: HwmTempRegisterPool,
    pub frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    //pub spilled_registers: SpilledRegisterScopes,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
    pub errors: Vec<err::Error>,
}

impl<'a> CodeBuilder<'a> {
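    /// Creates a `CodeBuilder` for generating the code of a single function body.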
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            //frame_memory_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
            errors: Vec::new(),
        }
    }
}

impl CodeBuilder<'_> {
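    /// Copies a value from `source_reg` to `target_reg`. Scalars and pointer-to-pointer
    /// cases are moved register-to-register; aggregates behind a mutable reference are
    /// copied with a block copy of the value's size.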
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else {
            self.builder.add_mov_reg(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. primitive to primitive. {comment}"),
            );
        }
    }

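    /// Returns the frame size used so far, rounded up to `SAFE_ALIGNMENT`.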
    #[must_use]
    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

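    /// Patches a previously emitted enter instruction with the final aligned frame size.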
    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
        //info!(?source_code_line, "generating");
    }

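    /// Emits code for a unary operator (`!` on Bool, negation on Int and Float)
    /// into `target_reg`.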
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }

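    /// Emits an `if`/`else` expression: evaluates the condition, runs the true branch
    /// into `output_destination`, and jumps over the optional false branch.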
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Place,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        // The true expression just takes over our target,
        // both to reuse the current target and because there may be no else branch
        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            // The true branch needs to jump over the false section
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            // If the condition was false, execution continues here
            self.builder.patch_jump_here(jump_on_false_condition);

            // The else expression can also just take over our if target
            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }

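    /// Reserves frame memory large enough to spill `count` registers and returns the region.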
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }

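    /// Writes an `Option` value into the target memory location: tag byte 1 plus the
    /// payload for `Some`, or tag byte 0 for `None`.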
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Place,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Overwrite the tag with 1 (`Some`)
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // for options, we know that the tag size is one byte
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(
                        union_information.payload_offset,
                        union_information.get_variant_by_index(1).ty.clone(),
                    )
                    .location;
                self.emit_initialize_memory_for_any_type(
                    payload_location,
                    node,
                    "initialize option payload",
                );
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag"); // 0 signals `None`

            // For `none` we simply overwrite the tag with zero
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }

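    /// Emits a block: all but the last expression as statements, with the last
    /// expression written to `target_reg` unless it has Unit type.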
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Place,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        } else {
            // empty blocks are allowed for side effects
        }
    }

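    /// Looks up the register assigned to a variable; panics if it has none.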
    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        //info!(unique_id=?variable.unique_id_within_function, name=?variable.assigned_name, "trying to fetch");
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

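    /// Allocates frame space for `ty`, optionally clears it, and returns a temporary
    /// register holding the absolute address of the allocation.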
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        if clear_it {
            self.builder.add_frame_memory_clear(
                temp.register.region(),
                node,
                &format!("{comment}: clear temporary memory"),
            );
        }

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, clear_it, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_pointer_location(ty, clear_it, node, comment);
        MemoryLocation {
            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        clear_it: bool,
        node: &Node,
        comment: &str,
    ) -> Place {
        let location =
            self.allocate_frame_space_and_return_memory_location(ty, clear_it, node, comment);
        Place::new_location(location)
    }

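    /// Loads a constant into `output`: aggregates are copied from the constant memory
    /// area, scalars are loaded from their absolute address (via a temporary register
    /// when the destination is memory).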
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Place,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        // TODO: Bring this back: assert_eq!(target_reg.size(), constant_region.size());

        if constant_region.ty().is_aggregate() {
            // load the known constant address into a temp register to use as a base for the block copy
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }

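    /// Coerces an Int expression to Char by emitting it directly into the target register.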
    pub(crate) fn emit_coerce_int_to_char(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let destination = Place::Register(target_reg.clone());

        // Since Char (u32) is the same size as Int (i32), we can just use it directly
        self.emit_expression(&destination, expr, ctx);
    }

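    /// Coerces an Int expression to Byte: emits the value, then range-checks it.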
    pub(crate) fn emit_coerce_int_to_byte(
        &mut self,
        output: &Place,
        expr: &Expression,
        ctx: &Context,
    ) {
        // Since an Int already occupies a full register, we can emit it directly and then range-check it
        self.emit_expression(output, expr, ctx);

        match output {
            Place::Discard => {}
            Place::Register(dest_reg) => {
                self.builder
                    .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
            }
            Place::Memory(mem) => {
                let hwm = self.temp_registers.save_mark();
                let temp_u8 = self
                    .temp_registers
                    .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
                self.builder.add_ld8_from_pointer_with_offset(
                    temp_u8.register(),
                    &mem.base_ptr_reg,
                    mem.offset,
                    &expr.node,
                    "load it to check it",
                );
                self.builder
                    .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
                self.temp_registers.restore_to_mark(hwm);
            }
        }
    }

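    /// Coerces an Int expression to Short; the range check is currently commented out.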
    pub(crate) fn emit_coerce_int_to_short(
        &mut self,
        output: &Place,
        expr: &Expression,
        ctx: &Context,
    ) {
        // Since an Int already occupies a full register, we can emit it directly and then range-check it
        self.emit_expression(output, expr, ctx);

        match output {
            Place::Discard => {}
            Place::Register(dest_reg) => {
                //self.builder
                //  .add_check_u16(dest_reg, &expr.node, "trunc int to short");
            }
            Place::Memory(mem) => {
                let hwm = self.temp_registers.save_mark();
                let temp_u16 = self
                    .temp_registers
                    .allocate(VmType::new_contained_in_register(u16_type()), "temp u16");
                self.builder.add_ld16_from_pointer_with_offset_u16(
                    temp_u16.register(),
                    &mem.base_ptr_reg,
                    mem.offset,
                    &expr.node,
                    "load it to check it",
                );
                //self.builder
                //  .add_check_u16(temp_u16.register(), &expr.node, "trunc int to short");
                self.temp_registers.restore_to_mark(hwm);
            }
        }
    }

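    /// Coerces an Option to Bool by loading its tag byte (0 = None, 1 = Some) into `target_reg`.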
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        //info!(?target_reg.ty, "it wants to coerce this to bool");

        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        /* TODO: Bring this back // let (tag_offset, tag_size, ..) = base_pointer_of_tagged_union_reg
            .underlying()
            .unwrap_info()
            .unwrap();
        assert_eq!(tag_size.0, 1);
        */

        // Move the tag portion to the target variable
        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }

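    /// Coerces an aggregate value to `Any` by writing an Any header into the output:
    /// the aggregate's pointer, its total size, and its type hash.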
    pub(crate) fn emit_coerce_to_any(&mut self, output: &Place, expr: &Expression, ctx: &Context) {
        let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);

        let pointer_register = self.emit_compute_effective_address_to_register(
            output,
            &expr.node,
            "get starting ptr to output",
        );
        let output_aggregate_location = AggregateMemoryLocation::new(
            MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
        );

        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_PTR_OFFSET, u32_type())
                .location,
            &source_aggregate_pointer,
            &expr.node,
            "store aggregate pointer into Any Header",
        );
        let temp_size = self.temp_registers.allocate(
            VmType::new_contained_in_register(u32_type()),
            "Any header size temp",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.total_size.0,
            &expr.node,
            "fixed size",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy size into Any Header",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
            &expr.node,
            "reuse size temp register for hash",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location
                .offset(ANY_HEADER_HASH_OFFSET, u32_type())
                .location,
            temp_size.register(),
            &expr.node,
            "copy hash into Any Header",
        );
    }

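    /// Merges outer call arguments with intrinsic arguments, appending any extra
    /// trailing intrinsic arguments (the first intrinsic argument is assumed to be `self`).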
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &Vec<ArgumentExpression>,
        intrinsic_args: &Vec<ArgumentExpression>,
    ) -> Vec<ArgumentExpression> {
        // HACK: we assume that the parameters are in the same order.
        // If one has more arguments, we assume that those extra arguments are at the end.
        // We also assume that the first is `self`.
        let mut all_args = outer_args.clone();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }

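    /// Computes the absolute address of an lvalue and copies it into `target_register`,
    /// implementing a mutable borrow.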
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }

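    /// Logs the source file, line, column, and source text for an expression,
    /// but only when the expression starts a new source line.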
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((_start, _end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
}