swamp_code_gen/
code_bld.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_types::aligner::{SAFE_ALIGNMENT, align};
use swamp_vm_types::types::{
    BasicTypeRef, Destination, FramePlacedType, TypedRegister, VmType, b8_type, u8_type, u32_type,
};
use swamp_vm_types::{
    AggregateMemoryLocation, FrameMemoryRegion, FrameMemorySize, MemoryLocation, MemoryOffset,
    MemorySize, PointerLocation, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use tracing::info;

#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
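/// Per-function code generation context: holds the instruction builder, the
/// variable-to-register mapping, the temporary register pool, the frame
/// allocator, and the source-map lookup used while lowering expressions.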
pub(crate) struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub(crate) builder: &'a mut InstructionBuilder<'a>,
    pub(crate) variable_registers: SeqMap<usize, TypedRegister>,
    pub(crate) temp_registers: HwmTempRegisterPool,
    pub(crate) frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    //pub spilled_registers: SpilledRegisterScopes,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
}

impl<'a> CodeBuilder<'a> {
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            //frame_memory_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
        }
    }
}
impl CodeBuilder<'_> {
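    /// Copies a value from `source_reg` into `target_reg`.
    ///
    /// For operands with mutable-reference (pointer) semantics this either moves the
    /// pointer itself (when the target is also a mutable reference) or block-copies
    /// the pointed-to aggregate; plain scalars are copied with a register move.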
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else {
            self.builder.add_mov_reg(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. primitive to primitive. {comment}"),
            );
        }
    }

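    /// Loads `count` contiguous registers, starting at `start_reg`, from the given
    /// frame memory region. Thin wrapper around the instruction builder.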
    pub(crate) fn add_ld_regs_from_frame(
        &mut self,
        start_reg: &TypedRegister,
        start_address: FrameMemoryRegion,
        count: u8,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld_contiguous_regs_from_frame(
            start_reg.index,
            start_address,
            count,
            node,
            comment,
        );
    }

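    /// Returns the frame size consumed so far, rounded up to `SAFE_ALIGNMENT`;
    /// this is the size used when patching the function's enter instruction.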
    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

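    /// Prints the source location (file:row:col) and the text span of `node`
    /// to stderr; a small debugging aid.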
    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
        //info!(?source_code_line, "generating");
    }

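    /// Emits code for a unary operator into `target_reg`: logical `not` on Bool
    /// and negation on Int or Float. Panics on any other operand type.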
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }

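    /// Emits an `if`, optionally with an `else`, into `output_destination`.
    ///
    /// The condition emits a jump-on-false placeholder; both branches write to the
    /// same destination, and when an else branch exists the true branch ends with a
    /// jump that skips over it.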
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Destination,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        // The true expression simply takes over our target,
        // both to reuse the current destination and to handle the case where there is no else
        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            // The true branch must jump over the false branch
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            // If the condition was false, execution continues here
            self.builder.patch_jump_here(jump_on_false_condition);

            // The else expression can also just take over our if target
            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }

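    /// Reserves frame space large enough to hold `count` registers (for example
    /// when registers must be saved to the frame) and returns that region.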
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }

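    /// Writes an `Option` value into the target memory location.
    ///
    /// For `Some`, the tag byte is set to 1 at the union's tag offset and the payload
    /// expression is emitted into the payload offset; for `None`, the tag byte is set to 0.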
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Overwrite the tag with 1 (`Some`)
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // for options, we know that the tag size is one byte
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag"); // 0 signals `None`

            // For `none` we simply overwrite the tag with zero
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }

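    /// Emits a block: every expression but the last as a statement, and the last
    /// expression into `target_reg` unless its type is Unit.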
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Destination,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        } else {
            // empty blocks are allowed; they produce no value and emit nothing
        }
    }

    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        //info!(unique_id=?variable.unique_id_within_function, name=?variable.assigned_name, "trying to fetch");
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

    fn get_variable_frame_placed(&self, variable: &VariableRef) -> FramePlacedType {
        let frame_address = self
            .variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap();

        frame_address.frame_placed()
    }

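    /// Allocates frame space for `ty` and returns a temporary register that holds
    /// the absolute address of that space (computed with a `lea`).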
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
        MemoryLocation {
            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> Destination {
        let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
        Destination::new_location(location)
    }

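    /// Emits a read of a constant into `output`.
    ///
    /// Aggregate constants are copied from the constant area via a temporary base
    /// pointer; scalar constants are loaded into a register and, if the destination
    /// is a memory location, stored there through a temporary.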
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Destination,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        // TODO: Bring this back // assert_eq!(target_reg.size(), constant_region.size());

        if constant_region.ty().is_aggregate() {
            // load the known constant address into a temp register to use as a base for the block copy
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }

    pub(crate) fn emit_coerce_int_to_char(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let destination = Destination::Register(target_reg.clone());

        // Since Char (u32) is the same size as Int (i32), we can just use the value directly
        self.emit_expression(&destination, expr, ctx);
    }

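    /// Coerces an `Option` value to Bool by loading its tag byte (0 = `None`,
    /// 1 = `Some`) into `target_reg`.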
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        //info!(?target_reg.ty, "it wants to coerce this to bool");

        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        /* TODO: Bring this back // let (tag_offset, tag_size, ..) = base_pointer_of_tagged_union_reg
            .underlying()
            .unwrap_info()
            .unwrap();
        assert_eq!(tag_size.0, 1);
        */

        // Load the tag byte into the target register
        self.builder.add_ld8_from_pointer_with_offset_u16(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }

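    /// Merges the outer call arguments with any extra trailing intrinsic arguments.
    /// Assumes both lists are in the same order, that the first intrinsic argument is
    /// `self`, and that any additional intrinsic arguments come last.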
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &Vec<ArgumentExpression>,
        intrinsic_args: &Vec<ArgumentExpression>,
    ) -> Vec<ArgumentExpression> {
        // HACK: we assume that the parameters are in the same order.
        // If one has more arguments, we assume that those extra arguments are at the end.
        // We also assume that the first is `self`.
        let mut all_args = outer_args.clone();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }

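    /// Emits a mutable borrow: computes the effective address of the lvalue and
    /// copies that absolute pointer into `target_register`.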
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }

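    /// Logs the source position (file, line, column) and source text for `expr`
    /// when it starts a new source line, as tracked by `debug_line_tracker`.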
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((_start, _end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
}