swamp_code_gen/
func.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::code_bld::CodeBuilder;
use crate::ctx::Context;
use crate::layout::layout_variables;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::GenOptions;
use crate::top_state::TopLevelGenState;
use crate::{
    FunctionInData, FunctionIp, FunctionIpKind, GenFunctionInfo, MAX_REGISTER_INDEX_FOR_PARAMETERS,
    RepresentationOfRegisters, SpilledRegisterRegion,
};
use source_map_cache::SourceMapWrapper;
use source_map_node::Node;
use std::collections::HashSet;
use swamp_semantic::{InternalFunctionDefinitionRef, InternalMainExpression, formal_function_name};
use swamp_vm_debug_info::FunctionDebugInfo;
use swamp_vm_instr_build::InstructionBuilder;
use swamp_vm_types::types::{
    Destination, FunctionInfo, FunctionInfoKind, TypedRegister, VariableRegister, VmType,
    VmTypeOrigin,
};
use swamp_vm_types::{
    InstructionPosition, InstructionPositionOffset, InstructionRange, MemoryLocation, MemoryOffset,
    PatchPosition,
};

impl TopLevelGenState {
    /// # Panics
    ///
    /// Panics if `internal_fn_def.program_unique_id` is zero.
    pub fn emit_function_def(
        &mut self,
        internal_fn_def: &InternalFunctionDefinitionRef,
        source_map_wrapper: &SourceMapWrapper,
        should_ignore_host_call: bool,
    ) {
        assert_ne!(internal_fn_def.program_unique_id, 0);

        let complete_function_name = formal_function_name(internal_fn_def);
        //info!(complete_function_name, "code generating function def");

        let in_data = FunctionInData {
            function_name_node: internal_fn_def.name.0.clone(),
            kind: FunctionInfoKind::Normal(internal_fn_def.program_unique_id as usize),
            assigned_name: complete_function_name,
            function_variables: internal_fn_def.function_variables.clone(),
            return_type: internal_fn_def.signature.return_type.clone(),
            expression: internal_fn_def.body.clone(),
        };

        let attrs = &internal_fn_def.attributes;

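        // Test functions and host-called functions end in a halt rather than a
        // normal return (see `finalize_function`), handing control back to the host.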
        let should_insert_halt =
            Self::is_test_call(attrs) || (!should_ignore_host_call && Self::is_host_call(attrs));

        let (start_ip, end_ip, function_info) =
            self.emit_function_preamble(&in_data, source_map_wrapper, should_insert_halt);

        let count_ip = end_ip.0 - start_ip.0;

        let range = InstructionRange {
            start: start_ip,
            count: InstructionPositionOffset(count_ip),
        };

        self.codegen_state
            .function_infos
            .insert(
                internal_fn_def.program_unique_id,
                GenFunctionInfo {
                    ip_range: range.clone(),
                    params: function_info.params.clone(),
                    return_type: self.codegen_state.layout_cache.layout(&in_data.return_type),
                    internal_function_definition: internal_fn_def.clone(),
                },
            )
            .unwrap();

        self.codegen_state.function_ips.ranges.push(FunctionIp {
            ip_range: range.clone(),
            kind: FunctionIpKind::Normal(internal_fn_def.program_unique_id),
        });

        self.codegen_state
            .debug_info
            .function_table
            .entries
            .push(FunctionDebugInfo {
                start_pc: range.start.0,
                function_id: internal_fn_def.program_unique_id,
            });

        self.codegen_state
            .debug_info
            .function_infos
            .insert(internal_fn_def.program_unique_id, function_info)
            .unwrap();
    }

    /// Emits the code for the top-level `main` expression.
    pub fn emit_main_function(
        &mut self,
        main: &InternalMainExpression,
        _options: &GenOptions,
        source_map_lookup: &SourceMapWrapper,
    ) {
        let variable_and_frame_memory = layout_variables(
            &mut self.codegen_state.layout_cache,
            &main.expression.node,
            &main.scopes,
            &main.expression.ty,
        );

        let in_data = FunctionInData {
            function_name_node: main.expression.node.clone(),
            kind: FunctionInfoKind::Normal(main.program_unique_id as usize),
            assigned_name: "main_expr".to_string(),
            function_variables: main.scopes.clone(),
            return_type: main.expression.ty.clone(),
            expression: main.expression.clone(),
        };

        let (start_ip, end_ip, _function_info) =
            self.emit_function_preamble(&in_data, source_map_lookup, true);

        let _function_info = FunctionInfo {
            kind: FunctionInfoKind::Normal(main.program_unique_id as usize),
            frame_memory: variable_and_frame_memory.frame_memory,
            params: vec![],
            return_type: variable_and_frame_memory.return_type,
            name: "main".to_string(),
            ip_range: InstructionRange {
                start: start_ip,
                count: InstructionPositionOffset(end_ip.0 - start_ip.0),
            },
        };
    }

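    /// Returns `true` when the register index lies above the range used for
    /// parameter passing, meaning the callee is responsible for preserving it.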
    #[must_use]
    pub const fn is_callee_save(reg_index: u8) -> bool {
        reg_index > MAX_REGISTER_INDEX_FOR_PARAMETERS
    }

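    /// Spills the callee-save registers referenced by this function's frame
    /// memory into temporary frame space as part of the prologue. Returns the
    /// spilled region so the epilogue can restore it, or `None` if nothing
    /// needed to be saved.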
    pub fn spill_callee_save_registers(
        code_builder: &mut CodeBuilder,
        function_info: &FunctionInfo,
        node: &Node,
    ) -> Option<SpilledRegisterRegion> {
        // Collect the actual register indices that need to be saved
        let mut registers_to_save: Vec<u8> = Vec::new();

        for variable_register in &function_info.frame_memory.variable_registers {
            if Self::is_callee_save(variable_register.register.index)
                && !variable_register.register.ty.is_mutable_primitive()
            {
                registers_to_save.push(variable_register.register.index);
            }
        }

        if registers_to_save.is_empty() {
            return None;
        }

        // Sort and remove duplicate register indices
        registers_to_save.sort_unstable();
        registers_to_save.dedup();

        let count = registers_to_save.len() as u8;

        let abi_parameter_frame_memory_region = code_builder.temp_frame_space_for_register(
            count,
            "temporary space for callee_save (and not mutable primitives)",
        );

        // Always use the contiguous-range approach since callee-save registers are typically >= r6
        // and the mask approach only works for registers 0-7
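        // Note: the store below covers `count` registers starting at the lowest
        // index, so the collected callee-save registers are assumed to form one
        // contiguous run.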
        let start_reg = registers_to_save[0];

        code_builder.builder.add_st_contiguous_regs_to_frame(
            abi_parameter_frame_memory_region,
            start_reg,
            count,
            node,
            "prologue, spill contiguous range of callee-save registers to stack frame memory",
        );

        Some(SpilledRegisterRegion {
            registers: RepresentationOfRegisters::Range { start_reg, count },
            frame_memory_region: abi_parameter_frame_memory_region,
        })
    }

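    /// For every variable whose storage lives in frame memory, loads the frame
    /// address into the variable's register and clears the backing memory.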
    pub fn initialize_and_clear_variables_that_are_on_the_frame(
        instruction_builder: &mut InstructionBuilder,
        variable_registers: &[VariableRegister],
        node: &Node,
    ) {
        for variable_reg in variable_registers {
            if let VmTypeOrigin::Frame(frame_region) = variable_reg.register.ty.origin {
                instruction_builder.add_lea_from_frame_region(
                    &variable_reg.register,
                    frame_region,
                    node,
                    &format!("define frame placed register {variable_reg}"),
                );

                instruction_builder.add_frame_memory_clear(
                    frame_region,
                    node,
                    &format!("clear memory for indirect variable {variable_reg}"),
                );
            }
        }
    }

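    /// Emits the function prologue: an enter placeholder that is patched once
    /// the body has been generated, followed by spilling of any callee-save
    /// registers the function uses.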
    pub fn function_prologue(
        code_builder: &mut CodeBuilder,
        function_info: &FunctionInfo,
        node: &Node,
    ) -> (Option<SpilledRegisterRegion>, PatchPosition) {
        let enter_patch_position = code_builder
            .builder
            .add_enter_placeholder(&Node::default(), "prologue");

        let maybe_spilled = Self::spill_callee_save_registers(code_builder, function_info, node);

        // Note: Variable initialization (LEA instructions) is now generated
        // at the point of variable definition, not in the function prologue

        (maybe_spilled, enter_patch_position)
    }

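    /// Emits the function epilogue: restores any callee-save registers that
    /// were spilled by the prologue.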
    fn function_epilogue(
        instruction_builder: &mut CodeBuilder,
        maybe_spilled_registers: Option<SpilledRegisterRegion>,
        node: &Node,
        comment: &str,
    ) {
        if let Some(spilled_register_region) = maybe_spilled_registers {
            instruction_builder.emit_restore_region(
                spilled_register_region,
                &HashSet::new(),
                node,
                &format!("function epilogue: {comment}"),
            );
        }
    }

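    /// Generates the code for one complete function: lays out frame memory and
    /// variables, registers the `FunctionInfo`, emits the prologue, the body
    /// expression, and the epilogue, and finalizes with either a return or a
    /// halt. Returns the start and end instruction positions together with the
    /// filled-in `FunctionInfo`.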
    pub fn emit_function_preamble(
        &mut self,
        in_data: &FunctionInData,
        source_map_wrapper: &SourceMapWrapper,
        is_called_by_host: bool,
    ) -> (InstructionPosition, InstructionPosition, FunctionInfo) {
        let start_ip = self.ip();

        //info!(in_data.assigned_name, "emit_function");

        let frame_and_variable_info = layout_variables(
            &mut self.codegen_state.layout_cache,
            &in_data.function_name_node,
            &in_data.function_variables,
            &in_data.return_type,
        );

        // Get the return type layout before borrowing codegen_state mutably
        let return_basic_type = self.codegen_state.layout_cache.layout(&in_data.return_type);

        let mut params = Vec::new();
        for (_index, x) in &frame_and_variable_info.parameter_and_variable_offsets {
            params.push(x.ty.basic_type.clone());
        }

        let mut function_info = FunctionInfo {
            kind: in_data.kind.clone(),
            frame_memory: frame_and_variable_info.frame_memory,
            params,
            return_type: frame_and_variable_info.return_type,
            name: in_data.assigned_name.clone(),
            ip_range: InstructionRange {
                start: start_ip,
                count: InstructionPositionOffset(0),
            },
        };

        // debug!(name=?in_data.assigned_name, "code generating function");

        let _complete_function_info = self.codegen_state.add_function(
            function_info.clone(),
            &in_data.function_name_node,
            "function",
        );

        let mut instruction_builder = InstructionBuilder::new(&mut self.builder_state);

        let temp_pool = HwmTempRegisterPool::new(128, 32);

        let ctx = Context::new();

        let mut function_code_builder = CodeBuilder::new(
            &mut self.codegen_state,
            &mut instruction_builder,
            frame_and_variable_info.parameter_and_variable_offsets,
            //frame_and_variable_info.frame_registers,
            temp_pool,
            frame_and_variable_info.local_frame_allocator,
            self.code_builder_options,
            source_map_wrapper,
        );

        let (maybe_spilled_registers, enter_patch_position) = Self::function_prologue(
            &mut function_code_builder,
            &function_info,
            &in_data.function_name_node,
        );
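        // The return value travels in r0: scalar results are written directly to
        // the register, while larger values are returned indirectly through the
        // memory that r0 points at.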
        let return_register =
            TypedRegister::new_vm_type(0, VmType::new_unknown_placement(return_basic_type));

        let destination = if return_register.ty.basic_type.is_scalar() {
            Destination::Register(return_register)
        } else {
            let memory_location = MemoryLocation {
                ty: VmType::new_unknown_placement(return_register.ty().clone()),
                base_ptr_reg: return_register,
                offset: MemoryOffset(0),
            };
            if let FunctionInfoKind::Constant(_found_constant) = &in_data.kind {
                function_code_builder.emit_initialize_memory_for_any_type(
                    &memory_location,
                    &in_data.expression.node,
                    "prepare r0 memory for constant",
                );
            }

            Destination::Memory(memory_location)
        };

        function_code_builder.emit_expression(&destination, &in_data.expression, &ctx);

        function_code_builder.patch_enter(enter_patch_position);

        Self::function_epilogue(
            &mut function_code_builder,
            maybe_spilled_registers,
            &in_data.expression.node,
            "epilogue",
        );

        self.finalize_function(&GenOptions {
            is_halt_function: is_called_by_host,
        });

        let end_ip = self.ip();

        function_info.ip_range.count = InstructionPositionOffset(end_ip.0 - start_ip.0);

        (start_ip, end_ip, function_info)
    }

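    /// Terminates the function body: emits `hlt` when the function is entered
    /// directly by the host, otherwise a normal `ret`.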
    pub fn finalize_function(&mut self, options: &GenOptions) {
        if options.is_halt_function {
            self.builder_state.add_hlt(&Node::default(), "");
        } else {
            self.builder_state.add_ret(&Node::default(), "");
        }
    }
}