use crate::code_bld::CodeBuilder;
use crate::ctx::Context;
use crate::err::Error;
use crate::reg_pool::RegisterPool;
use crate::state::FunctionFixup;
use crate::{
    ArgumentAndTempScope, MAX_REGISTER_INDEX_FOR_PARAMETERS, RepresentationOfRegisters,
    SpilledRegisterRegion, err,
};
use source_map_node::Node;
use std::collections::HashSet;
use swamp_semantic::{ArgumentExpression, InternalFunctionDefinitionRef, pretty_module_name};
use swamp_types::TypeKind;
use swamp_types::prelude::Signature;
use swamp_vm_types::types::{BasicTypeRef, Destination, TypedRegister, VmType};
use swamp_vm_types::{FrameMemoryRegion, REG_ON_FRAME_SIZE};

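/// One deferred move of an argument value from the temporary register where
/// it was evaluated into its canonical ABI argument register.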
pub struct CopyArgument {
    pub canonical_target: TypedRegister,
    pub source_temporary: TypedRegister,
}
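
/// Result of `emit_arguments`: the spilled-register scope that must be
/// restored after the call, plus the registers the callee may mutate whose
/// values must be copied back to their original locations.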
pub struct EmitArgumentInfo {
    pub argument_and_temp_scope: ArgumentAndTempScope,
    pub copy_back_of_registers_mutated_by_callee: Vec<MutableReturnReg>,
}

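/// One pending copy-back: after the callee returns and registers are
/// restored, the value left in `parameter_reg` is written to
/// `target_location_after_call`.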
pub struct MutableReturnReg {
    pub target_location_after_call: Destination,
    pub parameter_reg: TypedRegister,
}

impl CodeBuilder<'_> {
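    /// Spills everything a call could clobber to frame memory: the ABI
    /// return/argument registers (r0..=r6) as a mask, and any currently
    /// allocated temp registers (r128 and up) as a contiguous range.
    /// Returns the scope describing both spilled regions so `emit_post_call`
    /// can restore them.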
    pub fn spill_required_registers(&mut self, node: &Node, comment: &str) -> ArgumentAndTempScope {
        // One spill slot each for r0 (the return register) and the argument
        // registers r1..=r6.
        const ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS: usize =
            MAX_REGISTER_INDEX_FOR_PARAMETERS as usize + 1;
        const ABI_ARGUMENT_MASK: u8 =
            ((1u16 << ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS) - 1) as u8;

        let abi_parameter_frame_memory_region = self.temp_frame_space_for_register(
            ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS as u8,
            &format!("emit abi arguments r0-r6 {comment}"),
        );

        self.builder.add_st_masked_regs_to_frame(
            abi_parameter_frame_memory_region.addr,
            ABI_ARGUMENT_MASK,
            node,
            "spill masked registers to stack frame memory.",
        );

        let abi_parameter_region = SpilledRegisterRegion {
            registers: RepresentationOfRegisters::Mask(ABI_ARGUMENT_MASK),
            frame_memory_region: abi_parameter_frame_memory_region,
        };

        let (first_temp_register_index, temp_register_probable_live_count) =
            self.temp_registers.start_index_and_number_of_allocated();
        debug_assert_eq!(first_temp_register_index, 128);

        let temp_register_region = if temp_register_probable_live_count > 0 {
            let temp_register_frame_memory_region = self.temp_frame_space_for_register(
                temp_register_probable_live_count,
                &format!(
                    "emit temp arguments from r{first_temp_register_index} count:{temp_register_probable_live_count} {comment}"
                ),
            );
            let temp_register_region = SpilledRegisterRegion {
                registers: RepresentationOfRegisters::Range {
                    start_reg: first_temp_register_index,
                    count: temp_register_probable_live_count,
                },
                frame_memory_region: temp_register_frame_memory_region,
            };

            self.builder.add_st_contiguous_regs_to_frame(
                temp_register_frame_memory_region,
                first_temp_register_index,
                temp_register_probable_live_count,
                node,
                "spill contiguous range of registers to stack frame memory",
            );
            Some(temp_register_region)
        } else {
            None
        };

        ArgumentAndTempScope {
            argument_registers: abi_parameter_region,
            scratch_registers: temp_register_region,
        }
    }

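    /// Prepares r0 for an aggregate return value: computes the effective
    /// address of the output destination and copies that pointer into r0 so
    /// the callee can write the result in place.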
    pub fn setup_return_pointer_reg(
        &mut self,
        output_destination: &Destination,
        return_basic_type: BasicTypeRef,
        node: &Node,
    ) {
        let r0 = TypedRegister::new_vm_type(0, VmType::new_unknown_placement(return_basic_type));

        let return_pointer_reg = self.emit_compute_effective_address_to_register(
            output_destination,
            node,
            "r0: create an absolute pointer to r0 if needed",
        );

        self.builder.add_mov_reg(
            &r0,
            &return_pointer_reg,
            node,
            "r0: copy the return pointer into r0",
        );
    }

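    /// Emits the code that places one argument into `argument_to_use`.
    ///
    /// - `&mut` borrows of values that must be copied back (primitives) are
    ///   passed by value and scheduled for copy-back after the call; other
    ///   `&mut` borrows are passed as a flattened absolute pointer.
    /// - Materialized expressions that need a memory location are built in
    ///   temporary storage and passed by address; everything else is
    ///   evaluated directly into the register.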
    fn emit_single_argument(
        &mut self,
        argument_expr: &ArgumentExpression,
        argument_to_use: &TypedRegister,
        target_canonical_argument_register: &TypedRegister,
        parameter_basic_type: &BasicTypeRef,
        copy_back_phase_one: &mut Vec<MutableReturnReg>,
        node: &Node,
        ctx: &Context,
    ) {
        match argument_expr {
            ArgumentExpression::BorrowMutableReference(lvalue) => {
                let original_destination = self.emit_lvalue_address(lvalue, ctx);

                if parameter_basic_type.should_be_copied_back_when_mutable_arg_or_return() {
                    self.emit_transfer_value_to_register(
                        argument_to_use,
                        &original_destination,
                        node,
                        "must get primitive from lvalue and pass as copy back (by value)",
                    );

                    copy_back_phase_one.push(MutableReturnReg {
                        target_location_after_call: original_destination,
                        parameter_reg: target_canonical_argument_register.clone(),
                    });
                } else {
                    let flattened_source_pointer_reg = self
                        .emit_compute_effective_address_to_register(
                            &original_destination,
                            node,
                            "flattened into absolute pointer",
                        );
                    self.builder.add_mov_reg(
                        argument_to_use,
                        &flattened_source_pointer_reg,
                        node,
                        "copy absolute address",
                    );
                }
            }
            ArgumentExpression::MaterializedExpression(expr) => {
                if Self::rvalue_needs_memory_location_to_materialize_in(
                    &mut self.state.layout_cache,
                    expr,
                ) {
                    let temp_ptr =
                        self.emit_scalar_rvalue_or_pointer_to_temporary(expr, ctx, true);

                    self.builder.add_mov_reg(
                        argument_to_use,
                        &temp_ptr,
                        node,
                        "copy temporary storage address to argument register",
                    );
                } else {
                    self.emit_expression_into_register(
                        argument_to_use,
                        expr,
                        "argument expression into specific argument register",
                        ctx,
                    );
                }
            }
            ArgumentExpression::Expression(expr) => {
                self.emit_expression_into_register(
                    argument_to_use,
                    expr,
                    "argument expression into specific argument register",
                    ctx,
                );
            }
        }
    }

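    /// Emits the full argument setup for a call: spills caller state,
    /// prepares r0 for the return value (or schedules a scalar copy-back),
    /// evaluates each argument into its ABI register (via a temporary when
    /// required), moves `self` into the first argument register, and finally
    /// copies all temporaries into their canonical registers.
    ///
    /// The returned [`EmitArgumentInfo`] must be handed to `emit_post_call`
    /// once the call itself has been emitted. A rough sketch of the intended
    /// protocol, mirroring `emit_internal_call` below (not compiled here):
    ///
    /// ```ignore
    /// let info = builder.emit_arguments(dest, node, &sig, None, args, false, ctx);
    /// builder.emit_call(node, internal_fn, "call");
    /// builder.emit_post_call(info, node, "restore spilled after call");
    /// ```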
    pub(crate) fn emit_arguments(
        &mut self,
        output_destination: &Destination,
        node: &Node,
        signature: &Signature,
        self_variable: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        is_host_call: bool,
        ctx: &Context,
    ) -> EmitArgumentInfo {
        let mut copy_back_operations: Vec<MutableReturnReg> = Vec::new();
        let has_return_value = !matches!(&*signature.return_type.kind, TypeKind::Unit);

        let spill_scope = self.spill_required_registers(node, "spill before emit arguments");

        if has_return_value {
            let return_basic_type = self.state.layout_cache.layout(&signature.return_type);

            if return_basic_type.is_aggregate() {
                self.setup_return_pointer_reg(output_destination, return_basic_type, node);
            } else {
                let r0 =
                    TypedRegister::new_vm_type(0, VmType::new_unknown_placement(return_basic_type));
                copy_back_operations.push(MutableReturnReg {
                    target_location_after_call: output_destination.clone(),
                    parameter_reg: r0,
                });
            }
        }

        assert!(
            signature.parameters.len() <= MAX_REGISTER_INDEX_FOR_PARAMETERS.into(),
            "signature is wrong {signature:?}"
        );

        let mut temp_to_abi_copies = Vec::new();
        // r0 is reserved for the return value, so arguments use r1..=r6.
        let mut argument_registers = RegisterPool::new(1, 6);

        for (index_in_signature, type_for_parameter) in signature.parameters.iter().enumerate() {
            let parameter_basic_type = self
                .state
                .layout_cache
                .layout(&type_for_parameter.resolved_type);
            let target_canonical_argument_register = argument_registers.alloc_register(
                VmType::new_unknown_placement(parameter_basic_type.clone()),
                &format!("{index_in_signature}:{}", type_for_parameter.name),
            );

            let argument_to_use = if self.argument_needs_to_be_in_a_temporary_register_first(
                &target_canonical_argument_register,
            ) {
                let temp_reg = self.temp_registers.allocate(
                    target_canonical_argument_register.ty.clone(),
                    &format!(
                        "temporary argument for '{}'",
                        target_canonical_argument_register.comment
                    ),
                );
                let copy_argument = CopyArgument {
                    canonical_target: target_canonical_argument_register.clone(),
                    source_temporary: temp_reg.register.clone(),
                };
                temp_to_abi_copies.push(copy_argument);
                temp_reg.register
            } else {
                target_canonical_argument_register.clone()
            };

            if index_in_signature == 0 && self_variable.is_some() {
                let self_reg = self_variable.as_ref().unwrap();
                if self_reg.index != argument_to_use.index {
                    self.builder.add_mov_reg(
                        &argument_to_use,
                        self_reg,
                        node,
                        &format!(
                            "move self_variable ({}) to first argument register",
                            self_reg.ty
                        ),
                    );
                }
            } else {
                // `self` occupies the first parameter slot, so the argument
                // vector is offset by one when it is present.
                let argument_vector_index = if self_variable.is_some() {
                    index_in_signature - 1
                } else {
                    index_in_signature
                };
                let argument_expr_or_location = &arguments[argument_vector_index];

                self.emit_single_argument(
                    argument_expr_or_location,
                    &argument_to_use,
                    &target_canonical_argument_register,
                    &parameter_basic_type,
                    &mut copy_back_operations,
                    node,
                    ctx,
                );
            }
        }

        for (index, copy_argument) in temp_to_abi_copies.iter().enumerate() {
            let parameter_in_signature = &signature.parameters[index];
            self.builder.add_mov_reg(
                &copy_argument.canonical_target,
                &copy_argument.source_temporary,
                node,
                &format!(
                    "copy argument {index} ({}) in place from temporary '{}'",
                    parameter_in_signature.name, copy_argument.source_temporary.comment
                ),
            );
        }

        EmitArgumentInfo {
            argument_and_temp_scope: spill_scope,
            copy_back_of_registers_mutated_by_callee: copy_back_operations,
        }
    }

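    /// Restores caller state after a call: first saves every copy-back
    /// register into a fresh temp (so restoring the spill regions cannot
    /// clobber it), then restores scratch and argument registers, and
    /// finally writes the saved values to their real destinations.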
    pub(crate) fn emit_post_call(
        &mut self,
        spilled_arguments: EmitArgumentInfo,
        node: &Node,
        comment: &str,
    ) {
        let mut temp_saved_values = Vec::new();

        for copy_back in &spilled_arguments.copy_back_of_registers_mutated_by_callee {
            let temp_reg = self.temp_registers.allocate(
                copy_back.parameter_reg.ty.clone(),
                &format!(
                    "temp save for copy-back of {}",
                    copy_back.parameter_reg.comment
                ),
            );

            self.builder.add_mov_reg(
                temp_reg.register(),
                &copy_back.parameter_reg,
                node,
                &format!(
                    "save {} to temp before register restoration",
                    copy_back.parameter_reg
                ),
            );

            temp_saved_values.push((temp_reg, copy_back));
        }

        if let Some(scratch_region) = spilled_arguments.argument_and_temp_scope.scratch_registers {
            self.emit_restore_region(scratch_region, &HashSet::new(), node, comment);
        }

        self.emit_restore_region(
            spilled_arguments.argument_and_temp_scope.argument_registers,
            &HashSet::new(),
            node,
            comment,
        );

        for (temp_reg, copy_back) in temp_saved_values {
            let temp_source = Destination::Register(temp_reg.register().clone());
            self.emit_copy_value_between_destinations(
                &copy_back.target_location_after_call,
                &temp_source,
                node,
                "copy-back from temp to final destination",
            );
        }
    }

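    /// Reloads a spilled register region from frame memory, skipping any
    /// register listed in `output_destination_registers` (it now holds a
    /// live result), and coalescing adjacent registers into single
    /// contiguous loads where possible.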
    #[allow(clippy::too_many_lines)]
    pub fn emit_restore_region(
        &mut self,
        region: SpilledRegisterRegion,
        output_destination_registers: &HashSet<u8>,
        node: &Node,
        comment: &str,
    ) {
        match region.registers {
            RepresentationOfRegisters::Individual(spilled_registers_list) => {
                if !spilled_registers_list.is_empty() {
                    let mut sorted_regs = spilled_registers_list;
                    sorted_regs.sort_by_key(|reg| reg.index);

                    let filtered_regs: Vec<_> = sorted_regs
                        .into_iter()
                        .filter(|reg| !output_destination_registers.contains(&reg.index))
                        .collect();

                    if !filtered_regs.is_empty() {
                        let mut i = 0;
                        while i < filtered_regs.len() {
                            let seq_start_idx = i;
                            let start_reg = filtered_regs[i].index;
                            let mut seq_length = 1;

                            // Extend the sequence while register indices stay contiguous.
                            while i + 1 < filtered_regs.len()
                                && filtered_regs[i + 1].index == filtered_regs[i].index + 1
                            {
                                seq_length += 1;
                                i += 1;
                            }

                            let memory_offset = if seq_start_idx > 0 {
                                (filtered_regs[seq_start_idx].index - filtered_regs[0].index)
                                    as usize
                                    * REG_ON_FRAME_SIZE.0 as usize
                            } else {
                                0
                            };

                            let specific_mem_location = FrameMemoryRegion {
                                addr: region.frame_memory_region.addr
                                    + swamp_vm_types::MemoryOffset(memory_offset as u32),
                                size: REG_ON_FRAME_SIZE,
                            };

                            self.builder.add_ld_contiguous_regs_from_frame(
                                start_reg,
                                specific_mem_location,
                                seq_length,
                                node,
                                &format!(
                                    "restoring r{}-r{} (sequence) {comment}",
                                    start_reg,
                                    start_reg + seq_length - 1
                                ),
                            );

                            i += 1;
                        }
                    }
                }
            }

            RepresentationOfRegisters::Mask(original_spill_mask) => {
                let mut mask_to_actually_restore = original_spill_mask;

                // Drop any register from the mask that now holds a live result.
                for i in 0..8 {
                    let reg_idx = i as u8;
                    if (original_spill_mask >> i) & 1 != 0
                        && output_destination_registers.contains(&reg_idx)
                    {
                        mask_to_actually_restore &= !(1 << i);
                    }
                }

                if mask_to_actually_restore != 0 {
                    self.builder.add_ld_masked_regs_from_frame(
                        mask_to_actually_restore,
                        region.frame_memory_region,
                        node,
                        &format!("restore registers using mask {comment}"),
                    );
                }
            }
            RepresentationOfRegisters::Range { start_reg, count } => {
                let base_mem_addr_of_spilled_range = region.frame_memory_region.addr;

                let mut i = 0;
                while i < count {
                    // Skip registers that must keep their current (result) values.
                    while i < count && output_destination_registers.contains(&(start_reg + i)) {
                        i += 1;
                    }

                    if i < count {
                        let seq_start_reg = start_reg + i;
                        let seq_start_offset = (i as usize) * REG_ON_FRAME_SIZE.0 as usize;
                        let mut seq_length = 1;

                        while i + seq_length < count
                            && !output_destination_registers.contains(&(start_reg + i + seq_length))
                        {
                            seq_length += 1;
                        }

                        let specific_mem_location = FrameMemoryRegion {
                            addr: base_mem_addr_of_spilled_range
                                + swamp_vm_types::MemoryOffset(seq_start_offset as u32),
                            size: REG_ON_FRAME_SIZE,
                        };

                        self.builder.add_ld_contiguous_regs_from_frame(
                            seq_start_reg,
                            specific_mem_location,
                            seq_length,
                            node,
                            &format!(
                                "restoring spilled contiguous range of registers from stack frame r{}-r{} {comment}",
                                seq_start_reg,
                                seq_start_reg + seq_length - 1
                            ),
                        );

                        i += seq_length;
                    }
                }
            }
        }
    }

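    /// Emits a call instruction as a placeholder and records a fixup so the
    /// real target address can be patched in once the function's code
    /// location is known.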
    pub(crate) fn emit_call(
        &mut self,
        node: &Node,
        internal_fn: &InternalFunctionDefinitionRef,
        comment: &str,
    ) {
        let function_name = internal_fn.associated_with_type.as_ref().map_or_else(
            || {
                format!(
                    "{}::{}",
                    pretty_module_name(&internal_fn.defined_in_module_path),
                    internal_fn.assigned_name
                )
            },
            |associated_with_type| {
                format!(
                    "{}::{}:{}",
                    pretty_module_name(&internal_fn.defined_in_module_path),
                    associated_with_type,
                    internal_fn.assigned_name
                )
            },
        );
        let call_comment = &format!("calling `{function_name}` ({comment})");

        let patch_position = self.builder.add_call_placeholder(node, call_comment);
        self.state.function_fixups.push(FunctionFixup {
            patch_position,
            fn_id: internal_fn.program_unique_id,
            internal_function_definition: internal_fn.clone(),
        });
    }
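
    /// Convenience wrapper that emits arguments, the call itself, and the
    /// post-call restore. The restore is skipped for functions that never
    /// return (`TypeKind::Never`).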
    pub(crate) fn emit_internal_call(
        &mut self,
        target_reg: &Destination,
        node: &Node,
        internal_fn: &InternalFunctionDefinitionRef,
        arguments: &[ArgumentExpression],
        ctx: &Context,
    ) {
        let argument_info = self.emit_arguments(
            target_reg,
            node,
            &internal_fn.signature,
            None,
            arguments,
            false,
            ctx,
        );

        self.emit_call(node, internal_fn, "call");

        if !matches!(&*internal_fn.signature.return_type.kind, TypeKind::Never) {
            self.emit_post_call(argument_info, node, "restore spilled after call");
        }
    }

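    /// Whether an argument must be staged in a temporary register before the
    /// final move into its canonical ABI register. Currently conservative:
    /// every argument is staged, which sidesteps ordering hazards between
    /// argument registers that later arguments may still need to read.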
    const fn argument_needs_to_be_in_a_temporary_register_first(
        &self,
        _reg: &TypedRegister,
    ) -> bool {
        true
    }

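    /// Records a code-generation error at the given source node.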
    fn add_error(&mut self, error_kind: err::ErrorKind, node: &Node) {
        self.errors.push(Error {
            node: node.clone(),
            kind: error_kind,
        });
    }
}