use crate::code_bld::CodeBuilder;
use crate::ctx::Context;
use crate::err::Error;
use crate::reg_pool::RegisterPool;
use crate::state::FunctionFixup;
use crate::{
    ArgumentAndTempScope, MAX_REGISTER_INDEX_FOR_PARAMETERS, RepresentationOfRegisters,
    SpilledRegisterRegion, err,
};
use source_map_node::Node;
use std::collections::HashSet;
use swamp_semantic::{ArgumentExpression, InternalFunctionDefinitionRef, pretty_module_name};
use swamp_types::TypeKind;
use swamp_types::prelude::Signature;
use swamp_vm_isa::REG_ON_FRAME_SIZE;
use swamp_vm_types::FrameMemoryRegion;
use swamp_vm_types::types::{BasicTypeRef, Place, TypedRegister, VmType};
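
/// An argument that was first computed into a temporary register and must be
/// moved into its canonical ABI argument register just before the call is emitted.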
pub struct CopyArgument {
    pub canonical_target: TypedRegister,
    pub source_temporary: TypedRegister,
}
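
/// Everything needed to undo argument setup after the call returns: the
/// spilled register scope to restore, plus copy-backs for registers the
/// callee mutates.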
pub struct EmitArgumentInfo {
    pub argument_and_temp_scope: ArgumentAndTempScope,
    pub copy_back_of_registers_mutated_by_callee: Vec<MutableReturnReg>,
}
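
/// A register written by the callee whose value must be copied to its real
/// destination after the caller's registers have been restored.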
pub struct MutableReturnReg {
    pub target_location_after_call: Place,
    pub parameter_reg: TypedRegister,
}

impl CodeBuilder<'_> {
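    /// Spills the caller's live registers to frame memory ahead of a call.
    ///
    /// The ABI return/argument registers (r0-r6) are saved with a single
    /// masked store; if any temporary registers are probably live, that
    /// contiguous range (starting at r128) is saved as well. The returned
    /// scope describes both regions so `emit_post_call` can restore them.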
    pub fn spill_required_registers(&mut self, node: &Node, comment: &str) -> ArgumentAndTempScope {
        const ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS: usize =
            MAX_REGISTER_INDEX_FOR_PARAMETERS as usize + 1;
        const ABI_ARGUMENT_MASK: u8 =
            ((1u16 << ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS) - 1) as u8;

        let abi_parameter_frame_memory_region = self.temp_frame_space_for_register(
            ABI_ARGUMENT_RETURN_AND_ARGUMENT_REGISTERS as u8,
            &format!("emit abi arguments r0-r6 {comment}"),
        );

        self.builder.add_st_masked_regs_to_frame(
            abi_parameter_frame_memory_region.addr,
            ABI_ARGUMENT_MASK,
            node,
            "spill masked registers to stack frame memory",
        );

        let abi_parameter_region = SpilledRegisterRegion {
            registers: RepresentationOfRegisters::Mask(ABI_ARGUMENT_MASK),
            frame_memory_region: abi_parameter_frame_memory_region,
        };

        let (first_temp_register_index, temp_register_probable_live_count) =
            self.temp_registers.start_index_and_number_of_allocated();
        debug_assert_eq!(first_temp_register_index, 128);

        let temp_register_region = if temp_register_probable_live_count > 0 {
            let temp_register_frame_memory_region = self.temp_frame_space_for_register(
                temp_register_probable_live_count,
                &format!(
                    "emit temp arguments from r{first_temp_register_index} count:{temp_register_probable_live_count} {comment}"
                ),
            );
            let temp_register_region = SpilledRegisterRegion {
                registers: RepresentationOfRegisters::Range {
                    start_reg: first_temp_register_index,
                    count: temp_register_probable_live_count,
                },
                frame_memory_region: temp_register_frame_memory_region,
            };

            self.builder.add_st_contiguous_regs_to_frame(
                temp_register_frame_memory_region,
                first_temp_register_index,
                temp_register_probable_live_count,
                node,
                "spill contiguous range of registers to stack frame memory",
            );
            Some(temp_register_region)
        } else {
            None
        };

        ArgumentAndTempScope {
            argument_registers: abi_parameter_region,
            scratch_registers: temp_register_region,
        }
    }
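
    /// Computes the effective address of the return destination and moves it
    /// into r0, the hidden return-pointer register, so the callee can write
    /// its result directly in place.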
    pub fn setup_return_pointer_reg(
        &mut self,
        output_destination: &Place,
        return_basic_type: BasicTypeRef,
        node: &Node,
    ) {
        let r0 = TypedRegister::new_vm_type(0, VmType::new_unknown_placement(return_basic_type));

        let return_pointer_reg = self.emit_compute_effective_address_to_register(
            output_destination,
            node,
            "r0: create an absolute pointer to r0 if needed",
        );

        self.builder.add_mov_reg(
            &r0,
            &return_pointer_reg,
            node,
            "r0: copy the return pointer into r0",
        );
    }
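
    /// Emits the code that places a single argument into its argument register.
    ///
    /// A mutable borrow is either passed by value and scheduled for copy-back
    /// (for primitives that travel in-register) or passed as a flattened
    /// absolute pointer. A materialized rvalue that needs a memory location is
    /// built in temporary storage and passed by address; any other expression
    /// is evaluated directly into the register.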
    fn emit_single_argument(
        &mut self,
        argument_expr: &ArgumentExpression,
        argument_to_use: &TypedRegister,
        target_canonical_argument_register: &TypedRegister,
        parameter_basic_type: &BasicTypeRef,
        copy_back_phase_one: &mut Vec<MutableReturnReg>,
        node: &Node,
        ctx: &Context,
    ) {
        match argument_expr {
            ArgumentExpression::BorrowMutableReference(lvalue) => {
                let original_destination = self.emit_lvalue_address(lvalue, ctx);

                if parameter_basic_type.should_be_copied_back_when_mutable_arg_or_return() {
                    self.emit_transfer_value_to_register(
                        argument_to_use,
                        &original_destination,
                        node,
                        "must get primitive from lvalue and pass as copy back (by value)",
                    );

                    copy_back_phase_one.push(MutableReturnReg {
                        target_location_after_call: original_destination,
                        parameter_reg: target_canonical_argument_register.clone(),
                    });
                } else {
                    let flattened_source_pointer_reg = self
                        .emit_compute_effective_address_to_register(
                            &original_destination,
                            node,
                            "flattened into absolute pointer",
                        );
                    self.builder.add_mov_reg(
                        argument_to_use,
                        &flattened_source_pointer_reg,
                        node,
                        "copy absolute address",
                    );
                }
            }
            ArgumentExpression::MaterializedExpression(expr) => {
                if Self::rvalue_needs_memory_location_to_materialize_in(
                    &mut self.state.layout_cache,
                    expr,
                ) {
                    let temp_ptr =
                        self.emit_scalar_rvalue_or_pointer_to_temporary(expr, ctx, true);

                    self.builder.add_mov_reg(
                        argument_to_use,
                        &temp_ptr,
                        node,
                        "copy temporary storage address to argument register",
                    );
                } else {
                    self.emit_expression_into_register(
                        argument_to_use,
                        expr,
                        "argument expression into specific argument register",
                        ctx,
                    );
                }
            }

            ArgumentExpression::Expression(expr) => {
                self.emit_expression_into_register(
                    argument_to_use,
                    expr,
                    "argument expression into specific argument register",
                    ctx,
                );
            }
        }
    }
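
    /// Emits the complete argument setup for a call and returns what must be
    /// undone afterwards.
    ///
    /// Spills caller registers, wires up the return value (hidden pointer in
    /// r0, or a scheduled copy-back out of r0), places `self` and each
    /// argument into its canonical argument register starting at r1 (staging
    /// through temporaries where required), and finally moves all staged
    /// temporaries into their canonical registers.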
    pub(crate) fn emit_arguments(
        &mut self,
        output_destination: &Place,
        node: &Node,
        signature: &Signature,
        self_variable: Option<&TypedRegister>,
        arguments: &[ArgumentExpression],
        is_host_call: bool,
        ctx: &Context,
    ) -> EmitArgumentInfo {
        let mut copy_back_operations: Vec<MutableReturnReg> = Vec::new();
        let has_return_value = !matches!(&*signature.return_type.kind, TypeKind::Unit);

        let spill_scope = self.spill_required_registers(node, "spill before emit arguments");

        if has_return_value {
            let return_basic_type = self.state.layout_cache.layout(&signature.return_type);

            if return_basic_type.needs_hidden_pointer_as_return() {
                self.setup_return_pointer_reg(output_destination, return_basic_type, node);
            } else {
                let r0 =
                    TypedRegister::new_vm_type(0, VmType::new_unknown_placement(return_basic_type));
                copy_back_operations.push(MutableReturnReg {
                    target_location_after_call: output_destination.clone(),
                    parameter_reg: r0,
                });
            }
        }

        assert!(
            signature.parameters.len() <= MAX_REGISTER_INDEX_FOR_PARAMETERS.into(),
            "signature is wrong {signature:?}"
        );

        let mut temp_to_abi_copies = Vec::new();
        let mut argument_registers = RegisterPool::new(1, 6);

        for (index_in_signature, type_for_parameter) in signature.parameters.iter().enumerate() {
            let parameter_basic_type = self
                .state
                .layout_cache
                .layout(&type_for_parameter.resolved_type);
            let target_canonical_argument_register = argument_registers.alloc_register(
                VmType::new_unknown_placement(parameter_basic_type.clone()),
                &format!("{index_in_signature}:{}", type_for_parameter.name),
            );

            let argument_to_use = if self.argument_needs_to_be_in_a_temporary_register_first(
                &target_canonical_argument_register,
            ) {
                let temp_reg = self.temp_registers.allocate(
                    target_canonical_argument_register.ty.clone(),
                    &format!(
                        "temporary argument for '{}'",
                        target_canonical_argument_register.comment
                    ),
                );
                let copy_argument = CopyArgument {
                    canonical_target: target_canonical_argument_register.clone(),
                    source_temporary: temp_reg.register.clone(),
                };
                temp_to_abi_copies.push(copy_argument);
                temp_reg.register
            } else {
                target_canonical_argument_register.clone()
            };

            if index_in_signature == 0 && self_variable.is_some() {
                let self_reg = self_variable.as_ref().unwrap();
                if self_reg.index != argument_to_use.index {
                    self.builder.add_mov_reg(
                        &argument_to_use,
                        self_reg,
                        node,
                        &format!(
                            "move self_variable ({}) to first argument register",
                            self_reg.ty
                        ),
                    );
                }
            } else {
                let argument_vector_index = if self_variable.is_some() {
                    index_in_signature - 1
                } else {
                    index_in_signature
                };
                let argument_expr_or_location = &arguments[argument_vector_index];

                self.emit_single_argument(
                    argument_expr_or_location,
                    &argument_to_use,
                    &target_canonical_argument_register,
                    &parameter_basic_type,
                    &mut copy_back_operations,
                    node,
                    ctx,
                );
            }
        }

        for (index, copy_argument) in temp_to_abi_copies.iter().enumerate() {
            let parameter_in_signature = &signature.parameters[index];
            self.builder.add_mov_reg(
                &copy_argument.canonical_target,
                &copy_argument.source_temporary,
                node,
                &format!(
                    "copy argument {index} ({}) in place from temporary '{}'",
                    parameter_in_signature.name, copy_argument.source_temporary.comment
                ),
            );
        }

        EmitArgumentInfo {
            argument_and_temp_scope: spill_scope,
            copy_back_of_registers_mutated_by_callee: copy_back_operations,
        }
    }
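
    /// Undoes `emit_arguments` once the call has returned, in three phases:
    /// park callee-mutated registers in temporaries, restore the spilled
    /// scratch and argument regions, then copy the parked values to their
    /// final destinations. Parking must come first because the restore
    /// overwrites the very registers the callee wrote.
    ///
    /// Illustrative shape of the emitted sequence (not literal output):
    ///
    /// ```text
    /// mov t, r0          ; park copy-back value
    /// ld  r128.., frame  ; restore scratch region
    /// ld  r0-r6,  frame  ; restore argument region
    /// st  [dest], t      ; copy parked value back
    /// ```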
    pub(crate) fn emit_post_call(
        &mut self,
        spilled_arguments: EmitArgumentInfo,
        node: &Node,
        comment: &str,
    ) {
        let mut temp_saved_values = Vec::new();

        for copy_back in &spilled_arguments.copy_back_of_registers_mutated_by_callee {
            let temp_reg = self.temp_registers.allocate(
                copy_back.parameter_reg.ty.clone(),
                &format!(
                    "temp save for copy-back of {}",
                    copy_back.parameter_reg.comment
                ),
            );

            self.builder.add_mov_reg(
                temp_reg.register(),
                &copy_back.parameter_reg,
                node,
                &format!(
                    "save {} to temp before register restoration",
                    copy_back.parameter_reg
                ),
            );

            temp_saved_values.push((temp_reg, copy_back));
        }

        if let Some(scratch_region) = spilled_arguments.argument_and_temp_scope.scratch_registers {
            self.emit_restore_region(scratch_region, &HashSet::new(), node, comment);
        }

        self.emit_restore_region(
            spilled_arguments.argument_and_temp_scope.argument_registers,
            &HashSet::new(),
            node,
            comment,
        );

        for (temp_reg, copy_back) in temp_saved_values {
            let temp_source = Place::Register(temp_reg.register().clone());
            self.emit_copy_value_between_places(
                &copy_back.target_location_after_call,
                &temp_source,
                node,
                "copy-back from temp to final destination",
            );
        }
    }
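
    /// Reloads a spilled register region from frame memory, skipping any
    /// registers named in `output_destination_registers` so freshly computed
    /// results are not clobbered by the restore. Runs of consecutive registers
    /// are reloaded with a single contiguous load where possible.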
    #[allow(clippy::too_many_lines)]
    pub fn emit_restore_region(
        &mut self,
        region: SpilledRegisterRegion,
        output_destination_registers: &HashSet<u8>,
        node: &Node,
        comment: &str,
    ) {
        match region.registers {
            RepresentationOfRegisters::Individual(spilled_registers_list) => {
                if !spilled_registers_list.is_empty() {
                    let mut sorted_regs = spilled_registers_list;
                    sorted_regs.sort_by_key(|reg| reg.index);

                    let filtered_regs: Vec<_> = sorted_regs
                        .into_iter()
                        .filter(|reg| !output_destination_registers.contains(&reg.index))
                        .collect();

                    if !filtered_regs.is_empty() {
                        let mut i = 0;
                        while i < filtered_regs.len() {
                            let seq_start_idx = i;
                            let start_reg = filtered_regs[i].index;
                            let mut seq_length = 1;

                            while i + 1 < filtered_regs.len()
                                && filtered_regs[i + 1].index == filtered_regs[i].index + 1
                            {
                                seq_length += 1;
                                i += 1;
                            }

                            let memory_offset = if seq_start_idx > 0 {
                                (filtered_regs[seq_start_idx].index - filtered_regs[0].index)
                                    as usize
                                    * REG_ON_FRAME_SIZE.0 as usize
                            } else {
                                0
                            };

                            let specific_mem_location = FrameMemoryRegion {
                                addr: region.frame_memory_region.addr
                                    + swamp_vm_isa::MemoryOffset(memory_offset as u32),
                                size: REG_ON_FRAME_SIZE,
                            };

                            self.builder.add_ld_contiguous_regs_from_frame(
                                start_reg,
                                specific_mem_location,
                                seq_length,
                                node,
                                &format!(
                                    "restoring r{}-r{} (sequence) {comment}",
                                    start_reg,
                                    start_reg + seq_length - 1
                                ),
                            );

                            i += 1;
                        }
                    }
                }
            }

            RepresentationOfRegisters::Mask(original_spill_mask) => {
                let mut mask_to_actually_restore = original_spill_mask;

                for i in 0..8 {
                    let reg_idx = i as u8;
                    if (original_spill_mask >> i) & 1 != 0
                        && output_destination_registers.contains(&reg_idx)
                    {
                        mask_to_actually_restore &= !(1 << i);
                    }
                }

                if mask_to_actually_restore != 0 {
                    self.builder.add_ld_masked_regs_from_frame(
                        mask_to_actually_restore,
                        region.frame_memory_region,
                        node,
                        &format!("restore registers using mask {comment}"),
                    );
                }
            }
            RepresentationOfRegisters::Range { start_reg, count } => {
                let base_mem_addr_of_spilled_range = region.frame_memory_region.addr;

                let mut i = 0;

                while i < count {
                    while i < count && output_destination_registers.contains(&(start_reg + i)) {
                        i += 1;
                    }

                    if i < count {
                        let seq_start_reg = start_reg + i;
                        let seq_start_offset = (i as usize) * REG_ON_FRAME_SIZE.0 as usize;
                        let mut seq_length = 1;

                        while i + seq_length < count
                            && !output_destination_registers.contains(&(start_reg + i + seq_length))
                        {
                            seq_length += 1;
                        }

                        let specific_mem_location = FrameMemoryRegion {
                            addr: base_mem_addr_of_spilled_range
                                + swamp_vm_isa::MemoryOffset(seq_start_offset as u32),
                            size: REG_ON_FRAME_SIZE,
                        };

                        self.builder.add_ld_contiguous_regs_from_frame(
                            seq_start_reg,
                            specific_mem_location,
                            seq_length,
                            node,
                            &format!(
                                "restoring spilled contiguous range of registers from stack frame r{}-r{} {comment}",
                                seq_start_reg,
                                seq_start_reg + seq_length - 1
                            ),
                        );

                        i += seq_length;
                    }
                }
            }
        }
    }
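
    /// Emits a call to an internal function as a placeholder instruction and
    /// records a fixup so the real target address can be patched in once it
    /// is known.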
    pub(crate) fn emit_call(
        &mut self,
        node: &Node,
        internal_fn: &InternalFunctionDefinitionRef,
        comment: &str,
    ) {
        let function_name = internal_fn.associated_with_type.as_ref().map_or_else(
            || {
                format!(
                    "{}::{}",
                    pretty_module_name(&internal_fn.defined_in_module_path),
                    internal_fn.assigned_name
                )
            },
            |associated_with_type| {
                format!(
                    "{}::{}:{}",
                    pretty_module_name(&internal_fn.defined_in_module_path),
                    associated_with_type,
                    internal_fn.assigned_name
                )
            },
        );
        let call_comment = &format!("calling `{function_name}` ({comment})");

        let patch_position = self.builder.add_call_placeholder(node, call_comment);
        self.state.function_fixups.push(FunctionFixup {
            patch_position,
            fn_id: internal_fn.program_unique_id,
            internal_function_definition: internal_fn.clone(),
        });
    }
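
    /// Convenience wrapper for calling an internal function: emits the
    /// arguments, the call itself, and the post-call restore/copy-back.
    /// The post-call step is skipped when the callee's return type is
    /// `Never`, since control cannot return.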
    pub(crate) fn emit_internal_call(
        &mut self,
        target_reg: &Place,
        node: &Node,
        internal_fn: &InternalFunctionDefinitionRef,
        arguments: &Vec<ArgumentExpression>,
        ctx: &Context,
    ) {
        let argument_info = self.emit_arguments(
            target_reg,
            node,
            &internal_fn.signature,
            None,
            arguments,
            false,
            ctx,
        );

        self.emit_call(node, internal_fn, "call");

        if !matches!(&*internal_fn.signature.return_type.kind, TypeKind::Never) {
            self.emit_post_call(argument_info, node, "restore spilled after call");
        }
    }
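
    /// Conservative placeholder: every argument is currently staged through a
    /// temporary register first (see `CopyArgument`), so evaluating one
    /// argument can never clobber a canonical argument register that a later
    /// argument expression still needs to read.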
    const fn argument_needs_to_be_in_a_temporary_register_first(
        &self,
        _reg: &TypedRegister,
    ) -> bool {
        true
    }
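
    /// Records a code-generation error at the given source node.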
    fn add_error(&mut self, error_kind: err::ErrorKind, node: &Node) {
        self.errors.push(Error {
            node: node.clone(),
            kind: error_kind,
        });
    }
}