use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_types::aligner::{SAFE_ALIGNMENT, align};
use swamp_vm_types::types::{
    BasicTypeRef, Destination, FramePlacedType, TypedRegister, VmType, b8_type, u8_type, u32_type,
};
use swamp_vm_types::{
    AggregateMemoryLocation, FrameMemoryRegion, FrameMemorySize, MemoryLocation, MemoryOffset,
    MemorySize, PointerLocation, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use tracing::info;
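/// Options that control code generation, currently whether debug output
/// should be shown while emitting instructions.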
#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
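/// Emits VM instructions for a single function body.
///
/// Holds the shared code generation state, the instruction builder being
/// written to, the register assigned to each variable, a pool of temporary
/// registers, a stack frame allocator, and a source line tracker used for
/// debug output.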
pub(crate) struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub(crate) builder: &'a mut InstructionBuilder<'a>,
    pub(crate) variable_registers: SeqMap<usize, TypedRegister>,
    pub(crate) temp_registers: HwmTempRegisterPool,
    pub(crate) frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
}

impl<'a> CodeBuilder<'a> {
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        frame_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            temp_registers,
            frame_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
        }
    }
}

impl CodeBuilder<'_> {
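    /// Copies a value from `source_reg` into `target_reg`.
    ///
    /// Values with mutable-reference semantics are either moved as a pointer
    /// (when the target is itself a mutable reference) or block-copied through
    /// both pointers; plain scalar values are copied with a register move.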
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else {
            self.builder.add_mov_reg(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. primitive to primitive. {comment}"),
            );
        }
    }

    pub(crate) fn add_ld_regs_from_frame(
        &mut self,
        start_reg: &TypedRegister,
        start_address: FrameMemoryRegion,
        count: u8,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld_contiguous_regs_from_frame(
            start_reg.index,
            start_address,
            count,
            node,
            comment,
        );
    }
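    /// Returns the frame size used so far, rounded up to `SAFE_ALIGNMENT`.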
    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
    }
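    /// Emits a unary operator: logical `not` on `Bool`, negation on `Int` and
    /// `Float`. Panics on any other operand type.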
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }
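    /// Emits an `if` expression. The condition leaves a conditional jump that
    /// skips the true branch; when a false branch exists, an extra jump is
    /// placed after the true branch so only one of the two branches runs, and
    /// both jump targets are patched once the branches have been emitted.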
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Destination,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            self.builder.patch_jump_here(jump_on_false_condition);

            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }
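    /// Reserves frame memory large enough to hold `count` registers stored on
    /// the frame and returns the reserved region.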
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }
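    /// Emits an optional value into the target memory location: writes the
    /// `Some` tag (1) and then the payload expression, or just the `None`
    /// tag (0) when there is no payload.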
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag");
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }
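    /// Emits a block of expressions. All but the last are emitted as
    /// statements; the last expression is emitted into `target_reg`, unless it
    /// has unit type, in which case it is also treated as a statement.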
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Destination,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        }
    }

    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

    fn get_variable_frame_placed(&self, variable: &VariableRef) -> FramePlacedType {
        let frame_address = self
            .variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap();

        frame_address.frame_placed()
    }
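    /// Allocates frame space for `ty` and returns a temporary register loaded
    /// with the absolute address of that allocation.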
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
        MemoryLocation {
            ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
            base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> Destination {
        let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
        Destination::new_location(location)
    }
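    /// Emits a read of a constant into `output`. Aggregates are copied from
    /// the constant memory area through a temporary base pointer; scalars are
    /// loaded directly, going through a temporary register when the
    /// destination is a memory location.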
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Destination,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        if constant_region.ty().is_aggregate() {
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }
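    /// Coerces an optional value to a bool by loading its tag byte (0 for
    /// `None`, 1 for `Some`) into `target_reg`.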
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        self.builder.add_ld8_from_pointer_with_offset_u16(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }
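    /// Starts from the outer call arguments and appends any intrinsic
    /// arguments past index `outer_args.len() + 1`, so trailing literal
    /// intrinsic arguments are kept (per the function name).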
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &Vec<ArgumentExpression>,
        intrinsic_args: &Vec<ArgumentExpression>,
    ) -> Vec<ArgumentExpression> {
        let mut all_args = outer_args.clone();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }
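    /// Emits a mutable borrow: computes the effective address of the lvalue
    /// expression and copies that address into `target_register`.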
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }
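    /// Logs the source location and source line for `expr` via `tracing`
    /// whenever the debug line tracker reports a new source line.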
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((start, end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
}