1use crate::alloc::StackFrameAllocator;
6use crate::ctx::Context;
7use crate::reg_pool::HwmTempRegisterPool;
8use crate::state::CodeGenState;
9use seq_map::SeqMap;
10use source_map_cache::{
11 KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
12};
13use source_map_node::Node;
14use swamp_semantic::{
15 ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
16 UnaryOperator, UnaryOperatorKind, VariableRef,
17};
18use swamp_types::TypeKind;
19use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
20use swamp_vm_types::aligner::{SAFE_ALIGNMENT, align};
21use swamp_vm_types::types::BasicTypeKind;
22use swamp_vm_types::types::{
23 BasicTypeRef, Destination, FramePlacedType, TypedRegister, VmType, b8_type, u8_type, u32_type,
24};
25use swamp_vm_types::{
26 ANY_HEADER_HASH_OFFSET, ANY_HEADER_PTR_OFFSET, ANY_HEADER_SIZE_OFFSET, AggregateMemoryLocation,
27 FrameMemoryRegion, FrameMemorySize, MemoryLocation, MemoryOffset, MemorySize, PointerLocation,
28 REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
29};
30use tracing::info;
31
/// Tunable options controlling [`CodeBuilder`] behavior.
#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    /// When true, extra debug output is produced while generating code.
    pub should_show_debug: bool,
}
/// Per-function code generator: tracks register assignments, scratch
/// registers, and frame-space allocation while emitting instructions
/// through the shared [`InstructionBuilder`].
pub(crate) struct CodeBuilder<'a> {
    /// Shared code-generation state (e.g. constant offsets).
    pub state: &'a mut CodeGenState,
    /// Sink that the actual VM instructions are emitted into.
    pub(crate) builder: &'a mut InstructionBuilder<'a>,
    /// Maps a variable's `unique_id_within_function` to its assigned register.
    pub(crate) variable_registers: SeqMap<usize, TypedRegister>,
    /// High-water-mark pool of scratch registers (save_mark/restore_to_mark).
    pub(crate) temp_registers: HwmTempRegisterPool,
    /// Allocates stack-frame space for locals and temporaries.
    pub(crate) frame_allocator: StackFrameAllocator,
    /// Remembers the last seen source line to avoid duplicate debug output.
    pub debug_line_tracker: KeepTrackOfSourceLine,
    /// Lookup for source locations and source text (debug output).
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
}
47
48impl<'a> CodeBuilder<'a> {
49 pub fn new(
50 state: &'a mut CodeGenState,
51 builder: &'a mut InstructionBuilder<'a>,
52 variable_registers: SeqMap<usize, TypedRegister>,
53 temp_registers: HwmTempRegisterPool,
54 temp_allocator: StackFrameAllocator,
55 options: CodeBuilderOptions,
56 source_map_lookup: &'a SourceMapWrapper<'a>,
57 ) -> Self {
58 Self {
59 state,
60 builder,
61 variable_registers,
62 temp_registers,
64 frame_allocator: temp_allocator,
65 debug_line_tracker: KeepTrackOfSourceLine::default(),
66 options,
67 source_map_lookup,
68 }
69 }
70}
71impl CodeBuilder<'_> {
72 pub(crate) fn emit_copy_register(
73 &mut self,
74 target_reg: &TypedRegister,
75 source_reg: &TypedRegister,
76 node: &Node,
77 comment: &str,
78 ) {
79 if source_reg.ty.is_mutable_reference_semantic() {
80 if target_reg.ty().is_mutable_reference() {
81 self.builder.add_mov_reg(
82 target_reg,
83 source_reg,
84 node,
85 &format!("emit_copy_register. ptr to ptr. {comment}"),
86 );
87 } else {
88 let size = source_reg.size();
89
90 let target_pointer_location = PointerLocation {
91 ptr_reg: target_reg.clone(),
92 };
93 let source_pointer_location = PointerLocation {
94 ptr_reg: source_reg.clone(),
95 };
96 self.builder.add_block_copy_with_immediate_size(
97 &target_pointer_location,
98 &source_pointer_location,
99 size,
100 node,
101 &format!("emit_copy_register.copy struct. {comment}"),
102 );
103 }
104 } else {
105 if matches!(
107 source_reg.ty.basic_type.kind,
108 BasicTypeKind::StringStorage {
109 element_type: _,
110 char: _,
111 capacity: _
112 }
113 ) && matches!(
114 target_reg.ty.basic_type.kind,
115 BasicTypeKind::StringView { byte: _, char: _ }
116 ) {
117 self.builder.add_string_duplicate(
118 target_reg,
119 source_reg,
120 node,
121 &format!("emit_copy_register. string storage to view (duplicate). {comment}"),
122 );
123 } else {
124 self.builder.add_mov_reg(
125 target_reg,
126 source_reg,
127 node,
128 &format!("emit_copy_register. primitive to primitive. {comment}"),
129 );
130 }
131 }
132 }
133
    /// Loads `count` contiguous registers, starting at `start_reg`, from the
    /// frame memory region beginning at `start_address`.
    ///
    /// Thin wrapper over the instruction builder's bulk register load; only
    /// the start register's index is forwarded.
    pub(crate) fn add_ld_regs_from_frame(
        &mut self,
        start_reg: &TypedRegister,
        start_address: FrameMemoryRegion,
        count: u8,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld_contiguous_regs_from_frame(
            start_reg.index,
            start_address,
            count,
            node,
            comment,
        );
    }
150
151 pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
152 let aligned = align(
153 self.frame_allocator.addr().as_size().0 as usize,
154 SAFE_ALIGNMENT,
155 );
156 FrameMemorySize(aligned as u32)
157 }
158
    /// Back-patches a previously emitted `enter` instruction with the final,
    /// alignment-rounded frame size, once the whole body has been emitted.
    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }
163
164 fn debug_node(&self, node: &Node) {
165 let line_info = self.source_map_lookup.get_line(&node.span);
166 let span_text = self.source_map_lookup.get_text_span(&node.span);
167 eprintln!(
168 "{}:{}:{}> {}",
169 line_info.relative_file_name, line_info.row, line_info.col, span_text
170 );
171 }
173
174 pub(crate) fn emit_unary_operator(
175 &mut self,
176 target_reg: &TypedRegister,
177 unary_operator: &UnaryOperator,
178 ctx: &Context,
179 ) {
180 let node = &unary_operator.node;
181 match &unary_operator.kind {
182 UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
183 TypeKind::Bool => {
184 let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
185 self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
186 }
187 _ => panic!("unknown not op"),
188 },
189
190 UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
191 TypeKind::Int => {
192 let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
193 self.builder
194 .add_neg_i32(target_reg, &left_source, node, "negate i32");
195 }
196
197 TypeKind::Float => {
198 let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
199 self.builder
200 .add_neg_f32(target_reg, &left_source, node, "negate f32");
201 }
202 _ => panic!("negate should only be possible on Int and Float"),
203 },
204 }
205 }
206
    /// Emits an `if` (optionally with `else`) expression into `output_destination`.
    ///
    /// Control-flow layout:
    /// 1. Emit the condition; it yields a forward jump taken when false.
    /// 2. Emit the true branch.
    /// 3. With an else branch: emit a jump over it (taken after the true
    ///    branch), patch the false-jump to land at the else code, emit the
    ///    else branch, then patch the skip-jump to land after it.
    ///    Without one: patch the false-jump to land right here.
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Destination,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            // The false branch starts here.
            self.builder.patch_jump_here(jump_on_false_condition);

            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }
239
240 pub(crate) fn temp_frame_space_for_register(
241 &mut self,
242 count: u8,
243 comment: &str,
244 ) -> FrameMemoryRegion {
245 let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
246 let start = self
247 .frame_allocator
248 .allocate(total_size, REG_ON_FRAME_ALIGNMENT);
249
250 FrameMemoryRegion {
251 addr: start,
252 size: total_size,
253 }
254 }
255
    /// Writes an `Option` value (tag byte plus optional payload) into the
    /// memory location described by `output`.
    ///
    /// Layout comes from the type's `optional_info()`: a tag at `tag_offset`
    /// (0 = None, 1 = Some) and a payload at `payload_offset`.
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        // Scratch registers allocated below are released at the end.
        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                // Store the tag byte at its offset within the tagged union.
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                // Evaluate the payload expression directly into its slot.
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            // NOTE(review): this branch stores the tag at the base location
            // without applying `tag_offset`, unlike the Some branch above.
            // That is only equivalent if `tag_offset` is always 0 — confirm.
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag");
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }
325
326 pub(crate) fn emit_block(
327 &mut self,
328 target_reg: &Destination,
329 expressions: &[Expression],
330 ctx: &Context,
331 ) {
332 if let Some((last, others)) = expressions.split_last() {
333 for expr in others {
334 self.emit_statement(expr, ctx);
335 }
336 if matches!(&*last.ty.kind, TypeKind::Unit) {
337 self.emit_statement(last, ctx);
338 } else {
339 self.emit_expression(target_reg, last, ctx);
340 }
341 } else {
342 }
344 }
345
346 pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
347 self.variable_registers
349 .get(&variable.unique_id_within_function)
350 .unwrap()
351 }
352
353 fn get_variable_frame_placed(&self, variable: &VariableRef) -> FramePlacedType {
354 let frame_address = self
355 .variable_registers
356 .get(&variable.unique_id_within_function)
357 .unwrap();
358
359 frame_address.frame_placed()
360 }
361
362 pub fn allocate_frame_space_and_return_absolute_pointer_reg(
363 &mut self,
364 ty: &BasicTypeRef,
365 node: &Node,
366 comment: &str,
367 ) -> TypedRegister {
368 let frame_placed_type = self.frame_allocator.allocate_type(ty);
369
370 let temp = self.temp_registers.allocate(
371 VmType::new_frame_placed(frame_placed_type),
372 &format!("{comment}: allocate frame space"),
373 );
374
375 self.builder.add_lea_from_frame_region(
376 &temp.register,
377 temp.register.region(),
378 node,
379 &format!("{comment}: set the allocated memory to pointer reg"),
380 );
381
382 temp.register
383 }
384
385 pub fn allocate_frame_space_and_return_pointer_location(
386 &mut self,
387 ty: &BasicTypeRef,
388 node: &Node,
389 comment: &str,
390 ) -> PointerLocation {
391 let absolute_base_ptr_reg =
392 self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
393 PointerLocation {
394 ptr_reg: absolute_base_ptr_reg,
395 }
396 }
397
398 pub fn allocate_frame_space_and_return_memory_location(
399 &mut self,
400 ty: &BasicTypeRef,
401 node: &Node,
402 comment: &str,
403 ) -> MemoryLocation {
404 let absolute_base_ptr_reg =
405 self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
406 MemoryLocation {
407 ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
408 base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
409 offset: MemoryOffset(0),
410 }
411 }
412
413 pub fn allocate_frame_space_and_return_destination_to_it(
414 &mut self,
415 ty: &BasicTypeRef,
416 node: &Node,
417 comment: &str,
418 ) -> Destination {
419 let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
420 Destination::new_location(location)
421 }
422
423 pub(crate) fn emit_constant_access(
424 &mut self,
425 output: &Destination,
426 constant_reference: &ConstantRef,
427 node: &Node,
428 ctx: &Context,
429 ) {
430 let constant_region = self
431 .state
432 .constant_offsets
433 .get(&constant_reference.id)
434 .unwrap();
435 if constant_region.ty().is_aggregate() {
438 let source_base_ptr = self.temp_registers.allocate(
440 VmType::new_contained_in_register(u32_type()),
441 "temp register for the base pointer to the constant",
442 );
443 self.builder.add_mov_32_immediate_value(
444 source_base_ptr.register(),
445 constant_region.addr().0,
446 node,
447 &format!(
448 "load constant pointer '{}' type:{}",
449 constant_reference.assigned_name,
450 constant_region.ty()
451 ),
452 );
453
454 let source_memory_location = MemoryLocation {
455 base_ptr_reg: source_base_ptr.register,
456 offset: MemoryOffset(0),
457 ty: VmType::new_heap_placement(
458 constant_region.ty().clone(),
459 constant_region.region(),
460 ),
461 };
462
463 let output_reg = output.memory_location_or_pointer_reg();
464
465 self.emit_copy_value_from_memory_location(
466 output,
467 &source_memory_location,
468 node,
469 &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
470 );
471 } else if let Some(output_memory_location) = output.memory_location() {
472 let hwm = self.temp_registers.save_mark();
473 let temp_reg = self.temp_registers.allocate(
474 VmType::new_contained_in_register(constant_region.ty().clone()),
475 "temporary for constant",
476 );
477
478 self.emit_load_scalar_from_absolute_address_instruction(
479 temp_reg.register(),
480 constant_region.addr(),
481 &VmType::new_unknown_placement(constant_region.ty().clone()),
482 node,
483 &format!(
484 "load constant primitive '{}' {:?}",
485 constant_reference.assigned_name,
486 constant_region.ty()
487 ),
488 );
489
490 self.emit_store_scalar_to_memory_offset_instruction(
491 output_memory_location,
492 temp_reg.register(),
493 node,
494 &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
495 );
496
497 self.temp_registers.restore_to_mark(hwm);
498 } else {
499 self.emit_load_scalar_from_absolute_address_instruction(
500 output.grab_register(),
501 constant_region.addr(),
502 &VmType::new_unknown_placement(constant_region.ty().clone()),
503 node,
504 &format!(
505 "load constant primitive '{}' {:?}",
506 constant_reference.assigned_name,
507 constant_region.ty()
508 ),
509 );
510 }
511 }
512
513 pub(crate) fn emit_coerce_int_to_char(
514 &mut self,
515 target_reg: &TypedRegister,
516 expr: &Expression,
517 ctx: &Context,
518 ) {
519 let destination = Destination::Register(target_reg.clone());
520
521 self.emit_expression(&destination, expr, ctx);
523 }
524
525 pub(crate) fn emit_coerce_int_to_byte(
526 &mut self,
527 output: &Destination,
528 expr: &Expression,
529 ctx: &Context,
530 ) {
531 self.emit_expression(output, expr, ctx);
533
534 match output {
535 Destination::Unit => {}
536 Destination::Register(dest_reg) => {
537 self.builder
538 .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
539 }
540 Destination::Memory(mem) => {
541 let hwm = self.temp_registers.save_mark();
542 let temp_u8 = self
543 .temp_registers
544 .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
545 self.builder.add_ld8_from_pointer_with_offset(
546 temp_u8.register(),
547 &mem.base_ptr_reg,
548 mem.offset,
549 &expr.node,
550 "load it to check it",
551 );
552 self.builder
553 .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
554 self.temp_registers.restore_to_mark(hwm);
555 }
556 }
557 }
558
    /// Coerces an `Option` value to a bool by loading its one-byte tag into
    /// `target_reg` (0 = None, non-zero = Some).
    ///
    /// NOTE(review): reads the tag at offset 0 of the tagged union, whereas
    /// option construction uses `optional_info().tag_offset` — confirm the
    /// tag offset is always 0.
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        // Obtain a pointer to the tagged union in memory.
        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }
585
586 pub(crate) fn emit_coerce_to_any(
587 &mut self,
588 output: &Destination,
589 expr: &Expression,
590 ctx: &Context,
591 ) {
592 let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);
595
596 let pointer_register = self.emit_compute_effective_address_to_register(
597 output,
598 &expr.node,
599 "get starting ptr to output",
600 );
601 let output_aggregate_location = AggregateMemoryLocation::new(
602 MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
603 );
604
605 self.builder.add_st32_using_ptr_with_offset(
606 &output_aggregate_location
607 .offset(ANY_HEADER_PTR_OFFSET, u32_type())
608 .location,
609 &source_aggregate_pointer,
610 &expr.node,
611 "store aggregate pointer into Any Header",
612 );
613 let temp_size = self.temp_registers.allocate(
614 VmType::new_contained_in_register(u32_type()),
615 "Any header size temp",
616 );
617
618 self.builder.add_mov_32_immediate_value(
619 temp_size.register(),
620 source_aggregate_pointer.ty.basic_type.total_size.0,
621 &expr.node,
622 "fixed size",
623 );
624 self.builder.add_st32_using_ptr_with_offset(
625 &output_aggregate_location
626 .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
627 .location,
628 temp_size.register(),
629 &expr.node,
630 "copy size into Any Header",
631 );
632
633 self.builder.add_mov_32_immediate_value(
634 temp_size.register(),
635 source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
636 &expr.node,
637 "reuse for hash",
638 );
639 self.builder.add_st32_using_ptr_with_offset(
640 &output_aggregate_location
641 .offset(ANY_HEADER_HASH_OFFSET, u32_type())
642 .location,
643 temp_size.register(),
644 &expr.node,
645 "copy size into Any Header",
646 );
647 }
648
649 pub(crate) fn merge_arguments_keep_literals(
650 outer_args: &Vec<ArgumentExpression>,
651 intrinsic_args: &Vec<ArgumentExpression>,
652 ) -> Vec<ArgumentExpression> {
653 let mut all_args = outer_args.clone();
657
658 if intrinsic_args.len() > outer_args.len() + 1 {
659 all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
660 }
661
662 all_args
663 }
664
665 pub(crate) fn emit_borrow_mutable_reference(
666 &mut self,
667 target_register: &TypedRegister,
668 node: &Node,
669 expr: &SingleLocationExpression,
670 ctx: &Context,
671 ) {
672 let location = self.emit_lvalue_address(expr, ctx);
673
674 let abs_pointer = self.emit_compute_effective_address_to_register(
675 &location,
676 node,
677 "calculate absolute address for reference",
678 );
679
680 self.builder.add_mov_reg(
681 target_register,
682 &abs_pointer,
683 node,
684 "copy calculated address for borrow",
685 );
686 }
687
688 pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
689 let node = &expr.node;
690 let (line, _column) = self
691 .source_map_lookup
692 .source_map
693 .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
694 let source_line_info = SourceFileLineInfo {
695 row: line,
696 file_id: node.span.file_id as usize,
697 };
698
699 let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
700 if let Some((start, end)) = answer {
701 let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
702 let (line, col) = self
703 .source_map_lookup
704 .source_map
705 .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
706 let source_line = self
707 .source_map_lookup
708 .source_map
709 .get_source_line(node.span.file_id, line)
710 .unwrap_or("<source line not found>");
711
712 info!(
713 file=%relative_file_name,
714 line=%line,
715 col=%col,
716 source=%source_line,
717 "{}",
718 description
719 );
720 }
721 }
722}