1use crate::alloc::StackFrameAllocator;
6use crate::ctx::Context;
7use crate::err;
8use crate::reg_pool::HwmTempRegisterPool;
9use crate::state::CodeGenState;
10use seq_map::SeqMap;
11use source_map_cache::{
12 KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
13};
14use source_map_node::Node;
15use swamp_semantic::{
16 ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
17 UnaryOperator, UnaryOperatorKind, VariableRef,
18};
19use swamp_types::TypeKind;
20use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
21use swamp_vm_types::aligner::{align, SAFE_ALIGNMENT};
22use swamp_vm_types::types::BasicTypeKind;
23use swamp_vm_types::types::{
24 b8_type, u32_type, u8_type, BasicTypeRef, Destination, TypedRegister, VmType,
25};
26use swamp_vm_types::{
27 AggregateMemoryLocation, FrameMemoryRegion, FrameMemorySize, MemoryLocation,
28 MemoryOffset, MemorySize, PointerLocation, ANY_HEADER_HASH_OFFSET, ANY_HEADER_PTR_OFFSET, ANY_HEADER_SIZE_OFFSET,
29 REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
30};
31use tracing::info;
32
/// Tunables controlling [`CodeBuilder`] behavior.
#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    /// When true, emit extra debug output during code generation.
    pub should_show_debug: bool,
}
/// Emits VM instructions for one function body, tracking variable register
/// assignments, scratch registers, and frame memory while doing so.
pub struct CodeBuilder<'a> {
    /// Shared code generation state (e.g. constant offsets).
    pub state: &'a mut CodeGenState,
    /// Low-level instruction emitter this builder drives.
    pub builder: &'a mut InstructionBuilder<'a>,
    /// Register assigned to each variable, keyed by its unique id within the function.
    pub variable_registers: SeqMap<usize, TypedRegister>,
    /// High-water-mark pool of scratch registers (save_mark / restore_to_mark).
    pub temp_registers: HwmTempRegisterPool,
    /// Allocator for frame (stack) memory.
    pub frame_allocator: StackFrameAllocator,
    /// Remembers the last source line seen, to avoid duplicate debug output.
    pub debug_line_tracker: KeepTrackOfSourceLine,
    /// Lookup for source text and line information used in debug output.
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    /// Behavior options (see [`CodeBuilderOptions`]).
    pub options: CodeBuilderOptions,
    /// Errors collected during code generation.
    pub errors: Vec<err::Error>,
}
49
impl<'a> CodeBuilder<'a> {
    /// Creates a `CodeBuilder` for one function body.
    ///
    /// `temp_allocator` is stored as the frame allocator; the debug line
    /// tracker starts at its default and the error list starts empty.
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        temp_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            temp_registers,
            frame_allocator: temp_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
            errors: Vec::new(),
        }
    }
}
74impl CodeBuilder<'_> {
75 pub(crate) fn emit_copy_register(
76 &mut self,
77 target_reg: &TypedRegister,
78 source_reg: &TypedRegister,
79 node: &Node,
80 comment: &str,
81 ) {
82 if source_reg.ty.is_mutable_reference_semantic() {
83 if target_reg.ty().is_mutable_reference() {
84 self.builder.add_mov_reg(
85 target_reg,
86 source_reg,
87 node,
88 &format!("emit_copy_register. ptr to ptr. {comment}"),
89 );
90 } else {
91 let size = source_reg.size();
92
93 let target_pointer_location = PointerLocation {
94 ptr_reg: target_reg.clone(),
95 };
96 let source_pointer_location = PointerLocation {
97 ptr_reg: source_reg.clone(),
98 };
99 self.builder.add_block_copy_with_immediate_size(
100 &target_pointer_location,
101 &source_pointer_location,
102 size,
103 node,
104 &format!("emit_copy_register.copy struct. {comment}"),
105 );
106 }
107 } else {
108 if matches!(
110 source_reg.ty.basic_type.kind,
111 BasicTypeKind::StringStorage {
112 element_type: _,
113 char: _,
114 capacity: _
115 }
116 ) && matches!(
117 target_reg.ty.basic_type.kind,
118 BasicTypeKind::StringView { byte: _, char: _ }
119 ) {
120 self.builder.add_string_duplicate(
121 target_reg,
122 source_reg,
123 node,
124 &format!("emit_copy_register. string storage to view (duplicate). {comment}"),
125 );
126 } else {
127 self.builder.add_mov_reg(
128 target_reg,
129 source_reg,
130 node,
131 &format!("emit_copy_register. primitive to primitive. {comment}"),
132 );
133 }
134 }
135 }
136
137
138 #[must_use] pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
139 let aligned = align(
140 self.frame_allocator.addr().as_size().0 as usize,
141 SAFE_ALIGNMENT,
142 );
143 FrameMemorySize(aligned as u32)
144 }
145
146 pub fn patch_enter(&mut self, patch_position: PatchPosition) {
147 self.builder
148 .patch_enter(self.total_aligned_frame_size(), patch_position);
149 }
150
151 fn debug_node(&self, node: &Node) {
152 let line_info = self.source_map_lookup.get_line(&node.span);
153 let span_text = self.source_map_lookup.get_text_span(&node.span);
154 eprintln!(
155 "{}:{}:{}> {}",
156 line_info.relative_file_name, line_info.row, line_info.col, span_text
157 );
158 }
160
161 pub(crate) fn emit_unary_operator(
162 &mut self,
163 target_reg: &TypedRegister,
164 unary_operator: &UnaryOperator,
165 ctx: &Context,
166 ) {
167 let node = &unary_operator.node;
168 match &unary_operator.kind {
169 UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
170 TypeKind::Bool => {
171 let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
172 self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
173 }
174 _ => panic!("unknown not op"),
175 },
176
177 UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
178 TypeKind::Int => {
179 let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
180 self.builder
181 .add_neg_i32(target_reg, &left_source, node, "negate i32");
182 }
183
184 TypeKind::Float => {
185 let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
186 self.builder
187 .add_neg_f32(target_reg, &left_source, node, "negate f32");
188 }
189 _ => panic!("negate should only be possible on Int and Float"),
190 },
191 }
192 }
193
194 pub(crate) fn emit_if(
195 &mut self,
196 output_destination: &Destination,
197 condition: &BooleanExpression,
198 true_expr: &Expression,
199 maybe_false_expr: Option<&Expression>,
200 ctx: &Context,
201 ) {
202 let jump_on_false_condition = self.emit_condition_context(condition, ctx);
203
204 self.emit_expression(output_destination, true_expr, ctx);
207
208 if let Some(false_expr) = maybe_false_expr {
209 let skip_false_if_true = self.builder.add_jump_placeholder(
211 &condition.expression.node,
212 "since it was true, skip over false section",
213 );
214
215 self.builder.patch_jump_here(jump_on_false_condition);
217
218 self.emit_expression(output_destination, false_expr, ctx);
220
221 self.builder.patch_jump_here(skip_false_if_true);
222 } else {
223 self.builder.patch_jump_here(jump_on_false_condition);
224 }
225 }
226
227 pub(crate) fn temp_frame_space_for_register(
228 &mut self,
229 count: u8,
230 comment: &str,
231 ) -> FrameMemoryRegion {
232 let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
233 let start = self
234 .frame_allocator
235 .allocate(total_size, REG_ON_FRAME_ALIGNMENT);
236
237 FrameMemoryRegion {
238 addr: start,
239 size: total_size,
240 }
241 }
242
    /// Writes an `Option` value (`Some(expr)` or `None`) into the memory
    /// location described by `output`.
    ///
    /// Layout: a one-byte tag (1 = Some, 0 = None) plus, for `Some`, the
    /// payload stored at the union's payload offset.
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        // Scratch registers allocated below are released again at the end.
        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                // Store the Some tag (1) at the union's tag offset.
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                // Evaluate the payload expression directly into the payload slot.
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            // NOTE(review): the None tag is stored at offset 0, while the Some
            // branch uses `union_information.tag_offset` — presumably the tag
            // offset is always 0; confirm against the optional layout.
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag");
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }
312
313 pub(crate) fn emit_block(
314 &mut self,
315 target_reg: &Destination,
316 expressions: &[Expression],
317 ctx: &Context,
318 ) {
319 if let Some((last, others)) = expressions.split_last() {
320 for expr in others {
321 self.emit_statement(expr, ctx);
322 }
323 if matches!(&*last.ty.kind, TypeKind::Unit) {
324 self.emit_statement(last, ctx);
325 } else {
326 self.emit_expression(target_reg, last, ctx);
327 }
328 } else {
329 }
331 }
332
    /// Looks up the register assigned to `variable`.
    ///
    /// # Panics
    /// Panics if the variable was never assigned a register (codegen invariant).
    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }
339
340 pub fn allocate_frame_space_and_return_absolute_pointer_reg(
341 &mut self,
342 ty: &BasicTypeRef,
343 node: &Node,
344 comment: &str,
345 ) -> TypedRegister {
346 let frame_placed_type = self.frame_allocator.allocate_type(ty);
347
348 let temp = self.temp_registers.allocate(
349 VmType::new_frame_placed(frame_placed_type),
350 &format!("{comment}: allocate frame space"),
351 );
352
353 self.builder.add_lea_from_frame_region(
354 &temp.register,
355 temp.register.region(),
356 node,
357 &format!("{comment}: set the allocated memory to pointer reg"),
358 );
359
360 temp.register
361 }
362
363 pub fn allocate_frame_space_and_return_pointer_location(
364 &mut self,
365 ty: &BasicTypeRef,
366 node: &Node,
367 comment: &str,
368 ) -> PointerLocation {
369 let absolute_base_ptr_reg =
370 self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
371 PointerLocation {
372 ptr_reg: absolute_base_ptr_reg,
373 }
374 }
375
376 pub fn allocate_frame_space_and_return_memory_location(
377 &mut self,
378 ty: &BasicTypeRef,
379 node: &Node,
380 comment: &str,
381 ) -> MemoryLocation {
382 let absolute_base_ptr_reg =
383 self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
384 MemoryLocation {
385 ty: absolute_base_ptr_reg.ptr_reg.ty.clone(),
386 base_ptr_reg: absolute_base_ptr_reg.ptr_reg,
387 offset: MemoryOffset(0),
388 }
389 }
390
391 pub fn allocate_frame_space_and_return_destination_to_it(
392 &mut self,
393 ty: &BasicTypeRef,
394 node: &Node,
395 comment: &str,
396 ) -> Destination {
397 let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
398 Destination::new_location(location)
399 }
400
    /// Loads the value of the constant `constant_reference` into `output`.
    ///
    /// Aggregates are copied from the constant memory area; scalars are either
    /// loaded via a scratch register into an output memory location, or loaded
    /// directly into the output register.
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Destination,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context, // NOTE(review): unused in this body — confirm whether callers rely on it
    ) {
        // Where the constant was laid out in constant memory; panics if the
        // constant was never placed (codegen invariant).
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        if constant_region.ty().is_aggregate() {
            // Aggregate: build a pointer to the constant area, then copy from
            // there into the destination.
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!("copy to target memory {output_reg} from constant memory area {source_memory_location}"),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            // Scalar into memory: load into a scratch register, then store it.
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            // Scalar into a register: load directly.
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }
490
491 pub(crate) fn emit_coerce_int_to_char(
492 &mut self,
493 target_reg: &TypedRegister,
494 expr: &Expression,
495 ctx: &Context,
496 ) {
497 let destination = Destination::Register(target_reg.clone());
498
499 self.emit_expression(&destination, expr, ctx);
501 }
502
503 pub(crate) fn emit_coerce_int_to_byte(
504 &mut self,
505 output: &Destination,
506 expr: &Expression,
507 ctx: &Context,
508 ) {
509 self.emit_expression(output, expr, ctx);
511
512 match output {
513 Destination::Unit => {}
514 Destination::Register(dest_reg) => {
515 self.builder
516 .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
517 }
518 Destination::Memory(mem) => {
519 let hwm = self.temp_registers.save_mark();
520 let temp_u8 = self
521 .temp_registers
522 .allocate(VmType::new_contained_in_register(u8_type()), "temp u8");
523 self.builder.add_ld8_from_pointer_with_offset(
524 temp_u8.register(),
525 &mem.base_ptr_reg,
526 mem.offset,
527 &expr.node,
528 "load it to check it",
529 );
530 self.builder
531 .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
532 self.temp_registers.restore_to_mark(hwm);
533 }
534 }
535 }
536
537 pub(crate) fn emit_coerce_option_to_bool(
538 &mut self,
539 target_reg: &TypedRegister,
540 expr: &Expression,
541 ctx: &Context,
542 ) {
543 let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);
546
547 self.builder.add_ld8_from_pointer_with_offset(
556 target_reg,
557 &base_pointer_of_tagged_union_reg,
558 MemoryOffset(0),
559 &expr.node,
560 "load option tag to bool register",
561 );
562 }
563
564 pub(crate) fn emit_coerce_to_any(
565 &mut self,
566 output: &Destination,
567 expr: &Expression,
568 ctx: &Context,
569 ) {
570 let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);
573
574 let pointer_register = self.emit_compute_effective_address_to_register(
575 output,
576 &expr.node,
577 "get starting ptr to output",
578 );
579 let output_aggregate_location = AggregateMemoryLocation::new(
580 MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
581 );
582
583 self.builder.add_st32_using_ptr_with_offset(
584 &output_aggregate_location
585 .offset(ANY_HEADER_PTR_OFFSET, u32_type())
586 .location,
587 &source_aggregate_pointer,
588 &expr.node,
589 "store aggregate pointer into Any Header",
590 );
591 let temp_size = self.temp_registers.allocate(
592 VmType::new_contained_in_register(u32_type()),
593 "Any header size temp",
594 );
595
596 self.builder.add_mov_32_immediate_value(
597 temp_size.register(),
598 source_aggregate_pointer.ty.basic_type.total_size.0,
599 &expr.node,
600 "fixed size",
601 );
602 self.builder.add_st32_using_ptr_with_offset(
603 &output_aggregate_location
604 .offset(ANY_HEADER_SIZE_OFFSET, u32_type())
605 .location,
606 temp_size.register(),
607 &expr.node,
608 "copy size into Any Header",
609 );
610
611 self.builder.add_mov_32_immediate_value(
612 temp_size.register(),
613 source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
614 &expr.node,
615 "reuse for hash",
616 );
617 self.builder.add_st32_using_ptr_with_offset(
618 &output_aggregate_location
619 .offset(ANY_HEADER_HASH_OFFSET, u32_type())
620 .location,
621 temp_size.register(),
622 &expr.node,
623 "copy size into Any Header",
624 );
625 }
626
627 pub(crate) fn merge_arguments_keep_literals(
628 outer_args: &Vec<ArgumentExpression>,
629 intrinsic_args: &Vec<ArgumentExpression>,
630 ) -> Vec<ArgumentExpression> {
631 let mut all_args = outer_args.clone();
635
636 if intrinsic_args.len() > outer_args.len() + 1 {
637 all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
638 }
639
640 all_args
641 }
642
643 pub(crate) fn emit_borrow_mutable_reference(
644 &mut self,
645 target_register: &TypedRegister,
646 node: &Node,
647 expr: &SingleLocationExpression,
648 ctx: &Context,
649 ) {
650 let location = self.emit_lvalue_address(expr, ctx);
651
652 let abs_pointer = self.emit_compute_effective_address_to_register(
653 &location,
654 node,
655 "calculate absolute address for reference",
656 );
657
658 self.builder.add_mov_reg(
659 target_register,
660 &abs_pointer,
661 node,
662 "copy calculated address for borrow",
663 );
664 }
665
666 pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
667 let node = &expr.node;
668 let (line, _column) = self
669 .source_map_lookup
670 .source_map
671 .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
672 let source_line_info = SourceFileLineInfo {
673 row: line,
674 file_id: node.span.file_id as usize,
675 };
676
677 let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
678 if let Some((start, end)) = answer {
679 let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
680 let (line, col) = self
681 .source_map_lookup
682 .source_map
683 .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
684 let source_line = self
685 .source_map_lookup
686 .source_map
687 .get_source_line(node.span.file_id, line)
688 .unwrap_or("<source line not found>");
689
690 info!(
691 file=%relative_file_name,
692 line=%line,
693 col=%col,
694 source=%source_line,
695 "{}",
696 description
697 );
698 }
699 }
700}