use crate::alloc::StackFrameAllocator;
use crate::ctx::Context;
use crate::reg_pool::HwmTempRegisterPool;
use crate::state::CodeGenState;
use seq_map::SeqMap;
use source_map_cache::{
    KeepTrackOfSourceLine, SourceFileLineInfo, SourceMapLookup, SourceMapWrapper,
};
use source_map_node::Node;
use swamp_semantic::{
    ArgumentExpression, BooleanExpression, ConstantRef, Expression, SingleLocationExpression,
    UnaryOperator, UnaryOperatorKind, VariableRef,
};
use swamp_types::TypeKind;
use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
use swamp_vm_types::aligner::{align, SAFE_ALIGNMENT};
use swamp_vm_types::types::BasicTypeKind;
use swamp_vm_types::types::{
    b8_type, u32_type, u8_type, BasicTypeRef, Destination, FramePlacedType, TypedRegister, VmType,
};
use swamp_vm_types::{
    AggregateMemoryLocation, FrameMemoryRegion, FrameMemorySize, MemoryLocation, MemoryOffset,
    MemorySize, PointerLocation, ANY_HEADER_HASH_OFFSET, ANY_HEADER_PTR_OFFSET,
    ANY_HEADER_SIZE_OFFSET, REG_ON_FRAME_ALIGNMENT, REG_ON_FRAME_SIZE,
};
use tracing::info;

#[derive(Copy, Clone)]
pub struct CodeBuilderOptions {
    pub should_show_debug: bool,
}
pub(crate) struct CodeBuilder<'a> {
    pub state: &'a mut CodeGenState,
    pub(crate) builder: &'a mut InstructionBuilder<'a>,
    pub(crate) variable_registers: SeqMap<usize, TypedRegister>,
    pub(crate) temp_registers: HwmTempRegisterPool,
    pub(crate) frame_allocator: StackFrameAllocator,
    pub debug_line_tracker: KeepTrackOfSourceLine,
    pub source_map_lookup: &'a SourceMapWrapper<'a>,
    pub options: CodeBuilderOptions,
}

impl<'a> CodeBuilder<'a> {
    pub fn new(
        state: &'a mut CodeGenState,
        builder: &'a mut InstructionBuilder<'a>,
        variable_registers: SeqMap<usize, TypedRegister>,
        temp_registers: HwmTempRegisterPool,
        frame_allocator: StackFrameAllocator,
        options: CodeBuilderOptions,
        source_map_lookup: &'a SourceMapWrapper<'a>,
    ) -> Self {
        Self {
            state,
            builder,
            variable_registers,
            temp_registers,
            frame_allocator,
            debug_line_tracker: KeepTrackOfSourceLine::default(),
            options,
            source_map_lookup,
        }
    }
}
impl CodeBuilder<'_> {
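    /// Copies a value from `source_reg` to `target_reg`: a plain register move for
    /// pointers and primitives, a block copy for aggregates behind mutable
    /// references, and a string duplication when a string storage is copied into a
    /// string view.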
    pub(crate) fn emit_copy_register(
        &mut self,
        target_reg: &TypedRegister,
        source_reg: &TypedRegister,
        node: &Node,
        comment: &str,
    ) {
        if source_reg.ty.is_mutable_reference_semantic() {
            if target_reg.ty().is_mutable_reference() {
                self.builder.add_mov_reg(
                    target_reg,
                    source_reg,
                    node,
                    &format!("emit_copy_register. ptr to ptr. {comment}"),
                );
            } else {
                let size = source_reg.size();

                let target_pointer_location = PointerLocation {
                    ptr_reg: target_reg.clone(),
                };
                let source_pointer_location = PointerLocation {
                    ptr_reg: source_reg.clone(),
                };
                self.builder.add_block_copy_with_immediate_size(
                    &target_pointer_location,
                    &source_pointer_location,
                    size,
                    node,
                    &format!("emit_copy_register.copy struct. {comment}"),
                );
            }
        } else if matches!(
            source_reg.ty.basic_type.kind,
            BasicTypeKind::StringStorage {
                element_type: _,
                char: _,
                capacity: _
            }
        ) && matches!(
            target_reg.ty.basic_type.kind,
            BasicTypeKind::StringView { byte: _, char: _ }
        ) {
            self.builder.add_string_duplicate(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. string storage to view (duplicate). {comment}"),
            );
        } else {
            self.builder.add_mov_reg(
                target_reg,
                source_reg,
                node,
                &format!("emit_copy_register. primitive to primitive. {comment}"),
            );
        }
    }

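    /// Loads `count` contiguous registers from the frame region starting at
    /// `start_address` into registers beginning with `start_reg`.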
    pub(crate) fn add_ld_regs_from_frame(
        &mut self,
        start_reg: &TypedRegister,
        start_address: FrameMemoryRegion,
        count: u8,
        node: &Node,
        comment: &str,
    ) {
        self.builder.add_ld_contiguous_regs_from_frame(
            start_reg.index,
            start_address,
            count,
            node,
            comment,
        );
    }

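    /// Returns the total frame size allocated so far, rounded up to `SAFE_ALIGNMENT`.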
    pub fn total_aligned_frame_size(&self) -> FrameMemorySize {
        let aligned = align(
            self.frame_allocator.addr().as_size().0 as usize,
            SAFE_ALIGNMENT,
        );
        FrameMemorySize(aligned as u32)
    }

    pub fn patch_enter(&mut self, patch_position: PatchPosition) {
        self.builder
            .patch_enter(self.total_aligned_frame_size(), patch_position);
    }

    fn debug_node(&self, node: &Node) {
        let line_info = self.source_map_lookup.get_line(&node.span);
        let span_text = self.source_map_lookup.get_text_span(&node.span);
        eprintln!(
            "{}:{}:{}> {}",
            line_info.relative_file_name, line_info.row, line_info.col, span_text
        );
    }

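    /// Emits a unary operator into `target_reg`: logical `not` for `Bool`, and
    /// negation for `Int` and `Float`.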
    pub(crate) fn emit_unary_operator(
        &mut self,
        target_reg: &TypedRegister,
        unary_operator: &UnaryOperator,
        ctx: &Context,
    ) {
        let node = &unary_operator.node;
        match &unary_operator.kind {
            UnaryOperatorKind::Not => match &*unary_operator.left.ty.kind {
                TypeKind::Bool => {
                    let t_flag = self.emit_unary_operator_logical(target_reg, unary_operator, ctx);
                    self.force_normalized_bool_reg_if_needed(target_reg, t_flag, node);
                }
                _ => panic!("unknown not op"),
            },

            UnaryOperatorKind::Negate => match &*unary_operator.left.ty.kind {
                TypeKind::Int => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_i32(target_reg, &left_source, node, "negate i32");
                }

                TypeKind::Float => {
                    let left_source = self.emit_scalar_rvalue(&unary_operator.left, ctx);
                    self.builder
                        .add_neg_f32(target_reg, &left_source, node, "negate f32");
                }
                _ => panic!("negate should only be possible on Int and Float"),
            },
        }
    }

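    /// Emits an `if`/`else`. The condition emits a jump-on-false placeholder; the
    /// true branch optionally jumps over the false branch, and both placeholders
    /// are patched once their targets are known.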
    pub(crate) fn emit_if(
        &mut self,
        output_destination: &Destination,
        condition: &BooleanExpression,
        true_expr: &Expression,
        maybe_false_expr: Option<&Expression>,
        ctx: &Context,
    ) {
        let jump_on_false_condition = self.emit_condition_context(condition, ctx);

        self.emit_expression(output_destination, true_expr, ctx);

        if let Some(false_expr) = maybe_false_expr {
            let skip_false_if_true = self.builder.add_jump_placeholder(
                &condition.expression.node,
                "since it was true, skip over false section",
            );

            self.builder.patch_jump_here(jump_on_false_condition);

            self.emit_expression(output_destination, false_expr, ctx);

            self.builder.patch_jump_here(skip_false_if_true);
        } else {
            self.builder.patch_jump_here(jump_on_false_condition);
        }
    }

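    /// Reserves frame memory large enough to hold `count` spilled registers and
    /// returns the allocated region.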
    pub(crate) fn temp_frame_space_for_register(
        &mut self,
        count: u8,
        comment: &str,
    ) -> FrameMemoryRegion {
        let total_size = MemorySize(REG_ON_FRAME_SIZE.0 * u32::from(count));
        let start = self
            .frame_allocator
            .allocate(total_size, REG_ON_FRAME_ALIGNMENT);

        FrameMemoryRegion {
            addr: start,
            size: total_size,
        }
    }

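    /// Emits an optional value into the target memory location: stores the tag
    /// byte (1 for `Some`, 0 for `None`) and, for `Some`, emits the payload
    /// expression directly into the payload offset of the tagged union.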
    pub(crate) fn emit_option_expression_into_target_memory_location(
        &mut self,
        output: &Destination,
        node: &Node,
        maybe_option: Option<&Expression>,
        ctx: &Context,
    ) {
        let memory_target = output.memory_location_or_pointer_reg();
        let memory_lvalue_location = AggregateMemoryLocation::new(memory_target);

        let hwm = self.temp_registers.save_mark();

        let tag_reg = self
            .temp_registers
            .allocate(VmType::new_unknown_placement(u8_type()), "emit_option tag");

        if let Some(some_expression) = maybe_option {
            let union_information = memory_lvalue_location
                .location
                .ty
                .basic_type()
                .optional_info()
                .unwrap()
                .clone();

            {
                let ty = memory_lvalue_location.location.ty.basic_type();
                self.builder.add_mov8_immediate(
                    tag_reg.register(),
                    1,
                    node,
                    &format!("set the tag Some (1) in register {ty}"),
                );
                self.builder.add_st8_using_ptr_with_offset(
                    &memory_lvalue_location
                        .offset(union_information.tag_offset, b8_type())
                        .location,
                    tag_reg.register(),
                    node,
                    "store optional Some tag",
                );
            }
            {
                let payload_location = &memory_lvalue_location
                    .offset(union_information.payload_offset, b8_type())
                    .location;
                self.emit_expression_into_target_memory(
                    payload_location,
                    some_expression,
                    "store option payload",
                    ctx,
                );
            }
        } else {
            self.builder
                .add_mov8_immediate(tag_reg.register(), 0, node, "option None tag");
            self.builder.add_st8_using_ptr_with_offset(
                &memory_lvalue_location.location,
                tag_reg.register(),
                node,
                "store optional None tag",
            );
        }
        self.temp_registers.restore_to_mark(hwm);
    }

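    /// Emits a block expression: all but the last expression are emitted as
    /// statements; the last is emitted into `target_reg` unless its type is `Unit`.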
    pub(crate) fn emit_block(
        &mut self,
        target_reg: &Destination,
        expressions: &[Expression],
        ctx: &Context,
    ) {
        if let Some((last, others)) = expressions.split_last() {
            for expr in others {
                self.emit_statement(expr, ctx);
            }
            if matches!(&*last.ty.kind, TypeKind::Unit) {
                self.emit_statement(last, ctx);
            } else {
                self.emit_expression(target_reg, last, ctx);
            }
        }
    }

    pub(crate) fn get_variable_register(&self, variable: &VariableRef) -> &TypedRegister {
        self.variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap()
    }

    fn get_variable_frame_placed(&self, variable: &VariableRef) -> FramePlacedType {
        let frame_address = self
            .variable_registers
            .get(&variable.unique_id_within_function)
            .unwrap();

        frame_address.frame_placed()
    }

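    /// Allocates frame space for `ty` and returns a temporary register that holds
    /// the absolute address of the allocation.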
    pub fn allocate_frame_space_and_return_absolute_pointer_reg(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> TypedRegister {
        let frame_placed_type = self.frame_allocator.allocate_type(ty);

        let temp = self.temp_registers.allocate(
            VmType::new_frame_placed(frame_placed_type),
            &format!("{comment}: allocate frame space"),
        );

        self.builder.add_lea_from_frame_region(
            &temp.register,
            temp.register.region(),
            node,
            &format!("{comment}: set the allocated memory to pointer reg"),
        );

        temp.register
    }

    pub fn allocate_frame_space_and_return_pointer_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> PointerLocation {
        let absolute_base_ptr_reg =
            self.allocate_frame_space_and_return_absolute_pointer_reg(ty, node, comment);
        PointerLocation {
            ptr_reg: absolute_base_ptr_reg,
        }
    }

    pub fn allocate_frame_space_and_return_memory_location(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> MemoryLocation {
        let pointer_location =
            self.allocate_frame_space_and_return_pointer_location(ty, node, comment);
        MemoryLocation {
            ty: pointer_location.ptr_reg.ty.clone(),
            base_ptr_reg: pointer_location.ptr_reg,
            offset: MemoryOffset(0),
        }
    }

    pub fn allocate_frame_space_and_return_destination_to_it(
        &mut self,
        ty: &BasicTypeRef,
        node: &Node,
        comment: &str,
    ) -> Destination {
        let location = self.allocate_frame_space_and_return_memory_location(ty, node, comment);
        Destination::new_location(location)
    }

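    /// Emits access to a constant. Aggregates are copied from the constant area via
    /// a temporary base pointer register; scalars are loaded from their absolute
    /// address, either directly into the output register or through a temporary
    /// that is then stored to the output memory location.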
    pub(crate) fn emit_constant_access(
        &mut self,
        output: &Destination,
        constant_reference: &ConstantRef,
        node: &Node,
        ctx: &Context,
    ) {
        let constant_region = self
            .state
            .constant_offsets
            .get(&constant_reference.id)
            .unwrap();
        if constant_region.ty().is_aggregate() {
            let source_base_ptr = self.temp_registers.allocate(
                VmType::new_contained_in_register(u32_type()),
                "temp register for the base pointer to the constant",
            );
            self.builder.add_mov_32_immediate_value(
                source_base_ptr.register(),
                constant_region.addr().0,
                node,
                &format!(
                    "load constant pointer '{}' type:{}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            let source_memory_location = MemoryLocation {
                base_ptr_reg: source_base_ptr.register,
                offset: MemoryOffset(0),
                ty: VmType::new_heap_placement(
                    constant_region.ty().clone(),
                    constant_region.region(),
                ),
            };

            let output_reg = output.memory_location_or_pointer_reg();

            self.emit_copy_value_from_memory_location(
                output,
                &source_memory_location,
                node,
                &format!(
                    "copy to target memory {output_reg} from constant memory area {source_memory_location}"
                ),
            );
        } else if let Some(output_memory_location) = output.memory_location() {
            let hwm = self.temp_registers.save_mark();
            let temp_reg = self.temp_registers.allocate(
                VmType::new_contained_in_register(constant_region.ty().clone()),
                "temporary for constant",
            );

            self.emit_load_scalar_from_absolute_address_instruction(
                temp_reg.register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );

            self.emit_store_scalar_to_memory_offset_instruction(
                output_memory_location,
                temp_reg.register(),
                node,
                &format!("put constant into memory {output_memory_location} <- {temp_reg}"),
            );

            self.temp_registers.restore_to_mark(hwm);
        } else {
            self.emit_load_scalar_from_absolute_address_instruction(
                output.grab_register(),
                constant_region.addr(),
                &VmType::new_unknown_placement(constant_region.ty().clone()),
                node,
                &format!(
                    "load constant primitive '{}' {:?}",
                    constant_reference.assigned_name,
                    constant_region.ty()
                ),
            );
        }
    }

    pub(crate) fn emit_coerce_int_to_char(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let destination = Destination::Register(target_reg.clone());

        self.emit_expression(&destination, expr, ctx);
    }

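    /// Emits `expr` into `output` and then checks the result as a `u8`, loading it
    /// back through a temporary register when the output lives in memory.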
    pub(crate) fn emit_coerce_int_to_byte(
        &mut self,
        output: &Destination,
        expr: &Expression,
        ctx: &Context,
    ) {
        self.emit_expression(output, expr, ctx);

        match output {
            Destination::Unit => {}
            Destination::Register(dest_reg) => {
                self.builder
                    .add_check_u8(dest_reg, &expr.node, "trunc int to byte");
            }
            Destination::Memory(mem) => {
                let hwm = self.temp_registers.save_mark();
                let temp_u8 = self.temp_registers.allocate(
                    VmType::new_contained_in_register(u8_type()),
                    "temp u8",
                );
                self.builder.add_ld8_from_pointer_with_offset(
                    temp_u8.register(),
                    &mem.base_ptr_reg,
                    mem.offset,
                    &expr.node,
                    "load it to check it",
                );
                self.builder
                    .add_check_u8(temp_u8.register(), &expr.node, "trunc int to byte");
                self.temp_registers.restore_to_mark(hwm);
            }
        }
    }

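    /// Coerces an optional to `Bool` by loading the tag byte at offset 0 of the
    /// tagged union into `target_reg`.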
    pub(crate) fn emit_coerce_option_to_bool(
        &mut self,
        target_reg: &TypedRegister,
        expr: &Expression,
        ctx: &Context,
    ) {
        let base_pointer_of_tagged_union_reg = self.emit_scalar_rvalue(expr, ctx);

        self.builder.add_ld8_from_pointer_with_offset(
            target_reg,
            &base_pointer_of_tagged_union_reg,
            MemoryOffset(0),
            &expr.node,
            "load option tag to bool register",
        );
    }

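    /// Coerces an aggregate into an `Any` by filling in the Any header: the pointer
    /// to the aggregate, its total size, and its type hash.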
    pub(crate) fn emit_coerce_to_any(
        &mut self,
        output: &Destination,
        expr: &Expression,
        ctx: &Context,
    ) {
        let source_aggregate_pointer = self.emit_scalar_rvalue(expr, ctx);

        let pointer_register = self.emit_compute_effective_address_to_register(
            output,
            &expr.node,
            "get starting ptr to output",
        );
        let output_aggregate_location = AggregateMemoryLocation::new(
            MemoryLocation::new_copy_over_whole_type_with_zero_offset(pointer_register),
        );

        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location.offset(ANY_HEADER_PTR_OFFSET, u32_type()).location,
            &source_aggregate_pointer,
            &expr.node,
            "store aggregate pointer into Any Header",
        );

        let temp_size = self.temp_registers.allocate(
            VmType::new_contained_in_register(u32_type()),
            "Any header size temp",
        );
        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.total_size.0,
            &expr.node,
            "fixed size",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location.offset(ANY_HEADER_SIZE_OFFSET, u32_type()).location,
            temp_size.register(),
            &expr.node,
            "copy size into Any Header",
        );

        self.builder.add_mov_32_immediate_value(
            temp_size.register(),
            source_aggregate_pointer.ty.basic_type.universal_hash_u64() as u32,
            &expr.node,
            "reuse for hash",
        );
        self.builder.add_st32_using_ptr_with_offset(
            &output_aggregate_location.offset(ANY_HEADER_HASH_OFFSET, u32_type()).location,
            temp_size.register(),
            &expr.node,
            "copy hash into Any Header",
        );
    }

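    /// Merges caller arguments with trailing intrinsic arguments: starts from
    /// `outer_args` and appends any `intrinsic_args` past index
    /// `outer_args.len() + 1` (typically literal arguments supplied by the
    /// intrinsic call itself).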
    pub(crate) fn merge_arguments_keep_literals(
        outer_args: &[ArgumentExpression],
        intrinsic_args: &[ArgumentExpression],
    ) -> Vec<ArgumentExpression> {
        let mut all_args = outer_args.to_vec();

        if intrinsic_args.len() > outer_args.len() + 1 {
            all_args.extend_from_slice(&intrinsic_args[outer_args.len() + 1..]);
        }

        all_args
    }

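    /// Emits a mutable borrow: computes the absolute address of the lvalue and
    /// copies that address into `target_register`.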
    pub(crate) fn emit_borrow_mutable_reference(
        &mut self,
        target_register: &TypedRegister,
        node: &Node,
        expr: &SingleLocationExpression,
        ctx: &Context,
    ) {
        let location = self.emit_lvalue_address(expr, ctx);

        let abs_pointer = self.emit_compute_effective_address_to_register(
            &location,
            node,
            "calculate absolute address for reference",
        );

        self.builder.add_mov_reg(
            target_register,
            &abs_pointer,
            node,
            "copy calculated address for borrow",
        );
    }

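    /// Logs file, line, column, and source text for `expr` via `tracing`, but only
    /// the first time the line tracker sees a new source line.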
    pub fn debug_expression(&mut self, expr: &Expression, description: &str) {
        let node = &expr.node;
        let (line, _column) = self
            .source_map_lookup
            .source_map
            .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
        let source_line_info = SourceFileLineInfo {
            row: line,
            file_id: node.span.file_id as usize,
        };

        let answer = self.debug_line_tracker.check_if_new_line(&source_line_info);
        if let Some((_start, _end)) = answer {
            let relative_file_name = self.source_map_lookup.get_relative_path(node.span.file_id);
            let (line, col) = self
                .source_map_lookup
                .source_map
                .get_span_location_utf8(node.span.file_id, node.span.offset as usize);
            let source_line = self
                .source_map_lookup
                .source_map
                .get_source_line(node.span.file_id, line)
                .unwrap_or("<source line not found>");

            info!(
                file=%relative_file_name,
                line=%line,
                col=%col,
                source=%source_line,
                "{}",
                description
            );
        }
    }
}