1pub mod alloc;
2pub mod alloc_util;
3pub mod constants;
4pub mod ctx;
5mod location;
6mod vec;
7
8use crate::alloc::{ConstantMemoryRegion, FrameMemoryRegion, ScopeAllocator};
9use crate::alloc_util::{
10 is_map, is_vec, layout_struct, layout_tuple, layout_tuple_elements, reserve_space_for_type,
11 type_size_and_alignment,
12};
13use crate::constants::ConstantsManager;
14use crate::ctx::Context;
15use seq_map::SeqMap;
16use swamp_script_node::Node;
17use swamp_script_semantic::intr::IntrinsicFunction;
18use swamp_script_semantic::{
19 AnonymousStructLiteral, ArgumentExpressionOrLocation, BinaryOperator, BinaryOperatorKind,
20 BooleanExpression, CompoundOperatorKind, ConstantId, ConstantRef, EnumLiteralData, Expression,
21 ExpressionKind, ForPattern, Function, Guard, InternalFunctionDefinitionRef, InternalFunctionId,
22 InternalMainExpression, Iterable, Literal, Match, MutOrImmutableExpression, NormalPattern,
23 Pattern, Postfix, PostfixKind, RangeMode, SingleLocationExpression,
24 SingleMutLocationExpression, StructInstantiation, UnaryOperator, UnaryOperatorKind,
25 VariableRef, WhenBinding,
26};
27use swamp_script_types::{AnonymousStructType, EnumVariantType, Signature, StructTypeField, Type};
28use swamp_vm_instr_build::{InstructionBuilder, PatchPosition};
29use swamp_vm_types::{
30 BOOL_SIZE, BinaryInstruction, CountU16, FrameMemoryAddress, FrameMemoryAddressIndirectPointer,
31 FrameMemorySize, HEAP_PTR_ALIGNMENT, HEAP_PTR_SIZE, HeapMemoryAddress, INT_SIZE,
32 InstructionPosition, MemoryAlignment, MemoryOffset, MemorySize, PTR_SIZE,
33 TempFrameMemoryAddress, VEC_ITERATOR_ALIGNMENT, VEC_ITERATOR_SIZE,
34};
35use tracing::{error, info, trace};
36
/// Categories of errors that can occur during code generation.
#[derive(Debug)]
pub enum ErrorKind {
    /// A compound assignment (e.g. `+=`) was used on an unsupported target.
    IllegalCompoundAssignment,
    /// Two variables in one function shared the same unique id
    /// (detected while laying out frame memory).
    VariableNotUnique,
    /// The expression is not a collection usable in this position.
    IllegalCollection,
    /// Iteration was attempted over a value that cannot be iterated.
    NotAnIterableCollection,
}
44
/// A code-generation error: the category plus the AST node it is
/// attributed to (for diagnostics).
#[derive(Debug)]
pub struct Error {
    /// What went wrong.
    pub kind: ErrorKind,
    /// Source node where the error was detected.
    pub node: Node,
}
50
/// Layout information for a temporary key/value slice pair, used when
/// materializing map literals (see `add_map_new_from_slice` usage).
pub struct SlicePairInfo {
    /// Temporary frame location where the pairs were written.
    pub addr: TempFrameMemoryAddress,
    /// Size of one key in bytes.
    pub key_size: MemorySize,
    /// Size of one value in bytes.
    pub value_size: MemorySize,
    /// Number of key/value pairs.
    pub element_count: CountU16,
    /// Size of one (key, value) element in bytes.
    pub element_size: MemorySize,
}
58
/// Bookkeeping for a generated function: where its code starts and the
/// semantic definition it was generated from.
pub struct FunctionInfo {
    /// Instruction position of the function's first instruction.
    pub starts_at_ip: InstructionPosition,
    /// The semantic function definition this code was generated from.
    pub internal_function_definition: InternalFunctionDefinitionRef,
}
63
/// A call-site placeholder recorded for a not-yet-generated callee;
/// patched once the callee's start position is known (see
/// `CodeGenState::finalize`).
pub struct FunctionFixup {
    /// Position of the placeholder call instruction to patch.
    pub patch_position: PatchPosition,
    /// Id of the function the call should target.
    pub fn_id: InternalFunctionId,
}
69
/// Bookkeeping for a constant initializer: the instruction position of
/// its generated initializer code and the constant it belongs to.
pub struct ConstantInfo {
    /// Start of the constant's initializer code.
    pub ip: InstructionPosition,
    /// The constant being initialized.
    pub constant_ref: ConstantRef,
}
74
/// Program-wide state accumulated during code generation.
pub struct CodeGenState {
    /// Emits and records the instruction stream.
    builder: InstructionBuilder,
    /// Owns the constant data blob.
    constants: ConstantsManager,
    /// Memory region assigned to each constant id.
    constant_offsets: SeqMap<ConstantId, ConstantMemoryRegion>,
    /// Initializer entry point recorded for each constant id.
    constant_functions: SeqMap<ConstantId, ConstantInfo>,
    /// Start position and definition for each generated function.
    function_infos: SeqMap<InternalFunctionId, FunctionInfo>,
    /// Call placeholders awaiting a target; resolved in `finalize`.
    function_fixups: Vec<FunctionFixup>,
}
83
/// Options controlling how a generated function is terminated.
pub struct GenOptions {
    /// When true the function ends with `hlt`; otherwise with `ret`
    /// (see `CodeGenState::finalize_function`).
    pub is_halt_function: bool,
}
87
88impl CodeGenState {
89 #[must_use]
90 pub fn new() -> Self {
91 Self {
92 builder: InstructionBuilder::default(),
93 constants: ConstantsManager::new(),
94 constant_offsets: SeqMap::default(),
95 function_infos: SeqMap::default(),
96 constant_functions: SeqMap::default(),
97 function_fixups: vec![],
98 }
99 }
100
101 #[must_use]
102 pub fn instructions(&self) -> &[BinaryInstruction] {
103 &self.builder.instructions
104 }
105 pub fn create_function_sections(&self) -> SeqMap<InstructionPosition, String> {
106 let mut lookups = SeqMap::new();
107 for (_func_id, function_info) in &self.function_infos {
108 let description = format!(
109 "{}",
110 function_info.internal_function_definition.assigned_name
111 );
112 lookups
113 .insert(function_info.starts_at_ip.clone(), description)
114 .unwrap()
115 }
116
117 lookups
118 }
119 #[must_use]
120 pub fn builder(&self) -> &InstructionBuilder {
121 &self.builder
122 }
123 pub fn constant_functions(&self) -> &SeqMap<ConstantId, ConstantInfo> {
124 &self.constant_functions
125 }
126 pub(crate) fn add_call(&mut self, internal_fn: &InternalFunctionDefinitionRef, comment: &str) {
127 let call_comment = &format!("calling {} ({})", internal_fn.assigned_name, comment);
128
129 if let Some(found) = self.function_infos.get(&internal_fn.program_unique_id) {
130 self.builder.add_call(&found.starts_at_ip, call_comment);
131 } else {
132 let patch_position = self.builder.add_call_placeholder(call_comment);
133 self.function_fixups.push(FunctionFixup {
134 patch_position,
135 fn_id: internal_fn.program_unique_id,
136 });
137 }
138 }
139 #[must_use]
140 pub fn comments(&self) -> &[String] {
141 &self.builder.comments
142 }
143
144 pub fn finalize(&mut self) {
145 for function_fixup in &self.function_fixups {
146 let func = self.function_infos.get(&function_fixup.fn_id).unwrap();
147 self.builder.patch_call(
148 PatchPosition(InstructionPosition(function_fixup.patch_position.0.0)),
149 &func.starts_at_ip,
150 );
151 }
152 }
153
154 #[must_use]
155 pub fn take_instructions_and_constants(self) -> (Vec<BinaryInstruction>, Vec<u8>) {
156 (self.builder.instructions, self.constants.take_data())
157 }
158
159 pub fn gen_function_def(
160 &mut self,
161 internal_fn_def: &InternalFunctionDefinitionRef,
162 options: &GenOptions,
163 ) -> Result<(), Error> {
164 assert_ne!(internal_fn_def.program_unique_id, 0);
165 self.function_infos
166 .insert(
167 internal_fn_def.program_unique_id,
168 FunctionInfo {
169 starts_at_ip: self.builder.position(),
170 internal_function_definition: internal_fn_def.clone(),
171 },
172 )
173 .unwrap();
174
175 let mut function_generator = FunctionCodeGen::new(self);
176
177 let ctx = Context::new(FrameMemoryRegion::new(
178 FrameMemoryAddress(0),
179 MemorySize(512),
180 ));
181
182 function_generator.layout_variables(
183 &internal_fn_def.function_scope_state,
184 &internal_fn_def.signature.return_type,
185 );
186
187 let ExpressionKind::Block(block_expressions) = &internal_fn_def.body.kind else {
188 panic!("function body should be a block")
189 };
190
191 if let ExpressionKind::IntrinsicCallEx(found_intrinsic_fn, _non_instantiated_arguments) =
192 &block_expressions[0].kind
193 {
194 todo!()
196 } else {
197 function_generator.gen_expression(&internal_fn_def.body, &ctx)?;
198 }
199
200 self.finalize_function(options);
201
202 Ok(())
203 }
204
205 pub fn finalize_function(&mut self, options: &GenOptions) {
206 if options.is_halt_function {
207 self.builder.add_hlt("");
208 } else {
209 self.builder.add_ret("");
210 }
211 }
212
213 pub fn gen_constants_in_order(&mut self, constants: &[ConstantRef]) -> Result<(), Error> {
214 for constant in constants {
215 let (size, _alignment) = type_size_and_alignment(&constant.resolved_type);
216 let ip = self.builder.position();
217 {
218 let mut function_generator = FunctionCodeGen::new(self);
219
220 let empty_ctx = Context::new(FrameMemoryRegion::new(FrameMemoryAddress(0), size));
221 function_generator.gen_expression(&constant.expr, &empty_ctx)?;
222 self.finalize_function(&GenOptions {
223 is_halt_function: true,
224 });
225 }
226
227 let constant_info = ConstantInfo {
228 ip,
229 constant_ref: constant.clone(),
231 };
232
233 self.constant_functions
234 .insert(constant.id, constant_info)
235 .unwrap();
236 }
237
238 Ok(())
239 }
240
241 pub fn gen_main_function(
244 &mut self,
245 main: &InternalMainExpression,
246 options: &GenOptions,
247 ) -> Result<(), Error> {
248 let mut function_generator = FunctionCodeGen::new(self);
249
250 function_generator.layout_variables(&main.function_scope_state, &main.expression.ty);
251 let empty_ctx = Context::new(FrameMemoryRegion::default());
252 function_generator.gen_expression(&main.expression, &empty_ctx)?;
253 self.finalize_function(options);
254 Ok(())
255 }
256}
257
/// Per-function code generator; borrows the shared [`CodeGenState`] and
/// tracks this function's frame layout while its body is generated.
pub struct FunctionCodeGen<'a> {
    /// Program-wide state (instruction builder, constants, fixups).
    state: &'a mut CodeGenState,
    /// Frame region assigned to each variable's unique id within the function.
    variable_offsets: SeqMap<usize, FrameMemoryRegion>,
    /// Total frame size including slack; set by `layout_variables`.
    frame_size: FrameMemorySize,
    /// Allocator for temporaries, placed above the argument area.
    temp_allocator: ScopeAllocator,
    /// Allocator for outgoing call arguments, placed directly above the frame.
    argument_allocator: ScopeAllocator,
}
266
impl<'a> FunctionCodeGen<'a> {
    /// Creates a generator for a single function, borrowing the shared
    /// code-generation state.
    ///
    /// Both allocators start with empty (default) regions;
    /// `layout_variables` installs the real regions once the frame size
    /// is known.
    #[must_use]
    pub fn new(state: &'a mut CodeGenState) -> Self {
        Self {
            state,
            variable_offsets: SeqMap::default(),
            frame_size: FrameMemorySize(0),
            temp_allocator: ScopeAllocator::new(FrameMemoryRegion::default()),
            argument_allocator: ScopeAllocator::new(FrameMemoryRegion::default()),
        }
    }
}
280
281impl FunctionCodeGen<'_> {
    /// Emits inline code for a call whose callee is a single intrinsic.
    ///
    /// `self_addr` carries the receiver's frame region for method-style
    /// intrinsics; the result, when any, is written to `ctx`. Most
    /// intrinsics are still `todo!()` stubs.
    #[allow(clippy::too_many_lines)]
    pub(crate) fn gen_single_intrinsic_call(
        &mut self,
        intrinsic_fn: &IntrinsicFunction,
        self_addr: Option<FrameMemoryRegion>,
        arguments: &[ArgumentExpressionOrLocation],
        ctx: &Context,
    ) -> Result<(), Error> {
        info!(?intrinsic_fn, "generate specific call for intrinsic");
        match intrinsic_fn {
            // Float intrinsics — not implemented yet.
            IntrinsicFunction::FloatRound => todo!(),
            IntrinsicFunction::FloatFloor => todo!(),
            IntrinsicFunction::FloatSqrt => todo!(),
            IntrinsicFunction::FloatSign => todo!(),
            IntrinsicFunction::FloatAbs => todo!(),
            IntrinsicFunction::FloatRnd => todo!(),
            IntrinsicFunction::FloatCos => todo!(),
            IntrinsicFunction::FloatSin => todo!(),
            IntrinsicFunction::FloatAcos => todo!(),
            IntrinsicFunction::FloatAsin => todo!(),
            IntrinsicFunction::FloatAtan2 => todo!(),
            IntrinsicFunction::FloatMin => todo!(),
            IntrinsicFunction::FloatMax => todo!(),
            IntrinsicFunction::FloatClamp => todo!(),
            // Int intrinsics — not implemented yet.
            IntrinsicFunction::IntAbs => todo!(),
            IntrinsicFunction::IntRnd => todo!(),
            IntrinsicFunction::IntMax => todo!(),
            IntrinsicFunction::IntMin => todo!(),
            IntrinsicFunction::IntClamp => todo!(),
            IntrinsicFunction::IntToFloat => todo!(),

            // String length: reads through the receiver's heap pointer.
            IntrinsicFunction::StringLen => {
                self.state.builder.add_string_len(
                    ctx.addr(),
                    FrameMemoryAddressIndirectPointer(self_addr.unwrap().addr),
                    "get the length",
                );
                Ok(())
            }

            // Builds a vec from a slice value; the element count is derived
            // from the slice region size divided by the element size.
            IntrinsicFunction::VecFromSlice => {
                let slice_variable = &arguments[0];
                let slice_region = self.gen_for_access_or_location_ex(slice_variable)?;
                let (element_size, element_alignment) =
                    type_size_and_alignment(&slice_variable.ty());
                self.state.builder.add_vec_from_slice(
                    ctx.addr(),
                    slice_region.addr,
                    element_size,
                    CountU16(slice_region.size.0 / element_size.0),
                    "create vec from slice",
                );
                Ok(())
            }
            IntrinsicFunction::VecPush => todo!(),
            IntrinsicFunction::VecPop => todo!(),
            IntrinsicFunction::VecRemoveIndex => todo!(),
            IntrinsicFunction::VecClear => todo!(),
            IntrinsicFunction::VecCreate => todo!(),
            IntrinsicFunction::VecSubscript => todo!(),
            IntrinsicFunction::VecSubscriptMut => todo!(),
            IntrinsicFunction::VecIter => todo!(),
            IntrinsicFunction::VecIterMut => todo!(),
            IntrinsicFunction::VecSelfPush => todo!(),
            IntrinsicFunction::VecSelfExtend => todo!(),
            IntrinsicFunction::VecLen => todo!(),
            IntrinsicFunction::VecIsEmpty => todo!(),

            IntrinsicFunction::MapCreate => todo!(),
            // Builds a map from a slice-pair literal. The literal shape is
            // expected to be guaranteed upstream, hence the bare panics.
            IntrinsicFunction::MapFromSlicePair => {
                let slice_pair_argument = &arguments[0];
                let ArgumentExpressionOrLocation::Expression(expr) = slice_pair_argument else {
                    panic!();
                };

                let ExpressionKind::Literal(some_lit) = &expr.kind else {
                    panic!();
                };

                let Literal::SlicePair(slice_type, expression_pairs) = some_lit else {
                    panic!();
                };

                let slice_pair_info = self.gen_slice_pair_literal(slice_type, expression_pairs);
                self.state.builder.add_map_new_from_slice(
                    ctx.addr(),
                    slice_pair_info.addr.to_addr(),
                    slice_pair_info.key_size,
                    slice_pair_info.value_size,
                    slice_pair_info.element_count,
                    "create map from temporary slice pair",
                );

                Ok(())
            }
            IntrinsicFunction::MapHas => todo!(),
            IntrinsicFunction::MapRemove => {
                let ArgumentExpressionOrLocation::Expression(key_argument) = &arguments[0] else {
                    panic!("must be expression for key");
                };
                self.gen_intrinsic_map_remove(self_addr.unwrap(), key_argument, ctx)
            }
            IntrinsicFunction::MapIter => todo!(),
            IntrinsicFunction::MapIterMut => todo!(),
            IntrinsicFunction::MapLen => todo!(),
            IntrinsicFunction::MapIsEmpty => todo!(),
            IntrinsicFunction::MapSubscript => todo!(),
            IntrinsicFunction::MapSubscriptSet => todo!(),
            IntrinsicFunction::MapSubscriptMut => todo!(),
            IntrinsicFunction::MapSubscriptMutCreateIfNeeded => todo!(),

            IntrinsicFunction::Map2Remove => todo!(),
            IntrinsicFunction::Map2Insert => todo!(),
            IntrinsicFunction::Map2GetColumn => todo!(),
            IntrinsicFunction::Map2GetRow => todo!(),
            IntrinsicFunction::Map2Get => todo!(),
            IntrinsicFunction::Map2Has => todo!(),
            IntrinsicFunction::Map2Create => todo!(),

            IntrinsicFunction::SparseCreate => todo!(),
            IntrinsicFunction::SparseFromSlice => todo!(),
            IntrinsicFunction::SparseIter => todo!(),
            IntrinsicFunction::SparseIterMut => todo!(),
            IntrinsicFunction::SparseSubscript => todo!(),
            IntrinsicFunction::SparseSubscriptMut => todo!(),
            IntrinsicFunction::SparseHas => todo!(),
            IntrinsicFunction::SparseRemove => todo!(),

            IntrinsicFunction::GridCreate => todo!(),
            IntrinsicFunction::GridFromSlice => todo!(),
            IntrinsicFunction::GridSet => todo!(),
            IntrinsicFunction::GridGet => todo!(),
            IntrinsicFunction::GridGetColumn => todo!(),

            IntrinsicFunction::Float2Magnitude => todo!(),

            IntrinsicFunction::SparseAdd => todo!(),
            IntrinsicFunction::SparseNew => todo!(),
        }
    }
430
431 fn gen_intrinsic_map_remove(
432 &mut self,
433 map_region: FrameMemoryRegion,
434 key_expr: &Expression,
435 ctx: &Context,
436 ) -> Result<(), Error> {
437 let key_region = self.gen_expression_for_access(key_expr)?;
438
439 self.state
440 .builder
441 .add_map_remove(map_region.addr, key_region.addr, "");
442
443 Ok(())
444 }
445
446 pub fn reserve(ty: &Type, allocator: &mut ScopeAllocator) -> FrameMemoryRegion {
447 let (size, alignment) = type_size_and_alignment(ty);
448 allocator.reserve(size, alignment)
449 }
450
451 pub fn layout_variables(
454 &mut self,
455 variables: &Vec<VariableRef>,
456 return_type: &Type,
457 ) -> Result<(), Error> {
458 let mut allocator = ScopeAllocator::new(FrameMemoryRegion::new(
459 FrameMemoryAddress(0),
460 MemorySize(1024),
461 ));
462 let _current_offset = Self::reserve(return_type, &mut allocator);
463
464 let mut enter_comment = "variables:\n".to_string();
465
466 for var_ref in variables {
467 let var_target = Self::reserve(&var_ref.resolved_type, &mut allocator);
468 trace!(?var_ref.assigned_name, ?var_target, "laying out");
469 enter_comment += &format!(
470 " ${:04X}:{} {}\n",
471 var_target.addr.0, var_target.size.0, var_ref.assigned_name
472 );
473 self.variable_offsets
474 .insert(var_ref.unique_id_within_function, var_target)
475 .map_err(|_| self.create_err(ErrorKind::VariableNotUnique, &var_ref.name))?;
476 }
477
478 let extra_frame_size = MemorySize(80);
479 let extra_target = FrameMemoryRegion::new(allocator.addr(), extra_frame_size);
480 self.frame_size = allocator.addr().as_size().add(extra_frame_size);
481
482 self.state
483 .builder
484 .add_enter(self.frame_size, &enter_comment);
485
486 const ARGUMENT_MAX_SIZE: u16 = 256;
487 self.argument_allocator = ScopeAllocator::new(FrameMemoryRegion::new(
488 FrameMemoryAddress(self.frame_size.0),
489 MemorySize(ARGUMENT_MAX_SIZE),
490 ));
491
492 self.temp_allocator = ScopeAllocator::new(FrameMemoryRegion::new(
493 FrameMemoryAddress(self.frame_size.0 + ARGUMENT_MAX_SIZE),
494 MemorySize(1024),
495 ));
496
497 Ok(())
498 }
499
    /// Reserves temporary frame space sized and aligned for `ty` and
    /// returns the resulting region.
    ///
    /// NOTE(review): the `comment` parameter is currently unused here.
    pub fn temp_memory_region_for_type(&mut self, ty: &Type, comment: &str) -> FrameMemoryRegion {
        let new_target_info = reserve_space_for_type(ty, &mut self.temp_allocator);
        info!(?new_target_info, "creating temporary space");
        new_target_info
    }
505
506 pub fn temp_space_for_type(&mut self, ty: &Type, comment: &str) -> Context {
507 Context::new(self.temp_memory_region_for_type(ty, comment))
508 }
509
    /// Returns a frame region from which `expr`'s value can be read.
    ///
    /// Variable accesses and slice/slice-pair literals are returned in
    /// place (no copy). Any other expression is materialized into freshly
    /// reserved temporary space first.
    #[allow(clippy::single_match_else)]
    pub fn gen_expression_for_access(
        &mut self,
        expr: &Expression,
    ) -> Result<FrameMemoryRegion, Error> {
        match &expr.kind {
            ExpressionKind::VariableAccess(var_ref) => {
                info!(?var_ref, "variable access");
                // Variables already have a region from `layout_variables`.
                let frame_address = self
                    .variable_offsets
                    .get(&var_ref.unique_id_within_function)
                    .unwrap();

                return Ok(*frame_address);
            }

            ExpressionKind::Literal(lit) => match lit {
                Literal::Slice(slice_type, expressions) => {
                    return self.gen_slice_literal(slice_type, expressions);
                }
                Literal::SlicePair(slice_pair_type, pairs) => {
                    let info = self.gen_slice_pair_literal(slice_pair_type, pairs);
                    // Region spans all elements of the materialized pair data.
                    return Ok(FrameMemoryRegion::new(
                        info.addr.0,
                        MemorySize(info.element_count.0 * info.element_size.0),
                    ));
                }
                // Other literals fall through to the temp-space path below.
                _ => {}
            },
            _ => {}
        };

        // Fallback: evaluate into temporary space and hand back that region.
        let temp_ctx = self.temp_space_for_type(&expr.ty, "expression");

        self.gen_expression(expr, &temp_ctx)?;

        Ok(temp_ctx.target())
    }
550
551 pub(crate) fn extra_frame_space_for_type(&mut self, ty: &Type) -> Context {
552 let target = Self::reserve(ty, &mut self.temp_allocator);
553 Context::new(target)
554 }
555
556 pub fn gen_expression(&mut self, expr: &Expression, ctx: &Context) -> Result<(), Error> {
557 match &expr.kind {
558 ExpressionKind::InterpolatedString(_) => todo!(),
559
560 ExpressionKind::ConstantAccess(constant_ref) => {
561 self.gen_constant_access(constant_ref, ctx)
562 }
563 ExpressionKind::TupleDestructuring(variables, tuple_types, tuple_expression) => {
564 self.gen_tuple_destructuring(variables, tuple_types, tuple_expression)
565 }
566 ExpressionKind::Range(start, end, mode) => self.gen_range(start, end, mode, ctx),
567
568 ExpressionKind::Assignment(target_mut_location_expr, source_expr) => {
569 self.gen_assignment(target_mut_location_expr, source_expr)
570 }
571 ExpressionKind::VariableAccess(variable_ref) => {
572 self.gen_variable_access(variable_ref, ctx)
573 }
574 ExpressionKind::InternalFunctionAccess(function) => {
575 self.internal_function_access(function, ctx)
576 }
577 ExpressionKind::BinaryOp(operator) => self.gen_binary_operator(operator, ctx),
578 ExpressionKind::UnaryOp(operator) => self.gen_unary_operator(operator, ctx),
579 ExpressionKind::PostfixChain(start, chain) => self.gen_postfix_chain(start, chain, ctx),
580 ExpressionKind::VariableDefinition(variable, expression) => {
581 self.gen_variable_definition(variable, expression, ctx)
582 }
583 ExpressionKind::VariableReassignment(variable, expression) => {
584 self.gen_variable_reassignment(variable, expression, ctx)
585 }
586 ExpressionKind::StructInstantiation(struct_literal) => {
587 self.gen_struct_literal(struct_literal, ctx)
588 }
589 ExpressionKind::AnonymousStructLiteral(anon_struct) => {
590 self.gen_anonymous_struct_literal(anon_struct, ctx)
591 }
592 ExpressionKind::Literal(basic_literal) => self.gen_literal(basic_literal, ctx),
593 ExpressionKind::Option(maybe_option) => {
594 self.gen_option_expression(maybe_option.as_deref(), ctx)
595 }
596 ExpressionKind::ForLoop(a, b, c) => self.gen_for_loop(a, b, c),
597 ExpressionKind::WhileLoop(condition, expression) => {
598 self.gen_while_loop(condition, expression, ctx)
599 }
600 ExpressionKind::Block(expressions) => self.gen_block(expressions, ctx),
601 ExpressionKind::Match(match_expr) => self.gen_match(match_expr, ctx),
602 ExpressionKind::Guard(guards) => self.gen_guard(guards, ctx),
603 ExpressionKind::If(conditional, true_expr, false_expr) => {
604 self.gen_if(conditional, true_expr, false_expr.as_deref(), ctx)
605 }
606 ExpressionKind::When(bindings, true_expr, false_expr) => {
607 self.gen_when(bindings, true_expr, false_expr.as_deref(), ctx)
608 }
609 ExpressionKind::CompoundAssignment(target_location, operator_kind, source_expr) => {
610 self.compound_assignment(target_location, operator_kind, source_expr, ctx)
611 }
612 ExpressionKind::IntrinsicCallEx(intrinsic_fn, arguments) => {
613 self.gen_intrinsic_call_ex(intrinsic_fn, arguments, ctx)
614 }
615 ExpressionKind::CoerceOptionToBool(_) => todo!(),
617 ExpressionKind::FunctionValueCall(_, _, _) => todo!(),
618
619 ExpressionKind::IntrinsicFunctionAccess(_) => todo!(), ExpressionKind::ExternalFunctionAccess(_) => todo!(), }
623 }
624
625 fn gen_unary_operator(
626 &mut self,
627 unary_operator: &UnaryOperator,
628 ctx: &Context,
629 ) -> Result<(), Error> {
630 match &unary_operator.kind {
631 UnaryOperatorKind::Not => {}
632 UnaryOperatorKind::Negate => match (&unary_operator.left.ty) {
633 Type::Int => {
634 let left_source = self.gen_expression_for_access(&unary_operator.left)?;
635 self.state
636 .builder
637 .add_neg_i32(ctx.addr(), left_source.addr, "negate i32");
638 }
639
640 Type::Float => {
641 let left_source = self.gen_expression_for_access(&unary_operator.left)?;
642 self.state
643 .builder
644 .add_neg_f32(ctx.addr(), left_source.addr, "negate f32");
645 }
646 _ => todo!(),
647 },
648 }
649
650 Ok(())
651 }
652
653 fn gen_binary_operator(
654 &mut self,
655 binary_operator: &BinaryOperator,
656 ctx: &Context,
657 ) -> Result<(), Error> {
658 match (&binary_operator.left.ty, &binary_operator.right.ty) {
659 (Type::Int, Type::Int) => self.gen_binary_operator_i32(binary_operator, ctx)?,
660 (Type::Bool, Type::Bool) => self.gen_binary_operator_bool(binary_operator)?,
661 (Type::String, Type::String) => {
662 self.gen_binary_operator_string(binary_operator, ctx)?
663 }
664 _ => todo!(),
665 }
666
667 Ok(())
668 }
669
670 fn gen_binary_operator_i32(
671 &mut self,
672 binary_operator: &BinaryOperator,
673 ctx: &Context,
674 ) -> Result<(), Error> {
675 let left_source = self.gen_expression_for_access(&binary_operator.left)?;
676 let right_source = self.gen_expression_for_access(&binary_operator.right)?;
677
678 match binary_operator.kind {
679 BinaryOperatorKind::Add => {
680 self.state.builder.add_add_i32(
681 ctx.addr(),
682 left_source.addr(),
683 right_source.addr(),
684 "i32 add",
685 );
686 }
687
688 BinaryOperatorKind::Subtract => todo!(),
689 BinaryOperatorKind::Multiply => {
690 self.state.builder.add_mul_i32(
691 ctx.addr(),
692 left_source.addr(),
693 right_source.addr(),
694 "i32 add",
695 );
696 }
697 BinaryOperatorKind::Divide => todo!(),
698 BinaryOperatorKind::Modulo => todo!(),
699 BinaryOperatorKind::LogicalOr => todo!(),
700 BinaryOperatorKind::LogicalAnd => todo!(),
701 BinaryOperatorKind::Equal => todo!(),
702 BinaryOperatorKind::NotEqual => todo!(),
703 BinaryOperatorKind::LessThan => {
704 self.state
705 .builder
706 .add_lt_i32(left_source.addr(), right_source.addr(), "i32 lt");
707 }
708 BinaryOperatorKind::LessEqual => todo!(),
709 BinaryOperatorKind::GreaterThan => {
710 self.state
711 .builder
712 .add_gt_i32(left_source.addr(), right_source.addr(), "i32 gt");
713 }
714 BinaryOperatorKind::GreaterEqual => todo!(),
715 BinaryOperatorKind::RangeExclusive => todo!(),
716 }
717
718 Ok(())
719 }
720
721 fn gen_binary_operator_string(
722 &mut self,
723 binary_operator: &BinaryOperator,
724 ctx: &Context,
725 ) -> Result<(), Error> {
726 let left_source = self.gen_expression_for_access(&binary_operator.left)?;
727 let right_source = self.gen_expression_for_access(&binary_operator.right)?;
728
729 info!(?left_source, ?right_source, "binary string");
730
731 match binary_operator.kind {
732 BinaryOperatorKind::Add => {
733 self.state.builder.add_string_append(
734 ctx.addr(),
735 left_source.addr(),
736 right_source.addr(),
737 "string add",
738 );
739 }
740
741 BinaryOperatorKind::Equal => todo!(),
742 BinaryOperatorKind::NotEqual => todo!(),
743 _ => panic!("illegal string operator"),
744 }
745
746 Ok(())
747 }
748
749 fn gen_binary_operator_bool(&mut self, binary_operator: &BinaryOperator) -> Result<(), Error> {
750 match binary_operator.kind {
751 BinaryOperatorKind::LogicalOr => {
752 self.gen_boolean_access(&binary_operator.left);
754
755 let jump_after_patch = self
756 .state
757 .builder
758 .add_jmp_if_equal_placeholder("skip rhs `or` expression");
759
760 self.gen_boolean_access(&binary_operator.right);
762
763 self.state.builder.patch_jump_here(jump_after_patch);
764 }
765 BinaryOperatorKind::LogicalAnd => {
766 self.gen_boolean_access(&binary_operator.left);
768
769 let jump_after_patch = self
770 .state
771 .builder
772 .add_jmp_if_not_equal_placeholder("skip rhs `and` expression");
773
774 self.gen_boolean_access(&binary_operator.right);
776
777 self.state.builder.patch_jump_here(jump_after_patch);
778 }
779 _ => {
780 panic!("unknown operator")
781 }
782 }
783
784 Ok(())
785 }
786
787 fn gen_condition_context(
788 &mut self,
789 condition: &BooleanExpression,
790 ) -> Result<(Context, PatchPosition), Error> {
791 let condition_ctx = self.extra_frame_space_for_type(&Type::Bool);
792 self.gen_expression(&condition.expression, &condition_ctx)?;
793
794 let jump_on_false_condition = self
795 .state
796 .builder
797 .add_jmp_if_not_equal_placeholder("jump boolean condition false");
798
799 Ok((condition_ctx, jump_on_false_condition))
800 }
801
    /// Evaluates `condition` for use as a branch condition; callers emit a
    /// conditional jump immediately afterwards (presumably this relies on a
    /// VM condition flag set by the evaluation — confirm).
    ///
    /// NOTE(review): the `Result` from `gen_expression_for_access` is
    /// discarded, so a failure while generating the condition is silently
    /// swallowed — confirm whether this should propagate.
    fn gen_boolean_access(&mut self, condition: &Expression) {
        let _frame_memory_region = self.gen_expression_for_access(&condition);
    }
815
    /// Thin wrapper: evaluates the inner expression of a
    /// [`BooleanExpression`] via `gen_boolean_access`.
    fn gen_boolean_expression(&mut self, condition: &BooleanExpression) {
        self.gen_boolean_access(&condition.expression);
    }
819
820 fn gen_if(
821 &mut self,
822 condition: &BooleanExpression,
823 true_expr: &Expression,
824 maybe_false_expr: Option<&Expression>,
825 ctx: &Context,
826 ) -> Result<(), Error> {
827 let (_condition_ctx, jump_on_false_condition) = self.gen_condition_context(condition)?;
828
829 self.gen_expression(true_expr, ctx)?;
831
832 if let Some(false_expr) = maybe_false_expr {
833 let skip_false_if_true = self
835 .state
836 .builder
837 .add_jump_placeholder("condition is false skip");
838
839 self.state.builder.patch_jump_here(jump_on_false_condition);
841
842 self.gen_expression(false_expr, ctx)?;
844
845 self.state.builder.patch_jump_here(skip_false_if_true);
846 } else {
847 self.state.builder.patch_jump_here(jump_on_false_condition);
848 }
849
850 Ok(())
851 }
852
853 fn gen_while_loop(
854 &mut self,
855 condition: &BooleanExpression,
856 expression: &Expression,
857 ctx: &Context,
858 ) -> Result<(), Error> {
859 assert_eq!(ctx.target_size().0, 0);
861
862 let ip_for_condition = self.state.builder.position();
863
864 let (_condition_ctx, jump_on_false_condition) = self.gen_condition_context(condition)?;
865
866 let mut unit_ctx = self.temp_space_for_type(&Type::Unit, "while body expression");
868 self.gen_expression(expression, &mut unit_ctx)?;
869
870 self.state
872 .builder
873 .add_jmp(ip_for_condition, "jmp to while condition");
874
875 self.state.builder.patch_jump_here(jump_on_false_condition);
876
877 Ok(())
878 }
879
880 fn gen_location_argument(
881 &mut self,
882 argument: &SingleLocationExpression,
883 ctx: &Context,
884 comment: &str,
885 ) -> Result<(), Error> {
886 let region = self.gen_lvalue_address(argument)?;
887
888 self.state
889 .builder
890 .add_mov(ctx.addr(), region.addr, region.size, comment);
891
892 Ok(())
893 }
894
895 fn gen_variable_assignment(
896 &mut self,
897 variable: &VariableRef,
898 mut_or_immutable_expression: &MutOrImmutableExpression,
899 ctx: &Context,
900 ) -> Result<(), Error> {
901 let target_relative_frame_pointer = self
902 .variable_offsets
903 .get(&variable.unique_id_within_function)
904 .unwrap_or_else(|| panic!("{}", variable.assigned_name));
905
906 let init_ctx =
907 ctx.with_target(*target_relative_frame_pointer, "variable assignment target");
908
909 self.gen_mut_or_immute(mut_or_immutable_expression, &init_ctx)
910 }
911
912 fn gen_assignment(
913 &mut self,
914 lhs: &SingleMutLocationExpression,
915 rhs: &Expression,
916 ) -> Result<(), Error> {
917 let lhs_addr = self.gen_lvalue_address(&lhs.0)?;
918 let access = self.gen_expression_for_access(rhs)?;
919
920 self.state
921 .builder
922 .add_mov(lhs_addr.addr, access.addr, access.size, "assignment");
923
924 Ok(())
925 }
926
    /// Generates a variable definition. Storage was already reserved by
    /// `layout_variables`, so this is identical to assigning into the
    /// variable's frame region.
    fn gen_variable_definition(
        &mut self,
        variable: &VariableRef,
        mut_or_immutable_expression: &MutOrImmutableExpression,
        ctx: &Context,
    ) -> Result<(), Error> {
        self.gen_variable_assignment(variable, mut_or_immutable_expression, ctx)
    }
935
936 fn gen_variable_reassignment(
937 &mut self,
938 variable: &VariableRef,
939 mut_or_immutable_expression: &Box<MutOrImmutableExpression>,
940 ctx: &Context,
941 ) -> Result<(), Error> {
942 self.gen_variable_assignment(variable, mut_or_immutable_expression, ctx)
943 }
944
    /// After a call returns, copies mutable arguments back from the
    /// argument marshalling area into their original locations.
    ///
    /// Re-walks the argument area with the same reservation order used
    /// when the call was set up (return slot first, then `self` if any,
    /// then each parameter) so the source offsets line up.
    fn copy_back_mutable_arguments(
        &mut self,
        signature: &Signature,
        maybe_self: Option<FrameMemoryRegion>,
        arguments: &Vec<ArgumentExpressionOrLocation>,
    ) -> Result<(), Error> {
        let arguments_memory_region = self.infinite_above_frame_size();
        let mut arguments_allocator = ScopeAllocator::new(arguments_memory_region);

        // Skip over the return-value slot; only its size matters here.
        let _argument_addr = Self::reserve(&signature.return_type, &mut arguments_allocator);

        let mut parameters = signature.parameters.clone();
        if let Some(found_self) = maybe_self {
            let source_region =
                Self::reserve(&parameters[0].resolved_type, &mut arguments_allocator);
            self.state.builder.add_mov(
                found_self.addr,
                source_region.addr,
                source_region.size,
                "copy back to <self>",
            );
            parameters.remove(0);
        }
        for (parameter, argument) in parameters.iter().zip(arguments) {
            // Reserve before the mutability check (even for immutable
            // parameters) so offsets stay in step with the call layout.
            let source_region = Self::reserve(&parameter.resolved_type, &mut arguments_allocator);
            if !parameter.is_mutable {
                continue;
            }

            if let ArgumentExpressionOrLocation::Location(found_location) = argument {
                let argument_target = self.gen_lvalue_address(found_location)?;
                self.state.builder.add_mov(
                    argument_target.addr,
                    source_region.addr,
                    source_region.size,
                    &format!(
                        "copy back mutable argument {}",
                        found_location.starting_variable.assigned_name
                    ),
                );
            } else {
                panic!("internal error. argument is mut but not a location")
            }
        }
        Ok(())
    }
    /// Lays out and fills the outgoing argument area for a call: return
    /// slot first, then `self` (if given), then each declared parameter in
    /// order. Returns the frame region covering the marshalled arguments
    /// (excluding the return slot).
    ///
    /// NOTE(review): indexes `argument_targets[0]` and the last element,
    /// so a signature with no parameters (and no `self`) panics here —
    /// confirm callers never hit that case.
    fn gen_arguments(
        &mut self,
        signature: &Signature,
        self_region: Option<FrameMemoryRegion>,
        arguments: &Vec<ArgumentExpressionOrLocation>,
    ) -> Result<FrameMemoryRegion, Error> {
        // Restart the marshalling area for this call.
        self.argument_allocator.reset();
        // Return-value slot sits first, directly above the frame.
        let argument_addr = Self::reserve(&signature.return_type, &mut self.argument_allocator);
        assert_eq!(argument_addr.addr.0, self.frame_size.0);

        let mut argument_targets = Vec::new();
        let mut argument_comments = Vec::new();

        for (index, type_for_parameter) in signature.parameters.iter().enumerate() {
            let argument_target = Self::reserve(
                &type_for_parameter.resolved_type,
                &mut self.argument_allocator,
            );
            let arg_ctx = Context::new(argument_target);
            info!(?index, %argument_target.addr, "layout argument");
            argument_targets.push(arg_ctx);
            argument_comments.push(format!("argument {}", type_for_parameter.name));
        }

        if let Some(push_self) = self_region {
            // `self` occupies the first parameter slot; copy it in and drop
            // that slot from the per-argument loop below.
            self.state.builder.add_mov(
                argument_targets[0].addr(),
                push_self.addr,
                push_self.size,
                "<self>",
            );
            argument_targets.remove(0);
        }

        for ((argument_target_ctx, argument_expr_or_loc), argument_comment) in argument_targets
            .iter()
            .zip(arguments)
            .zip(argument_comments)
        {
            let debug_addr = argument_target_ctx.target().addr();
            info!(%debug_addr, "set argument");
            self.gen_argument(
                argument_expr_or_loc,
                &argument_target_ctx,
                &argument_comment,
            )?;
        }

        // Region spanning from the first argument to the end of the last.
        let last_addr = argument_targets[argument_targets.len() - 1]
            .addr()
            .add(argument_targets[argument_targets.len() - 1].target_size());
        Ok(FrameMemoryRegion {
            addr: argument_targets[0].addr(),
            size: MemorySize(last_addr.0 - argument_targets[0].addr().0),
        })
    }
1049
1050 #[allow(clippy::too_many_lines)]
1051 fn gen_postfix_chain(
1052 &mut self,
1053 start_expression: &Expression,
1054 chain: &[Postfix],
1055 ctx: &Context,
1056 ) -> Result<(), Error> {
1057 if let ExpressionKind::InternalFunctionAccess(internal_fn) = &start_expression.kind {
1058 if chain.len() == 1 {
1059 if let PostfixKind::FunctionCall(args) = &chain[0].kind {
1060 if let Some(intrinsic_fn) = single_intrinsic_fn(&internal_fn.body) {
1061 self.gen_single_intrinsic_call(intrinsic_fn, None, args, ctx)?;
1062 } else {
1063 self.gen_arguments(&internal_fn.signature, None, args)?;
1064 self.state
1065 .add_call(internal_fn, &format!("frame size: {}", self.frame_size)); let (return_size, _alignment) =
1067 type_size_and_alignment(&internal_fn.signature.return_type);
1068 if return_size.0 != 0 {
1069 self.state.builder.add_mov(
1070 ctx.addr(),
1071 self.infinite_above_frame_size().addr,
1072 return_size,
1073 "copy the ret value to destination",
1074 );
1075 }
1076 self.copy_back_mutable_arguments(&internal_fn.signature, None, args)?;
1077 }
1078
1079 return Ok(());
1080 }
1081 }
1082 }
1083
1084 if let ExpressionKind::ExternalFunctionAccess(external_fn) = &start_expression.kind {
1085 if chain.len() == 1 {
1086 if let PostfixKind::FunctionCall(args) = &chain[0].kind {
1087 let total_region = self.gen_arguments(&external_fn.signature, None, args)?;
1088 self.state.builder.add_host_call(
1089 external_fn.id as u16,
1090 total_region.size,
1091 &format!("call external '{}'", external_fn.assigned_name),
1092 );
1093 let (return_size, _alignment) =
1094 type_size_and_alignment(&external_fn.signature.return_type);
1095 if return_size.0 != 0 {
1096 self.state.builder.add_mov(
1097 ctx.addr(),
1098 self.infinite_above_frame_size().addr,
1099 return_size,
1100 "copy the ret value to destination",
1101 );
1102 }
1103
1104 return Ok(());
1105 }
1106 }
1107 }
1108
1109 let mut start_source = self.gen_expression_for_access(start_expression)?;
1110
1111 for element in chain {
1112 match &element.kind {
1113 PostfixKind::StructField(anonymous_struct, field_index) => {
1114 let (memory_offset, memory_size, _max_alignment) =
1115 Self::get_struct_field_offset(
1116 &anonymous_struct.field_name_sorted_fields,
1117 *field_index,
1118 );
1119 info!(
1120 ?field_index,
1121 ?memory_offset,
1122 ?memory_size,
1123 "lookup struct field",
1124 );
1125 start_source = FrameMemoryRegion::new(
1126 start_source.addr.advance(memory_offset),
1127 memory_size,
1128 );
1129 }
1130 PostfixKind::MemberCall(function_to_call, arguments) => {
1131 match &**function_to_call {
1132 Function::Internal(internal_fn) => {
1133 if let Some(intrinsic_fn) = single_intrinsic_fn(&internal_fn.body) {
1134 self.gen_single_intrinsic_call(
1135 intrinsic_fn,
1136 Some(start_source),
1137 arguments,
1138 ctx,
1139 );
1140 } else {
1141 self.gen_arguments(
1142 &internal_fn.signature,
1143 Some(start_source),
1144 arguments,
1145 );
1146 self.state.add_call(
1147 internal_fn,
1148 &format!("frame size: {}", self.frame_size),
1149 ); let (return_size, _alignment) =
1152 type_size_and_alignment(&internal_fn.signature.return_type);
1153 if return_size.0 != 0 {
1154 self.state.builder.add_mov(
1155 ctx.addr(),
1156 self.infinite_above_frame_size().addr,
1157 return_size,
1158 "copy the return value to destination",
1159 );
1160 }
1161
1162 self.copy_back_mutable_arguments(
1163 &internal_fn.signature,
1164 Some(start_source),
1165 arguments,
1166 );
1167 }
1168 }
1169 Function::External(external_fn) => {
1170 }
1172 }
1173 }
1174 PostfixKind::FunctionCall(arguments) => {
1175 }
1178 PostfixKind::OptionUnwrap => todo!(),
1179 PostfixKind::NoneCoalesce(_) => todo!(),
1180 PostfixKind::IntrinsicCall(_, _) => todo!(),
1181 }
1182 }
1183
1184 Ok(())
1185 }
1186
1187 fn gen_tuple(&mut self, expressions: &[Expression], ctx: &Context) -> Result<(), Error> {
1188 let mut scope = ScopeAllocator::new(ctx.target());
1189
1190 for expr in expressions {
1191 let (memory_size, alignment) = type_size_and_alignment(&expr.ty);
1192 let start_addr = scope.allocate(memory_size, alignment);
1193 let element_region = FrameMemoryRegion::new(start_addr, memory_size);
1194 let element_ctx = Context::new(element_region);
1195 self.gen_expression(expr, &element_ctx)?;
1196 }
1197
1198 Ok(())
1199 }
1200
    /// Walks the name-sorted fields of an anonymous struct and returns the
    /// byte offset, size, and alignment of the field at `index_to_find`.
    ///
    /// NOTE(review): offsets are accumulated without inserting alignment
    /// padding between fields (tight packing) — confirm this matches the
    /// layout produced by `layout_struct` in `alloc_util`, which is used
    /// elsewhere for whole-struct sizes.
    ///
    /// # Panics
    /// Panics if `index_to_find` is not a valid field index.
    fn get_struct_field_offset(
        fields: &SeqMap<String, StructTypeField>,
        index_to_find: usize,
    ) -> (MemoryOffset, MemorySize, MemoryAlignment) {
        // Running byte offset from the start of the struct.
        let mut offset = 0;

        for (index, (_name, field)) in fields.iter().enumerate() {
            let (struct_field_size, struct_field_align) =
                type_size_and_alignment(&field.field_type);
            if index == index_to_find {
                return (MemoryOffset(offset), struct_field_size, struct_field_align);
            }

            offset += struct_field_size.0;
        }

        panic!("field not found");
    }
1219
1220 fn gen_anonymous_struct(
1221 &mut self,
1222 anon_struct_type: &AnonymousStructType,
1223 source_order_expressions: &Vec<(usize, Expression)>,
1224 base_context: &Context,
1225 ) -> Result<(), Error> {
1226 for (field_index, expression) in source_order_expressions {
1227 let (field_memory_offset, field_size, _field_alignment) = Self::get_struct_field_offset(
1228 &anon_struct_type.field_name_sorted_fields,
1229 *field_index,
1230 );
1231 let field_ctx = base_context.with_offset(field_memory_offset, field_size);
1232 self.gen_expression(expression, &field_ctx)?;
1233 }
1234
1235 Ok(())
1236 }
1237
    /// Emits code that writes `literal`'s value into the target region.
    fn gen_literal(&mut self, literal: &Literal, ctx: &Context) -> Result<(), Error> {
        match literal {
            Literal::IntLiteral(int) => {
                self.state.builder.add_ld32(ctx.addr(), *int, "int literal");
            }
            Literal::FloatLiteral(fixed_point) => {
                // Fixed-point floats are stored as their raw 32-bit representation.
                self.state
                    .builder
                    .add_ld32(ctx.addr(), fixed_point.inner(), "float literal");
            }
            Literal::NoneLiteral => {
                // `none` is a single zero tag byte.
                self.state.builder.add_ld8(ctx.addr(), 0, "none literal");
            }
            Literal::BoolLiteral(truthy) => {
                self.state
                    .builder
                    .add_ld8(ctx.addr(), u8::from(*truthy), "bool literal");
            }

            Literal::EnumVariantLiteral(enum_type, a, b) => {
                // Tag byte first: the variant's container index.
                self.state.builder.add_ld8(
                    ctx.addr(),
                    a.common().container_index,
                    &format!("enum variant {} tag", a.common().assigned_name),
                );

                // The payload begins after the one-byte tag.
                let starting_offset = MemoryOffset(1);

                // Size and alignment of the variant's payload.
                let (data_size, data_alignment) = match a {
                    EnumVariantType::Struct(enum_variant_struct) => {
                        layout_struct(&enum_variant_struct.anon_struct)
                    }
                    EnumVariantType::Tuple(tuple_type) => layout_tuple(&tuple_type.fields_in_order),
                    EnumVariantType::Nothing(_) => (MemorySize(0), MemoryAlignment::U8),
                };

                // NOTE(review): `starting_offset` is unused; the payload is
                // placed at an offset equal to its alignment, which matches
                // `gen_option_expression` — confirm against the enum layout rules.
                let skip_octets: usize = data_alignment.into();
                let skip = MemorySize(skip_octets as u16);
                let inner_addr = ctx.addr().add(skip);
                let region = FrameMemoryRegion::new(inner_addr, data_size);
                let inner_ctx = Context::new(region);

                match b {
                    EnumLiteralData::Nothing => {}
                    EnumLiteralData::Tuple(expressions) => {
                        self.gen_tuple(expressions, &inner_ctx)?;
                    }
                    EnumLiteralData::Struct(sorted_expressions) => {
                        if let EnumVariantType::Struct(variant_struct_type) = a {
                            self.gen_anonymous_struct(
                                &variant_struct_type.anon_struct,
                                sorted_expressions,
                                &inner_ctx,
                            )?;
                        }
                    }
                }
            }
            Literal::TupleLiteral(_tuple_type, expressions) => self.gen_tuple(expressions, ctx)?,
            Literal::StringLiteral(str) => {
                self.gen_string_literal(str, ctx);
            }
            Literal::Slice(ty, expressions) => {
                todo!()
            }
            Literal::SlicePair(ty, expression_pairs) => {
                todo!()
            }
        }

        Ok(())
    }
1312
1313 fn gen_string_literal(&mut self, string: &str, ctx: &Context) {
1314 let string_bytes = string.as_bytes();
1315 let string_byte_count = string_bytes.len();
1316
1317 let data_ptr = self
1318 .state
1319 .constants
1320 .allocate(string_bytes, MemoryAlignment::U8);
1321
1322 let mem_size = MemorySize(string_byte_count as u16);
1323
1324 self.state.builder.add_string_from_constant_slice(
1325 ctx.addr(),
1326 data_ptr,
1327 mem_size,
1328 "create string",
1329 );
1330 }
1332
1333 fn gen_option_expression(
1362 &mut self,
1363 maybe_option: Option<&Expression>,
1364 ctx: &Context,
1365 ) -> Result<(), Error> {
1366 if let Some(found_value) = maybe_option {
1367 self.state.builder.add_ld8(ctx.addr(), 1, "option Some tag"); let (inner_size, inner_alignment) = type_size_and_alignment(&found_value.ty);
1369 let one_offset_ctx = ctx.with_offset(inner_alignment.into(), inner_size);
1370
1371 self.gen_expression(found_value, &one_offset_ctx)?; } else {
1373 self.state.builder.add_ld8(ctx.addr(), 0, "option None tag"); }
1376
1377 Ok(())
1378 }
1379
    /// Emits the prologue of a `for` loop over a vec: evaluates the
    /// collection, initializes a temporary vec iterator, and emits the
    /// per-iteration "next or exit" instruction with a placeholder jump.
    ///
    /// Returns the position of the loop head (target for the back-edge
    /// jump) and the patch position of the exit jump; the caller patches
    /// the exit once the loop body has been emitted.
    fn gen_for_loop_vec(
        &mut self,
        for_pattern: &ForPattern,
        collection_expr: &MutOrImmutableExpression,
    ) -> Result<(InstructionPosition, PatchPosition), Error> {
        let collection_region = self.gen_for_access_or_location(collection_expr)?;

        // The iterator state lives in temp frame space for the loop's duration.
        let temp_iterator_region = self
            .temp_allocator
            .allocate(MemorySize(VEC_ITERATOR_SIZE), VEC_ITERATOR_ALIGNMENT);
        self.state.builder.add_vec_iter_init(
            temp_iterator_region,
            FrameMemoryAddressIndirectPointer(collection_region.addr),
            "initialize vec iterator",
        );

        // Loop head: each iteration jumps back to the `next` instruction below.
        let loop_ip = self.state.builder.position();

        let placeholder_position = match for_pattern {
            ForPattern::Single(variable) => {
                // `for v in vec` — the iterator writes each element into v's slot.
                let target_variable = self
                    .variable_offsets
                    .get(&variable.unique_id_within_function)
                    .unwrap();
                self.state.builder.add_vec_iter_next_placeholder(
                    temp_iterator_region,
                    target_variable.addr,
                    "move to next or jump over",
                )
            }
            ForPattern::Pair(variable_a, variable_b) => {
                // `for i, v in vec` — index and element slots are filled per step.
                let target_variable_a = self
                    .variable_offsets
                    .get(&variable_a.unique_id_within_function)
                    .unwrap();
                let target_variable_b = self
                    .variable_offsets
                    .get(&variable_b.unique_id_within_function)
                    .unwrap();
                self.state.builder.add_vec_iter_next_pair_placeholder(
                    temp_iterator_region,
                    target_variable_a.addr,
                    target_variable_b.addr,
                    "move to next or jump over",
                )
            }
        };

        Ok((loop_ip, placeholder_position))
    }
1430
    /// Emits the prologue of a `for` loop over a map.
    ///
    /// NOTE(review): this looks like placeholder scaffolding rather than a
    /// finished implementation — the frame addresses (0x80, 0x16) and the
    /// indirect pointer (0xffff) are hardcoded, the jump target is a fixed
    /// 256, and the returned patch position is always 0 instead of a real
    /// placeholder from the builder. TODO: confirm before relying on map
    /// iteration.
    fn gen_for_loop_map(
        &mut self,
        for_pattern: &ForPattern,
    ) -> Result<(InstructionPosition, PatchPosition), Error> {
        self.state.builder.add_map_iter_init(
            FrameMemoryAddress(0x80),
            FrameMemoryAddressIndirectPointer(FrameMemoryAddress(0xffff)),
            "initialize map iterator",
        );

        // Loop head for the back-edge jump.
        let jump_ip = self.state.builder.position();

        match for_pattern {
            ForPattern::Single(_) => {
                self.state.builder.add_map_iter_next(
                    FrameMemoryAddress(0x80),
                    FrameMemoryAddress(0x16),
                    InstructionPosition(256),
                    "move to next or jump over",
                );
            }
            ForPattern::Pair(_, _) => {
                self.state.builder.add_map_iter_next_pair(
                    FrameMemoryAddress(0x80),
                    FrameMemoryAddress(0x16),
                    FrameMemoryAddress(0x16),
                    InstructionPosition(256),
                    "move to next or jump over",
                );
            }
        }

        Ok((jump_ip, PatchPosition(InstructionPosition(0))))
    }
1465
    /// Emits a complete `for` loop: iterator initialization, the body,
    /// the back-edge jump, and the exit patch.
    fn gen_for_loop(
        &mut self,
        for_pattern: &ForPattern,
        iterable: &Iterable,
        closure: &Box<Expression>,
    ) -> Result<(), Error> {
        let collection_type = &iterable.resolved_expression.expression_or_location.ty();
        // Pick the iteration strategy from the collection's type.
        let (jump_ip, placeholder_position) = match collection_type {
            Type::String => {
                todo!();
            }
            Type::NamedStruct(_vec) => {
                // NOTE(review): `found_info` is unused in both arms — the
                // is_vec/is_map checks only classify the collection here.
                // Confirm the layout info is not needed for element sizing.
                if let Some(found_info) = is_vec(collection_type) {
                    self.gen_for_loop_vec(for_pattern, &iterable.resolved_expression)?
                } else if let Some(found_info) = is_map(collection_type) {
                    self.gen_for_loop_map(for_pattern)?
                } else {
                    return Err(self.create_err(
                        ErrorKind::NotAnIterableCollection,
                        iterable.resolved_expression.node(),
                    ));
                }
            }
            _ => {
                return Err(self.create_err(
                    ErrorKind::IllegalCollection,
                    iterable.resolved_expression.node(),
                ));
            }
        };

        // NOTE(review): both arms are empty — loop variables are bound by
        // the iterator `next` instructions emitted above. Confirm nothing
        // further was intended here.
        match for_pattern {
            ForPattern::Single(value_variable) => {}
            ForPattern::Pair(key_variable, value_variable) => {}
        }

        // The loop body's value is discarded (unit).
        let unit_expr = self.temp_space_for_type(&Type::Unit, "for loop body");
        self.gen_expression(closure, &unit_expr)?;

        // Back edge to the `next` instruction, then patch the exit jump to
        // land here, just past the loop.
        self.state
            .builder
            .add_jmp(jump_ip, "jump to next iteration");
        self.state.builder.patch_jump_here(placeholder_position);

        Ok(())
    }
1520
    /// Evaluates `vector_expr` into temporary frame space.
    ///
    /// NOTE(review): `element_type` and `ctx` are unused and no iteration
    /// is emitted — this appears to be an unfinished alternative to
    /// `gen_for_loop_vec`. TODO confirm whether it is still needed.
    fn gen_for_loop_for_vec(
        &mut self,
        element_type: &Type,
        vector_expr: Expression,
        ctx: &mut Context,
    ) -> Result<(), Error> {
        let vector_ctx = self.temp_space_for_type(&vector_expr.ty, "vector space");
        self.gen_expression(&vector_expr, &vector_ctx)

    }
1615
1616 fn gen_block(&mut self, expressions: &[Expression], ctx: &Context) -> Result<(), Error> {
1617 if let Some((last, others)) = expressions.split_last() {
1618 for expr in others {
1619 let temp_context = self.temp_space_for_type(&Type::Unit, "block target");
1620 self.gen_expression(expr, &temp_context)?;
1621 }
1622 self.gen_expression(last, ctx)?;
1623 }
1624
1625 Ok(())
1626 }
1627
1628 fn get_variable_region(&self, variable: &VariableRef) -> (FrameMemoryRegion, MemoryAlignment) {
1629 let frame_address = self
1630 .variable_offsets
1631 .get(&variable.unique_id_within_function)
1632 .unwrap();
1633 let (_size, align) = type_size_and_alignment(&variable.resolved_type);
1634
1635 (*frame_address, align)
1636 }
1637
1638 fn gen_variable_access(&mut self, variable: &VariableRef, ctx: &Context) -> Result<(), Error> {
1639 let (region, alignment) = self.get_variable_region(variable);
1640 self.state.builder.add_mov(
1641 ctx.addr(),
1642 region.addr,
1643 region.size,
1644 &format!(
1645 "variable access '{}' ({})",
1646 variable.assigned_name,
1647 ctx.comment()
1648 ),
1649 );
1650
1651 Ok(())
1652 }
1653
1654 fn referenced_or_not_type(ty: &Type) -> Type {
1655 if let Type::MutableReference(inner_type) = ty {
1656 *inner_type.clone()
1657 } else {
1658 ty.clone()
1659 }
1660 }
1661
1662 fn compound_assignment(
1663 &mut self,
1664 target_location: &SingleMutLocationExpression,
1665 op: &CompoundOperatorKind,
1666 source: &Expression,
1667 ctx: &Context,
1668 ) -> Result<(), Error> {
1669 let target_location = self.gen_lvalue_address(&target_location.0)?;
1670
1671 let source_info = self.gen_expression_for_access(source)?;
1672
1673 let type_to_consider = Self::referenced_or_not_type(&source.ty);
1674
1675 match &type_to_consider {
1676 Type::Int => {
1677 self.gen_compound_assignment_i32(&target_location, op, &source_info);
1678 }
1679 Type::Float => {
1680 self.gen_compound_assignment_f32(&target_location, op, &source_info);
1681 }
1682 Type::String => todo!(),
1683 _ => return Err(self.create_err(ErrorKind::IllegalCompoundAssignment, &source.node)),
1684 }
1685
1686 Ok(())
1687 }
1688
1689 fn gen_compound_assignment_i32(
1690 &mut self,
1691 target: &FrameMemoryRegion,
1692 op: &CompoundOperatorKind,
1693 source_ctx: &FrameMemoryRegion,
1694 ) {
1695 match op {
1696 CompoundOperatorKind::Add => {
1697 self.state.builder.add_add_i32(
1698 target.addr(),
1699 target.addr(),
1700 source_ctx.addr(),
1701 "+= (i32)",
1702 );
1703 }
1704 CompoundOperatorKind::Sub => todo!(),
1705 CompoundOperatorKind::Mul => todo!(),
1706 CompoundOperatorKind::Div => todo!(),
1707 CompoundOperatorKind::Modulo => todo!(),
1708 }
1709 }
1710
1711 fn gen_compound_assignment_f32(
1712 &mut self,
1713 target: &FrameMemoryRegion,
1714 op: &CompoundOperatorKind,
1715 source_ctx: &FrameMemoryRegion,
1716 ) {
1717 match op {
1718 CompoundOperatorKind::Add => {
1719 self.state.builder.add_add_f32(
1720 target.addr(),
1721 target.addr(),
1722 source_ctx.addr(),
1723 "+= (f32)",
1724 );
1725 }
1726 CompoundOperatorKind::Sub => todo!(),
1727 CompoundOperatorKind::Mul => todo!(),
1728 CompoundOperatorKind::Div => todo!(),
1729 CompoundOperatorKind::Modulo => todo!(),
1730 }
1731 }
1732
1733 fn internal_function_access(
1734 &mut self,
1735 internal: &InternalFunctionDefinitionRef,
1736 ctx: &Context,
1737 ) -> Result<(), Error> {
1738 self.state.builder.add_ld_u16(
1739 ctx.addr(),
1740 internal.program_unique_id,
1741 &format!("function access '{}'", internal.assigned_name),
1742 );
1743 Ok(())
1744 }
1745
    /// Returns a pseudo-region starting just past this function's frame,
    /// used as the area where callees place return values and arguments.
    ///
    /// NOTE(review): the 1024-octet size looks like an arbitrary cap, not
    /// a computed bound — confirm callers never copy more than this.
    fn infinite_above_frame_size(&self) -> FrameMemoryRegion {
        FrameMemoryRegion::new(FrameMemoryAddress(self.frame_size.0), MemorySize(1024))
    }
1749
1750 fn gen_struct_literal(
1751 &mut self,
1752 struct_literal: &StructInstantiation,
1753 ctx: &Context,
1754 ) -> Result<(), Error> {
1755 self.gen_struct_literal_helper(
1756 &struct_literal.struct_type_ref.anon_struct_type,
1757 &struct_literal.source_order_expressions,
1758 ctx,
1759 )
1760 }
1761
1762 fn gen_anonymous_struct_literal(
1763 &mut self,
1764 anon_struct_literal: &AnonymousStructLiteral,
1765 ctx: &Context,
1766 ) -> Result<(), Error> {
1767 self.gen_struct_literal_helper(
1768 &anon_struct_literal.anonymous_struct_type,
1769 &anon_struct_literal.source_order_expressions,
1770 ctx,
1771 )
1772 }
1773
1774 fn gen_struct_literal_helper(
1775 &mut self,
1776 struct_type_ref: &AnonymousStructType,
1777 source_order_expressions: &Vec<(usize, Expression)>,
1778 ctx: &Context,
1779 ) -> Result<(), Error> {
1780 let struct_type = Type::AnonymousStruct(struct_type_ref.clone());
1781 let (whole_struct_size, whole_struct_alignment) = type_size_and_alignment(&struct_type);
1782 if ctx.target_size().0 != whole_struct_size.0 {
1783 info!("problem");
1784 }
1785 assert_eq!(ctx.target_size().0, whole_struct_size.0);
1786
1787 for (field_index, expression) in source_order_expressions {
1788 let (field_offset, field_size, field_alignment) =
1789 struct_field_offset(*field_index, struct_type_ref);
1790 let new_address = ctx.addr().advance(field_offset);
1792 let field_ctx = Context::new(FrameMemoryRegion::new(new_address, field_size));
1793 self.gen_expression(expression, &field_ctx)?;
1794 }
1795
1796 Ok(())
1797 }
1798
1799 fn gen_slice_literal(
1800 &mut self,
1801 ty: &Type,
1802 expressions: &Vec<Expression>,
1803 ) -> Result<FrameMemoryRegion, Error> {
1804 let (element_size, element_alignment) = type_size_and_alignment(ty);
1805 let element_count = expressions.len() as u16;
1806 let total_slice_size = MemorySize(element_size.0 * element_count);
1807
1808 let start_frame_address_to_transfer = self
1809 .temp_allocator
1810 .allocate(total_slice_size, element_alignment);
1811 for (index, expr) in expressions.iter().enumerate() {
1812 let memory_offset = MemoryOffset((index as u16) * element_size.0);
1813 let region = FrameMemoryRegion::new(
1814 start_frame_address_to_transfer.advance(memory_offset),
1815 element_size,
1816 );
1817 let element_ctx = Context::new(region);
1818 self.gen_expression(expr, &element_ctx)?;
1819 }
1820
1821 Ok(FrameMemoryRegion::new(
1822 start_frame_address_to_transfer,
1823 total_slice_size,
1824 ))
1825 }
1826
1827 fn gen_slice_pair_literal(
1828 &mut self,
1829 slice_type: &Type,
1830 expressions: &[(Expression, Expression)],
1831 ) -> SlicePairInfo {
1832 let Type::SlicePair(key_type, value_type) = slice_type else {
1833 panic!("should have been slice pair type")
1834 };
1835
1836 let constructed_tuple = Type::Tuple(vec![*key_type.clone(), *value_type.clone()]);
1837
1838 let (key_size, key_alignment) = type_size_and_alignment(key_type);
1839 let (value_size, value_alignment) = type_size_and_alignment(value_type);
1840 let (element_size, tuple_alignment) = type_size_and_alignment(&constructed_tuple);
1841 let element_count = expressions.len() as u16;
1842 let total_slice_size = MemorySize(element_size.0 * element_count);
1843
1844 let start_frame_address_to_transfer = self
1845 .temp_allocator
1846 .allocate(total_slice_size, tuple_alignment);
1847
1848 for (index, (key_expr, value_expr)) in expressions.iter().enumerate() {
1849 let memory_offset = MemoryOffset((index as u16) * element_size.0);
1850 let key_region = FrameMemoryRegion::new(
1851 start_frame_address_to_transfer.advance(memory_offset),
1852 element_size,
1853 );
1854 let key_ctx = Context::new(key_region);
1855 self.gen_expression(key_expr, &key_ctx);
1856
1857 let value_region = FrameMemoryRegion::new(
1858 start_frame_address_to_transfer.advance(memory_offset.add(key_size, key_alignment)),
1859 value_size,
1860 );
1861 let value_ctx = Context::new(value_region);
1862 self.gen_expression(value_expr, &value_ctx);
1863 }
1864
1865 SlicePairInfo {
1866 addr: TempFrameMemoryAddress(start_frame_address_to_transfer),
1867 key_size,
1868 value_size,
1869 element_count: CountU16(element_count),
1870 element_size,
1871 }
1872 }
1873
1874 fn gen_slice_helper(
1875 &mut self,
1876 start_temp_frame_address_to_transfer: FrameMemoryAddress,
1877 element_count: u16,
1878 element_size: MemorySize,
1879 ctx: &Context,
1880 ) {
1881 let total_slice_size = MemorySize(element_size.0 * element_count);
1882 let vec_len_addr = ctx.addr().advance(MemoryOffset(0));
1883 self.state
1884 .builder
1885 .add_ld_u16(vec_len_addr, element_count, "slice len");
1886
1887 let vec_capacity_addr = ctx.addr().advance(MemoryOffset(2));
1888 self.state
1889 .builder
1890 .add_ld_u16(vec_capacity_addr, element_count, "slice capacity");
1891
1892 let vec_element_size_addr = ctx.addr().advance(MemoryOffset(4));
1893 self.state
1894 .builder
1895 .add_ld_u16(vec_element_size_addr, element_size.0, "slice element size");
1896
1897 }
1913
1914 fn gen_intrinsic_call_ex(
1915 &mut self,
1916 intrinsic_fn: &IntrinsicFunction,
1917 arguments: &Vec<ArgumentExpressionOrLocation>,
1918 ctx: &Context,
1919 ) -> Result<(), Error> {
1920 match intrinsic_fn {
1923 IntrinsicFunction::FloatRound => todo!(),
1925 IntrinsicFunction::FloatFloor => todo!(),
1926 IntrinsicFunction::FloatSqrt => todo!(),
1927 IntrinsicFunction::FloatSign => todo!(),
1928 IntrinsicFunction::FloatAbs => todo!(),
1929 IntrinsicFunction::FloatRnd => todo!(),
1930 IntrinsicFunction::FloatCos => todo!(),
1931 IntrinsicFunction::FloatSin => todo!(),
1932 IntrinsicFunction::FloatAcos => todo!(),
1933 IntrinsicFunction::FloatAsin => todo!(),
1934 IntrinsicFunction::FloatAtan2 => todo!(),
1935 IntrinsicFunction::FloatMin => todo!(),
1936 IntrinsicFunction::FloatMax => todo!(),
1937 IntrinsicFunction::FloatClamp => todo!(),
1938
1939 IntrinsicFunction::IntAbs => todo!(),
1941 IntrinsicFunction::IntRnd => todo!(),
1942 IntrinsicFunction::IntMax => todo!(),
1943 IntrinsicFunction::IntMin => todo!(),
1944 IntrinsicFunction::IntClamp => todo!(),
1945 IntrinsicFunction::IntToFloat => todo!(),
1946
1947 IntrinsicFunction::StringLen => todo!(),
1949
1950 IntrinsicFunction::VecFromSlice => self.gen_intrinsic_vec_from_slice(arguments, ctx),
1952 IntrinsicFunction::VecPush => todo!(),
1953 IntrinsicFunction::VecPop => todo!(),
1954 IntrinsicFunction::VecRemoveIndex => todo!(),
1955 IntrinsicFunction::VecClear => todo!(),
1956 IntrinsicFunction::VecCreate => {
1957 self.gen_intrinsic_vec_create(arguments);
1958 Ok(())
1959 }
1960 IntrinsicFunction::VecSubscript => todo!(),
1961 IntrinsicFunction::VecSubscriptMut => todo!(),
1962 IntrinsicFunction::VecIter => todo!(), IntrinsicFunction::VecIterMut => todo!(), IntrinsicFunction::VecLen => todo!(),
1965 IntrinsicFunction::VecIsEmpty => todo!(),
1966 IntrinsicFunction::VecSelfPush => todo!(),
1967 IntrinsicFunction::VecSelfExtend => todo!(),
1968
1969 IntrinsicFunction::MapCreate => todo!(),
1971 IntrinsicFunction::MapFromSlicePair => todo!(),
1972 IntrinsicFunction::MapHas => todo!(),
1973 IntrinsicFunction::MapRemove => todo!(),
1974 IntrinsicFunction::MapIter => todo!(),
1975 IntrinsicFunction::MapIterMut => todo!(),
1976 IntrinsicFunction::MapLen => todo!(),
1977 IntrinsicFunction::MapIsEmpty => todo!(),
1978 IntrinsicFunction::MapSubscript => todo!(),
1979 IntrinsicFunction::MapSubscriptSet => todo!(),
1980 IntrinsicFunction::MapSubscriptMut => todo!(),
1981 IntrinsicFunction::MapSubscriptMutCreateIfNeeded => todo!(),
1982
1983 IntrinsicFunction::Map2GetColumn => todo!(),
1984 IntrinsicFunction::Map2GetRow => todo!(),
1985 IntrinsicFunction::Map2Remove => todo!(),
1986 IntrinsicFunction::Map2Has => todo!(),
1987 IntrinsicFunction::Map2Get => todo!(),
1988 IntrinsicFunction::Map2Insert => todo!(),
1989 IntrinsicFunction::Map2Create => todo!(),
1990
1991 IntrinsicFunction::SparseAdd => todo!(),
1993 IntrinsicFunction::SparseNew => todo!(),
1994 IntrinsicFunction::SparseCreate => todo!(),
1995 IntrinsicFunction::SparseFromSlice => todo!(),
1996 IntrinsicFunction::SparseIter => todo!(),
1997 IntrinsicFunction::SparseIterMut => todo!(),
1998 IntrinsicFunction::SparseSubscript => todo!(),
1999 IntrinsicFunction::SparseSubscriptMut => todo!(),
2000 IntrinsicFunction::SparseHas => todo!(),
2001 IntrinsicFunction::SparseRemove => todo!(),
2002
2003 IntrinsicFunction::GridCreate => todo!(),
2005 IntrinsicFunction::GridFromSlice => todo!(),
2006 IntrinsicFunction::GridSet => todo!(),
2007 IntrinsicFunction::GridGet => todo!(),
2008 IntrinsicFunction::GridGetColumn => todo!(),
2009
2010 IntrinsicFunction::Float2Magnitude => todo!(),
2012 };
2013
2014 Ok(())
2015 }
2016
2017 fn gen_intrinsic_vec_create(&self, arguments: &Vec<ArgumentExpressionOrLocation>) {
2018 for arg in arguments {
2019 info!(?arg, "argument");
2020 }
2021 }
2022
    /// Emits a `vec_from_slice` instruction using the first argument as
    /// the source slice.
    ///
    /// NOTE(review): the element size and count are zero placeholders —
    /// confirm whether the VM derives them from the slice header or this
    /// is unfinished.
    ///
    /// # Panics
    /// Panics if the first argument is a location rather than an expression.
    fn gen_intrinsic_vec_from_slice(
        &mut self,
        arguments: &[ArgumentExpressionOrLocation],
        ctx: &Context,
    ) -> Result<(), Error> {
        if let ArgumentExpressionOrLocation::Expression(found_expr) = &arguments[0] {
            let memory = self.gen_expression_for_access(found_expr)?;
            self.state.builder.add_vec_from_slice(
                ctx.addr(),
                memory.addr,
                MemorySize(0),
                CountU16(0),
                "create vec",
            );
        } else {
            panic!("vec_from_slice");
        }

        Ok(())
    }
2043
    /// Emits a `match` expression: evaluates the scrutinee once, then for
    /// each arm emits a tag comparison, an optional guard, the arm body,
    /// and the jump plumbing to the shared exit.
    fn gen_match(&mut self, match_expr: &Match, ctx: &Context) -> Result<(), Error> {
        let region_to_match = self.gen_for_access_or_location(&match_expr.expression)?;

        let mut jump_to_exit_placeholders = Vec::new();

        // NOTE(review): both branches are identical — presumably the
        // wildcard case was meant to exclude trailing arms. Confirm intent.
        let arm_len_to_consider = if match_expr.contains_wildcard() {
            match_expr.arms.len()
        } else {
            match_expr.arms.len()
        };
        for (index, arm) in match_expr.arms.iter().enumerate() {
            // The last considered arm needs no jump-to-exit after its body.
            let is_last = index == arm_len_to_consider - 1;

            // Emit the arm's comparison (if any) and pick up its guard.
            let maybe_guard = match &arm.pattern {
                Pattern::Normal(normal_pattern, maybe_guard) => match normal_pattern {
                    NormalPattern::PatternList(_) => None,
                    NormalPattern::EnumPattern(enum_variant, maybe_patterns) => {
                        // Compare the scrutinee's tag byte to the variant index.
                        self.state.builder.add_eq_u8_immediate(
                            region_to_match.addr,
                            enum_variant.common().container_index,
                            "check for enum variant",
                        );
                        maybe_guard.as_ref()
                    }
                    NormalPattern::Literal(_) => {
                        todo!()
                    }
                },
                Pattern::Wildcard(_) => {
                    // Wildcard always matches; no comparison, no guard.
                    None
                }
            };

            let did_add_comparison = !matches!(arm.pattern, Pattern::Wildcard(_));

            // Skip this arm's body when the comparison failed.
            let maybe_skip_added = if did_add_comparison {
                Some(
                    self.state
                        .builder
                        .add_jmp_if_not_equal_placeholder("placeholder for enum match"),
                )
            } else {
                None
            };

            // A guard adds a second conditional skip over the arm body.
            let maybe_guard_skip = if let Some(guard) = maybe_guard {
                self.gen_boolean_expression(guard);
                Some(
                    self.state
                        .builder
                        .add_jmp_if_not_equal_placeholder("placeholder for skip guard"),
                )
            } else {
                None
            };

            self.gen_expression(&arm.expression, ctx)?;

            if !is_last {
                let jump_to_exit_placeholder =
                    self.state.builder.add_jump_placeholder("jump to exit");
                jump_to_exit_placeholders.push(jump_to_exit_placeholder);
            }

            // Failed comparison/guard lands here, at the next arm.
            if let Some(skip) = maybe_skip_added {
                self.state.builder.patch_jump_here(skip);
            }
            if let Some(guard_skip) = maybe_guard_skip {
                self.state.builder.patch_jump_here(guard_skip);
            }
        }

        // All taken arms jump here, past the whole match.
        for placeholder in jump_to_exit_placeholders {
            self.state.builder.patch_jump_here(placeholder);
        }

        Ok(())
    }
2126
2127 fn gen_guard(&mut self, guards: &Vec<Guard>, ctx: &Context) -> Result<(), Error> {
2128 let mut jump_to_exit_placeholders = Vec::new();
2129 for guard in guards {
2130 if let Some(condition) = &guard.condition {
2131 self.gen_boolean_expression(condition); let skip_expression_patch = self
2133 .state
2134 .builder
2135 .add_jmp_if_not_equal_placeholder("guard condition");
2136 self.gen_expression(&guard.result, ctx)?;
2137 let jump_to_exit_placeholder =
2138 self.state.builder.add_jump_placeholder("jump to exit");
2139 jump_to_exit_placeholders.push(jump_to_exit_placeholder);
2140 self.state.builder.patch_jump_here(skip_expression_patch);
2141 } else {
2142 self.gen_expression(&guard.result, ctx)?;
2144 }
2145 }
2146
2147 for placeholder in jump_to_exit_placeholders {
2148 self.state.builder.patch_jump_here(placeholder);
2149 }
2150
2151 Ok(())
2152 }
2153
2154 fn gen_when(
2155 &mut self,
2156 bindings: &Vec<WhenBinding>,
2157 true_expr: &Expression,
2158 maybe_false_expr: Option<&Expression>,
2159 ctx: &Context,
2160 ) -> Result<(), Error> {
2161 let mut all_false_jumps = Vec::new();
2162
2163 for binding in bindings {
2164 let (variable_region, _alignment) = self.get_variable_region(&binding.variable);
2165
2166 let old_variable_region = self.gen_for_access_or_location(&binding.expr)?;
2167
2168 self.state
2169 .builder
2170 .add_tst8(old_variable_region.addr, "check binding");
2171 let patch = self
2172 .state
2173 .builder
2174 .add_jmp_if_not_equal_placeholder("jump if none");
2175 all_false_jumps.push(patch);
2176 }
2177
2178 for binding in bindings {
2180 let (variable_region, alignment) = self.get_variable_region(&binding.variable);
2181
2182 if binding.has_expression() {
2183 let var_ctx = Context::new(variable_region);
2184 self.gen_mut_or_immute(&binding.expr, &var_ctx)?;
2185 } else {
2186 let ArgumentExpressionOrLocation::Expression(variable_access_expression) =
2187 &binding.expr.expression_or_location
2188 else {
2189 panic!("must be expression");
2190 };
2191 let old_variable_region =
2192 self.gen_expression_for_access(variable_access_expression)?;
2193 let alignment_offset: MemoryOffset = alignment.into();
2194 let some_value_region = FrameMemoryRegion::new(
2195 old_variable_region.addr.advance(alignment_offset),
2196 MemorySize(variable_region.size.0),
2197 );
2198 self.state.builder.add_movlp(
2199 variable_region.addr,
2200 some_value_region.addr,
2201 some_value_region.size,
2202 "move from Some to value",
2203 );
2204 }
2205 }
2206
2207 self.gen_expression(true_expr, ctx)?;
2208 let maybe_jump_over_false = if let Some(_else_expr) = maybe_false_expr {
2209 Some(
2210 self.state
2211 .builder
2212 .add_jump_placeholder("jump over false section"),
2213 )
2214 } else {
2215 None
2216 };
2217
2218 for false_jump_patch in all_false_jumps {
2219 self.state.builder.patch_jump_here(false_jump_patch);
2220 }
2221
2222 if let Some(else_expr) = maybe_false_expr {
2223 self.gen_expression(else_expr, ctx);
2224 self.state
2225 .builder
2226 .patch_jump_here(maybe_jump_over_false.unwrap());
2227 }
2228
2229 Ok(())
2230 }
2231
2232 fn gen_range(
2233 &mut self,
2234 start: &Expression,
2235 end: &Expression,
2236 mode: &RangeMode,
2237 ctx: &Context,
2238 ) -> Result<(), Error> {
2239 let start_ctx = ctx.with_offset(MemoryOffset(0), MemorySize(INT_SIZE));
2240 self.gen_expression(start, &start_ctx)?;
2241
2242 let end_ctx = ctx.with_offset(MemoryOffset(INT_SIZE), MemorySize(INT_SIZE));
2243 self.gen_expression(end, &end_ctx)?;
2244
2245 let mode_ctx = ctx.with_offset(MemoryOffset(INT_SIZE + INT_SIZE), MemorySize(BOOL_SIZE));
2246 let val = match &mode {
2247 RangeMode::Inclusive => 1u8,
2248 RangeMode::Exclusive => 0u8,
2249 };
2250 self.state
2251 .builder
2252 .add_ld8(mode_ctx.addr(), val, "range mode");
2253
2254 Ok(())
2255 }
2256
2257 fn create_err(&mut self, kind: ErrorKind, node: &Node) -> Error {
2258 error!(?kind, "encountered error");
2259 Error {
2260 kind,
2261 node: node.clone(),
2262 }
2263 }
2264
2265 fn gen_tuple_destructuring(
2266 &mut self,
2267 target_variables: &Vec<VariableRef>,
2268 tuple_type: &Vec<Type>,
2269 source_tuple_expression: &Expression,
2270 ) -> Result<(), Error> {
2271 let source_region = self.gen_expression_for_access(source_tuple_expression)?;
2272
2273 let (total_size, _max_alignment, element_offsets) = layout_tuple_elements(tuple_type);
2274 assert_eq!(total_size.0, source_region.size.0);
2275
2276 for (target_variable, (element_offset, element_size)) in
2277 target_variables.iter().zip(element_offsets)
2278 {
2279 if target_variable.is_unused {
2280 } else {
2281 let (target_region, _variable_alignment) =
2282 self.get_variable_region(target_variable);
2283 assert_eq!(target_region.size.0, element_size.0);
2284
2285 let source_element_region = FrameMemoryRegion::new(
2286 source_region.addr.advance(element_offset),
2287 element_size,
2288 );
2289 self.state.builder.add_mov(
2290 target_region.addr,
2291 source_element_region.addr,
2292 source_element_region.size,
2293 &format!(
2294 "destructuring to variable {}",
2295 target_variable.assigned_name
2296 ),
2297 );
2298 }
2299 }
2300
2301 Ok(())
2302 }
2303
2304 fn gen_constant_access(
2305 &mut self,
2306 constant_reference: &ConstantRef,
2307 ctx: &Context,
2308 ) -> Result<(), Error> {
2309 let constant_region = self
2310 .state
2311 .constant_offsets
2312 .get(&constant_reference.id)
2313 .unwrap();
2314 assert_eq!(constant_region.size.0, ctx.target_size().0);
2315
2316 self.state.builder.add_ld_constant(
2317 ctx.addr(),
2318 constant_region.addr,
2319 constant_region.size,
2320 &format!("load constant '{}'", constant_reference.assigned_name),
2321 );
2322
2323 Ok(())
2324 }
2325}
2326
2327fn single_intrinsic_fn(body: &Expression) -> Option<&IntrinsicFunction> {
2328 let ExpressionKind::Block(block_expressions) = &body.kind else {
2329 panic!("function body should be a block")
2330 };
2331
2332 if let ExpressionKind::IntrinsicCallEx(found_intrinsic_fn, _non_instantiated_arguments) =
2333 &block_expressions[0].kind
2334 {
2335 Some(found_intrinsic_fn)
2336 } else {
2337 None
2338 }
2339}
2340
2341fn struct_field_offset(
2342 index_to_look_for: usize,
2343 anon_struct_type: &AnonymousStructType,
2344) -> (MemoryOffset, MemorySize, MemoryAlignment) {
2345 let mut offset = MemoryOffset(0);
2346 for (field_index, (_name, field)) in
2347 anon_struct_type.field_name_sorted_fields.iter().enumerate()
2348 {
2349 let (field_size, field_alignment) = type_size_and_alignment(&field.field_type);
2350 let field_start_offset = offset.space(field_size, field_alignment);
2351 if field_index == index_to_look_for {
2352 return (field_start_offset, field_size, field_alignment);
2353 }
2354 }
2355
2356 panic!("field index is wrong")
2357}