1mod stack_frame;
4#[macro_use] mod macros;
5pub mod error;
6mod util;
7mod binding_state;
8
9use crate::prelude::*;
10use crate::{StackAddress, StackOffset, ItemIndex, VariantIndex, STACK_ADDRESS_TYPE};
11use crate::shared::{BindingContainer, TypeContainer, numeric::Numeric, meta::{Type, ImplTrait, Struct, Array, Enum, Function, FunctionKind, BuiltinGroup, Binding}, typed_ids::{BindingId, FunctionId, TypeId}};
12use crate::frontend::{ast::{self, Typeable, TypeName, Returns}, resolver::resolved::{ResolvedProgram, IdMappings}};
13use crate::bytecode::{Constructor, Writer, StoreConst, Program, VMFunc, ARG1, ARG2, ARG3, runtime::heap::HeapRefOp};
14use stack_frame::{Local, StackFrame, StackFrames, LocalOrigin};
15use error::{CompileError, CompileErrorKind, CompileResult};
16use util::CallInfo;
17use binding_state::{BindingState, BranchingKind, BranchingPath};
18
/// Bytecode compiler state, threaded through all `compile_*` methods.
struct Compiler<T> {
    /// Bytecode writer that instructions and constant data are emitted into.
    writer: Writer<T>,
    /// Type/binding/function metadata produced by the resolver.
    id_mappings: IdMappings,
    /// Stack of function stack frames; maps binding ids to local slots.
    locals: StackFrames,
    /// Maps a type id to the const-pool address of the type's constructor.
    constructors: UnorderedMap<TypeId, StackAddress>,
    /// Call information (address, argument size) for already compiled functions.
    functions: UnorderedMap<FunctionId, CallInfo>,
    /// Addresses of call instructions emitted before their target function was
    /// compiled; patched by `fix_targets` once the address is known.
    call_placeholder: UnorderedMap<FunctionId, Vec<StackAddress>>,
    /// Tracks which bindings are initialized across branching code paths.
    init_state: BindingState,
    /// Path of the module currently being compiled; used in error construction.
    module_path: String,
    /// Index of each trait function within a vtable row.
    trait_function_indices: UnorderedMap<FunctionId, ItemIndex>,
    /// Index of each trait-implementing type within the implementor table.
    trait_implementor_indices: UnorderedMap<TypeId, ItemIndex>,
    /// Const-pool offset at which the trait vtable starts (implementor
    /// constructor addresses are stored before it).
    trait_vtable_base: StackAddress,
    /// Maps trait function ids to implementing function ids (built by
    /// `map_trait_function_implementors`).
    trait_function_implementors: UnorderedMap<FunctionId, FunctionId>,
}
46
/// Compiles a resolved program into a bytecode `Program`.
///
/// Lays out implementor-constructor and vtable data in the constant pool,
/// compiles all module statements, fills in the vtable entries once function
/// addresses are known, and finally patches the initial call to target the
/// entry function.
pub fn compile<T>(program: ResolvedProgram<T>) -> CompileResult<Program<T>> where T: VMFunc<T> {

    let ResolvedProgram { modules, id_mappings, entry_fn, .. } = program;

    // Collect trait functions and implementors, then select for each
    // (trait function, implementor) pair the implementation to go in the vtable.
    let trait_functions = Compiler::<T>::filter_trait_functions(&id_mappings);
    let trait_implementors: Vec<_> = id_mappings.implementors().collect();

    let trait_function_implementations = Compiler::<T>::select_trait_function_implementations(&trait_functions, &trait_implementors);

    let mut compiler = Compiler {
        writer                      : Writer::new(),
        locals                      : StackFrames::new(),
        functions                   : UnorderedMap::new(),
        call_placeholder            : UnorderedMap::new(),
        constructors                : UnorderedMap::new(),
        init_state                  : BindingState::new(),
        module_path                 : "".to_string(),
        // One constructor-address slot per implementor precedes the vtable proper.
        trait_vtable_base           : (trait_implementors.len() * size_of::<StackAddress>()) as StackAddress,
        trait_function_indices      : Compiler::<T>::enumerate_trait_function_indices(&trait_functions),
        trait_implementor_indices   : Compiler::<T>::enumerate_trait_implementor_indices(&trait_implementors),
        trait_function_implementors : Compiler::<T>::map_trait_function_implementors(&trait_functions, &trait_implementors),
        id_mappings                 : id_mappings,
    };

    // Reserve const-pool space for the implementor table plus the full
    // function-by-implementor vtable matrix.
    let vtable_size = compiler.trait_function_indices.len() * compiler.trait_implementor_indices.len() * size_of::<StackAddress>();
    compiler.writer.reserve_const_data(compiler.trait_vtable_base + vtable_size as StackAddress);
    if compiler.writer.position() == 0 {
        // NOTE(review): writes a filler byte while the writer is still at
        // position 0 — presumably so nothing meaningful ends up at address 0;
        // confirm against Writer's documentation.
        compiler.writer.store_const(101 as u8);
    }
    // Store a constructor for each non-primitive, non-trait type and record
    // implementor constructor addresses in the reserved table.
    for (type_id, ty) in compiler.id_mappings.types() {
        if !ty.is_primitive() && !ty.as_trait().is_some() {
            let position = compiler.store_constructor(type_id);
            compiler.constructors.insert(type_id, position);
            if let Some(&implementor_index) = compiler.trait_implementor_indices.get(&type_id) {
                compiler.writer.update_const((implementor_index as usize * size_of::<StackAddress>()) as StackAddress, position);
            }
        }
    }

    // Placeholder call to the entry function (patched below), then program exit.
    let initial_pos = compiler.writer.call(123, 0);
    compiler.writer.exit();

    // Compile all statements of all modules.
    for module in modules {
        compiler.module_path = module.path.clone();
        for statement in module.iter() {
            compiler.compile_statement(statement)?;
        }
    }

    // Function addresses are now known: fill in the selected vtable entries.
    for (implementor_index, selected_function_id) in trait_function_implementations {
        if let Some(selected_function_id) = selected_function_id {
            let selected_function_offset = compiler.functions.get(&selected_function_id).expect("Missing function callinfo").addr;
            let vtable_function_offset = compiler.vtable_function_offset(selected_function_id);
            compiler.writer.update_const(vtable_function_offset + (implementor_index * size_of::<StackAddress>()) as StackAddress, selected_function_offset);
        }
    }

    // Patch the initial placeholder call to target the entry function.
    let &entry_call = compiler.functions.get(&entry_fn).expect("Failed to locate entry function in generated code.");
    compiler.writer.overwrite(initial_pos, |w| w.call(entry_call.addr, entry_call.arg_size));

    Ok(compiler.writer.into_program())
}
153
154impl<T> Compiler<T> where T: VMFunc<T> {
156
    /// Compiles a single statement, discarding any value an expression-like
    /// statement leaves on the stack.
    fn compile_statement(self: &mut Self, item: &ast::Statement) -> CompileResult {
        use self::ast::Statement as S;
        match item {
            // Pure declarations: no code generated.
            S::StructDef(_) => Ok(()),
            S::Module(_) => Ok(()),
            S::Use(_) => Ok(()),
            S::EnumDef(_) => Ok(()),
            S::Return(_) => unreachable!("Return AST nodes should have been rewritten"),
            S::Function(function) => self.compile_function(function),
            S::ImplBlock(impl_block) => {
                for function in &impl_block.functions {
                    self.compile_function(function)?;
                    // For trait impls, verify each function against the trait's declaration.
                    if let Some(TypeName { type_id, .. }) = impl_block.trt {
                        let trait_type_id = type_id.expect("Unresolved trait encountered");
                        let trt = self.type_by_id(trait_type_id).as_trait().unwrap();
                        let function_id = function.function_id;
                        let function_name = &function.sig.ident.name;
                        // The trait function may be provided (default impl) or merely required.
                        let trait_function_id = trt.provided.get(function_name).or(trt.required.get(function_name)).unwrap();
                        let trait_function = self.id_mappings.function(trait_function_id.unwrap());
                        let impl_function = self.id_mappings.function(function_id.unwrap());
                        if !self.is_compatible_function(trait_function, impl_function) {
                            return Err(CompileError::new(function, CompileErrorKind::IncompatibleTraitMethod(function_name.clone()), &self.module_path));
                        }
                    }
                }
                Ok(())
            }
            S::TraitDef(trait_def) => {
                // Only trait functions with a default implementation get compiled here.
                for function in &trait_def.functions {
                    if function.block.is_some() {
                        self.compile_function(function)?;
                    }
                }
                Ok(())
            },
            S::Binding(binding) => self.compile_binding(binding),
            S::IfBlock(if_block) => {
                self.compile_if_block(if_block)?;
                // Statement position: discard a result value the if-block may produce.
                if let Some(result) = &if_block.if_block.result {
                    let result_type = self.item_type(result);
                    self.write_discard(result_type);
                }
                Ok(())
            }
            S::ForLoop(for_loop) => self.compile_for_loop(for_loop),
            S::WhileLoop(while_loop) => self.compile_while_loop(while_loop),
            S::Block(block) => {
                self.compile_block(block)?;
                // Statement position: discard a result value the block may produce.
                if let Some(result) = &block.result {
                    let result_type = self.item_type(result);
                    self.write_discard(result_type);
                }
                Ok(())
            }
            S::Expression(expression) => {
                self.compile_expression(expression)?;
                // Expression statements always discard their result.
                let result_type = self.item_type(expression);
                self.write_discard(result_type);
                Ok(())
            }
        }
    }
222
223 fn compile_expression(self: &mut Self, item: &ast::Expression) -> CompileResult {
225 use self::ast::Expression as E;
226 match item {
227 E::Literal(literal) => self.compile_literal(literal),
228 E::Variable(variable) => self.compile_variable(variable),
229 E::Member(_) => Ok(()),
230 E::Call(call) => self.compile_call(call),
231 E::Assignment(assignment) => self.compile_assignment(assignment),
232 E::BinaryOp(binary_op) => self.compile_binary_op(binary_op),
233 E::UnaryOp(unary_op) => self.compile_unary_op(unary_op),
234 E::Cast(cast) => self.compile_cast(cast),
235 E::Block(block) => self.compile_block(block),
236 E::IfBlock(if_block) => self.compile_if_block(if_block),
237 E::MatchBlock(match_block) => self.compile_match_block(match_block),
238 }
239 }
240
241 fn compile_assignment(self: &mut Self, item: &ast::Assignment) -> CompileResult {
243 comment!(self, "{}", item);
244 match item.left {
245 ast::Expression::Variable(_) => self.compile_assignment_to_var(item),
246 ast::Expression::BinaryOp(_) => self.compile_assignment_to_offset(item),
247 _ => panic!("cannot assign to left expression"),
248 }
249 }
250
    /// Compiles an assignment that directly targets a variable
    /// (e.g. `x = v` or the compound forms `x += v`, `x -= v`, ...).
    fn compile_assignment_to_var(self: &mut Self, item: &ast::Assignment) -> CompileResult {
        use crate::frontend::ast::BinaryOperator as BO;
        comment!(self, "direct assignment");
        let binding_id = item.left.as_variable().unwrap().binding_id.unwrap();
        let local = self.locals.lookup(binding_id);
        match item.op {
            BO::Assign => {
                // Plain assignment: evaluate the right side, store into the local,
                // and mark the binding initialized.
                self.compile_expression(&item.right)?;
                self.write_storex(local, &item.left, binding_id);
                self.init_state.initialize(binding_id);
            },
            _ => {
                // Compound assignment reads the current value, so the target must
                // already be initialized.
                if !self.init_state.initialized(binding_id) {
                    let variable = item.left.as_variable().unwrap();
                    return Err(CompileError::new(item, CompileErrorKind::Uninitialized(variable.ident.name.clone()), &self.module_path));
                }
                let ty = self.item_type(&item.left);
                // Load current value, evaluate the right side, apply the operator.
                self.write_load(local.index as StackOffset, ty);
                self.compile_expression(&item.right)?;
                let ty = self.item_type(&item.left);
                match item.op {
                    BO::AddAssign => self.write_add(ty),
                    BO::SubAssign => self.write_sub(ty),
                    BO::MulAssign => self.write_mul(ty),
                    BO::DivAssign => self.write_div(ty),
                    BO::RemAssign => self.write_rem(ty),
                    op @ _ => unreachable!("Invalid assignment operator {}", op),
                };
                // Store the computed value back into the target variable.
                self.write_storex(local, &item.left, binding_id);
            },
        };
        Ok(())
    }
285
    /// Compiles an assignment whose target is a heap location produced by an
    /// index or member-access expression (e.g. `a[0] = v`, `s.field += v`).
    fn compile_assignment_to_offset(self: &mut Self, item: &ast::Assignment) -> CompileResult {
        use crate::frontend::ast::BinaryOperator as BO;
        comment!(self, "offset assignment");
        match item.op {
            BO::Assign => {
                // Push the target reference, then the value, then store to the heap.
                self.compile_expression(&item.left)?;
                self.compile_expression(&item.right)?;
                let ty = self.item_type(&item.left);
                self.write_heap_putx(ty, false);
            },
            _ => {
                // Compound assignment: duplicate the target reference so it serves
                // both the read and the subsequent write-back.
                self.compile_expression(&item.left)?;
                self.write_clone_ref();
                let ty = self.item_type(&item.left);
                self.write_heap_fetch(ty);
                self.compile_expression(&item.right)?;
                let ty = self.item_type(&item.left);
                match item.op {
                    BO::AddAssign => self.write_add(ty),
                    BO::SubAssign => self.write_sub(ty),
                    BO::MulAssign => self.write_mul(ty),
                    BO::DivAssign => self.write_div(ty),
                    BO::RemAssign => self.write_rem(ty),
                    _ => unreachable!("Unsupported assignment operator encountered"),
                };
                // Write the result back through the duplicated reference.
                self.write_heap_putx(ty, false);
            },
        }
        Ok(())
    }
317
318 fn compile_call_args(self: &mut Self, function: &Function, item: &ast::Call) -> CompileResult {
320 for (_index, arg) in item.args.iter().enumerate() {
323 comment!(self, "{}() arg {}", item.ident.name, arg);
324 self.compile_expression(arg)?;
325 match function.kind.unwrap() {
326 FunctionKind::Method(_) | FunctionKind::Function => {
327 self.item_cnt(arg, true, HeapRefOp::Inc);
328 },
329 _ => { }
330 }
331 }
332 Ok(())
333 }
334
    /// Compiles a function call, dispatching on the kind of the resolved target.
    fn compile_call(self: &mut Self, item: &ast::Call) -> CompileResult {
        comment!(self, "prepare {}() args", item.ident.name);
        let function_id = item.function_id.expect("Unresolved function encountered");
        let function = self.id_mappings.function(function_id).clone();
        match function.kind.unwrap() {
            FunctionKind::Rust(rust_fn_index) => {
                // Native Rust function registered with the VM.
                self.compile_call_args(&function, item)?;
                comment!(self, "call {}()", item.ident.name);
                self.writer.rustcall(T::from_index(rust_fn_index));
            },
            FunctionKind::Builtin(type_id, builtin_group) => {
                // Builtin operation emitted inline as bytecode.
                self.compile_call_args(&function, item)?;
                comment!(self, "call {}()", item.ident.name);
                self.write_builtin(builtin_group, self.type_by_id(type_id));
            },
            FunctionKind::Method(object_type_id) => {
                self.compile_call_args(&function, item)?;
                if self.type_by_id(object_type_id).as_trait().is_some() {
                    // Trait method: dynamic dispatch through the vtable.
                    let function_offset = self.vtable_function_offset(function_id);
                    let function_arg_size = self.id_mappings.function_arg_size(function_id);
                    comment!(self, "call {}()", item.ident.name);
                    self.writer.vcall(function_offset, function_arg_size);
                } else {
                    // Concrete method: static call.
                    comment!(self, "call {}()", item.ident.name);
                    self.write_call(function_id);
                }
            },
            FunctionKind::Function => {
                self.compile_call_args(&function, item)?;
                comment!(self, "call {}()", item.ident.name);
                self.write_call(function_id);
            },
            FunctionKind::Variant(type_id, variant_index) => {
                // Enum variant constructor: push the variant index, then the
                // arguments, then upload the assembled value to the heap.
                let index_type = Type::unsigned(size_of::<VariantIndex>());
                self.write_literal_numeric(Numeric::Unsigned(variant_index as u64), &index_type);
                self.compile_call_args(&function, item)?;
                let function_id = item.function_id.expect("Unresolved function encountered");
                let arg_size = self.id_mappings.function_arg_size(function_id);
                self.writer.upload(arg_size + index_type.primitive_size() as StackAddress, *self.trait_implementor_indices.get(&type_id).unwrap_or(&0));
            },
        }
        Ok(())
    }
381
382 fn compile_binding(self: &mut Self, item: &ast::Binding) -> CompileResult {
384 let binding_id = item.binding_id.expect("Unresolved binding encountered");
385 self.init_state.activate(binding_id);
386 if let Some(expr) = &item.expr {
387 comment!(self, "let {} = ...", item.ident.name);
388 self.compile_expression(expr)?;
389 let local = self.locals.lookup(binding_id);
390 self.write_storex(local, item, binding_id);
391 self.init_state.initialize(binding_id);
392 }
393 Ok(())
394 }
395
396 fn compile_variable(self: &mut Self, item: &ast::Variable) -> CompileResult {
398 comment!(self, "variable {}", item);
399 let load_index = {
400 let binding_id = item.binding_id.expect("Unresolved binding encountered");
401 let local = self.locals.lookup(binding_id);
402 if !self.init_state.initialized(binding_id) {
403 return Err(CompileError::new(item, CompileErrorKind::Uninitialized(item.ident.name.clone()), &self.module_path));
404 }
405 local.index
406 };
407 self.write_load(load_index as StackOffset, self.item_type(item));
408 Ok(())
409 }
410
411 fn compile_if_only_block(self: &mut Self, item: &ast::IfBlock) -> CompileResult {
413 comment!(self, "{}", item);
414 let exit_jump = self.writer.j0(123);
415 self.init_state.push(BranchingKind::Double);
416 let result = self.compile_block(&item.if_block);
417 self.init_state.pop();
418 let exit_target = self.writer.position();
419 self.writer.overwrite(exit_jump, |w| w.j0(exit_target));
420 result
421 }
422
    /// Compiles an if-block with an else-branch. The condition value is
    /// expected to already be on the stack.
    fn compile_if_else_block(self: &mut Self, if_block: &ast::Block, else_block: &ast::Block) -> CompileResult {

        // Placeholder jump to the else-branch taken when the condition is false.
        let else_jump = self.writer.j0(123);
        self.init_state.push(BranchingKind::Double);
        self.init_state.set_path(BranchingPath::A);
        // Result of the if-branch is propagated at the end so that branch
        // bookkeeping below still runs.
        let result = self.compile_block(if_block);
        // If the if-branch always returns, no jump over the else-branch is needed.
        let exit_jump = if !if_block.returns() {
            Some(self.writer.jmp(123))
        } else {
            None
        };

        let else_target = self.writer.position();
        self.init_state.set_path(BranchingPath::B);
        self.compile_block(else_block)?;
        self.init_state.pop();

        let exit_target = self.writer.position();

        // Back-patch both placeholder jumps now that their targets are known.
        self.writer.overwrite(else_jump, |w| w.j0(else_target));
        if let Some(exit_jump) = exit_jump {
            self.writer.overwrite(exit_jump, |w| w.jmp(exit_target));
        }

        result
    }
451
452 fn compile_if_block(self: &mut Self, item: &ast::IfBlock) -> CompileResult {
454
455 self.compile_expression(&item.cond)?;
457
458 if item.else_block.is_none() {
459 self.compile_if_only_block(item)
460 } else {
461 self.compile_if_else_block(&item.if_block, item.else_block.as_ref().unwrap())
462 }
463 }
464
465 fn compile_match_arm(self: &mut Self, exit_jumps: &mut Vec<StackAddress>, block: &ast::Block) -> CompileResult {
467 self.compile_block(block)?;
468 if !block.returns() {
469 exit_jumps.push(self.writer.jmp(123));
470 }
471 Ok(())
472 }
473
    /// Recursively compiles the arms of a match block as a chain of nested
    /// two-way branches so binding-initialization state is tracked per path:
    /// the first remaining arm is path A, all following arms together form path B.
    fn compile_match_block_recursive(self: &mut Self, exit_jumps: &mut Vec<StackAddress>, remaining_branches: &[(ast::Pattern, ast::Block)]) -> CompileResult {
        if remaining_branches.len() == 1 {
            // Last arm: a single, non-branching path.
            self.init_state.push(BranchingKind::Single);
            self.compile_match_arm(exit_jumps, &remaining_branches[0].1)?;
            self.init_state.pop();
        } else if remaining_branches.len() > 1 {
            self.init_state.push(BranchingKind::Double);
            self.init_state.set_path(BranchingPath::A);
            self.compile_match_arm(exit_jumps, &remaining_branches[0].1)?;
            self.init_state.set_path(BranchingPath::B);
            // Remaining arms are handled recursively as the B path.
            self.compile_match_block_recursive(exit_jumps, &remaining_branches[1..])?;
            self.init_state.pop();
        }
        Ok(())
    }
490
491 fn compile_match_block(self: &mut Self, item: &ast::MatchBlock) -> CompileResult {
493 comment!(self, "{}", item);
494 self.compile_expression(&item.expr)?;
495 let mut exit_jumps = Vec::new();
496 self.compile_match_block_recursive(&mut exit_jumps, &item.branches)?;
497 let exit_target = self.writer.position();
499 while let Some(exit_jump) = exit_jumps.pop() {
500 self.writer.overwrite(exit_jump, |w| w.jmp(exit_target));
501 }
502 Ok(())
503 }
504
505 fn compile_while_loop(self: &mut Self, item: &ast::WhileLoop) -> CompileResult {
507 comment!(self, "{}", item);
508 let start_target = self.writer.position();
509 self.compile_expression(&item.expr)?;
510 let exit_jump = self.writer.j0(123);
511 self.compile_block(&item.block)?;
512 self.writer.jmp(start_target);
513 let exit_target = self.writer.position();
514 self.writer.overwrite(exit_jump, |w| w.j0(exit_target));
515 Ok(())
516 }
517
    /// Compiles a for-in-range loop (`for i in a..b` or `for i in a..=b`).
    fn compile_for_loop_range(self: &mut Self, item: &ast::ForLoop, iter_local: Local, iter_type_id: TypeId) -> CompileResult {
        comment!(self, "for in range");
        let binary_op = item.expr.as_binary_op().unwrap();
        // Evaluate the lower bound and store it in the iteration variable.
        self.compile_expression(&binary_op.left)?;
        let iter_ty = self.type_by_id(iter_type_id);
        self.write_store(iter_local, iter_ty);
        // Evaluate the upper bound; it stays on the stack for the loop's duration.
        self.compile_expression(&binary_op.right)?;
        let iter_ty = self.type_by_id(iter_type_id);
        // Initial check: load iteration variable and (via negative stack offset,
        // presumably the stored upper bound — TODO confirm) the loop limit.
        self.write_load(iter_local.index as StackOffset, iter_ty);
        self.write_load(-(2 * iter_ty.primitive_size() as StackOffset), iter_ty);
        // Exclusive ranges compare with <, inclusive ranges with <=.
        if binary_op.op == ast::BinaryOperator::Range {
            self.write_lt(iter_ty);
        } else {
            self.write_lte(iter_ty);
        }
        // Skip the loop entirely when the initial check fails.
        let exit_jump = self.writer.j0(123);
        let start_target = self.writer.position();
        self.compile_block(&item.block)?;
        let iter_ty = self.type_by_id(iter_type_id);
        // Increment the iteration variable, then repeat the bound check.
        self.write_preinc(iter_local.index as StackOffset, iter_ty);
        self.write_load(-(2 * iter_ty.primitive_size() as StackOffset), iter_ty);
        if binary_op.op == ast::BinaryOperator::Range {
            self.write_lt(iter_ty);
        } else {
            self.write_lte(iter_ty);
        }
        self.writer.jn0(start_target);
        let exit_target = self.writer.position();
        self.writer.overwrite(exit_jump, |w| w.j0(exit_target));
        // Remove the upper bound from the stack.
        self.write_discard(iter_ty);
        Ok(())
    }
558
    /// Compiles a for-in-array loop. Uses a remaining-elements counter that is
    /// decremented each iteration and `write_heap_tail_element_nc`, i.e. the
    /// array appears to be walked from the tail — TODO confirm element order.
    fn compile_for_loop_array(self: &mut Self, item: &ast::ForLoop, element_local: Local, element_type_id: TypeId) -> CompileResult {
        comment!(self, "for in array");
        let element_ty = self.type_by_id(element_type_id);
        // Constructor addresses are needed for refcounting; 0 for non-ref elements.
        let element_constructor = if element_ty.is_ref() { self.get_constructor(element_ty) } else { 0 };
        let array_ty = self.item_type(&item.expr);
        let array_constructor = self.get_constructor(array_ty);

        // Evaluate the array and keep it alive for the duration of the loop.
        self.compile_expression(&item.expr)?;
        self.write_cnt_nc(array_constructor, HeapRefOp::Inc);
        self.write_clone_ref();
        let array_ty = self.item_type(&item.expr);
        // Push the element count; skip the loop entirely when it is zero.
        self.write_builtin(BuiltinGroup::ArrayLen, array_ty);
        let exit_jump = self.writer.j0_sa_nc(123);
        // Loop head: decrement the remaining-elements counter, fetch the element.
        let loop_start = self.write_dec(&STACK_ADDRESS_TYPE);
        let element_ty = self.type_by_id(element_type_id);
        self.write_heap_tail_element_nc(array_ty, element_ty);
        if element_ty.is_ref() {
            // Reference elements are kept alive while stored in the loop variable.
            self.write_clone(element_ty);
            self.write_cnt_nc(element_constructor, HeapRefOp::Inc);
        }

        self.write_store(element_local, element_ty);
        self.compile_block(&item.block)?;
        let element_ty = self.type_by_id(element_type_id);
        if element_ty.is_ref() {
            // Release the element reference after the body ran.
            self.write_cnt(element_constructor, HeapRefOp::Dec);
        }

        // Loop while the counter is non-zero.
        self.writer.jn0_sa_nc(loop_start);

        let exit_target = self.writer.position();
        self.writer.overwrite(exit_jump, |w| w.j0_sa_nc(exit_target));
        // Drop the counter and release the array reference.
        self.write_discard(&STACK_ADDRESS_TYPE);
        self.write_cnt(array_constructor, HeapRefOp::Dec);
        Ok(())
    }
603
    /// Compiles a for-in loop, choosing range- or array-iteration codegen
    /// based on the shape of the iterated expression.
    fn compile_for_loop(self: &mut Self, item: &ast::ForLoop) -> CompileResult {
        use ast::{Expression, BinaryOperator as Op};
        let binding_id = item.iter.binding_id.expect("Unresolved binding encountered");
        self.init_state.push(BranchingKind::Single);
        let iter_local = self.locals.lookup(binding_id);
        // The loop variable is initialized by the loop construct itself.
        self.init_state.initialize(binding_id);
        let iter_type_id = item.iter.type_id(self).unwrap();
        // Result is captured so init-state cleanup below always runs.
        let result = match &item.expr {
            // `a..b` / `a..=b` use counting-loop codegen.
            Expression::BinaryOp(bo) if bo.op == Op::Range || bo.op == Op::RangeInclusive => {
                self.compile_for_loop_range(item, iter_local, iter_type_id)
            },
            // Array-valued expressions use array-iteration codegen.
            Expression::Block(_) | Expression::Call(_) | Expression::IfBlock(_) | Expression::Literal(_) | Expression::Variable(_) => {
                self.compile_for_loop_array(item, iter_local, iter_type_id)
            },
            _ => Err(CompileError::new(item, CompileErrorKind::Internal, &self.module_path))
        };
        self.init_state.pop();
        result
    }
626
    /// Compiles a function definition: lays out its stack frame, compiles the
    /// body, emits the reference-count epilogue for arguments/return value and
    /// the return instruction matching the return size.
    fn compile_function(self: &mut Self, item: &ast::Function) -> CompileResult {

        let position = self.writer.position();
        comment!(self, "\nfn {}", item.sig.ident.name);

        self.init_state.push(BranchingKind::Single);
        let mut frame = StackFrame::new();
        // Return size in bytes; 0 when the function has no return value.
        frame.ret_size = item.sig.ret.as_ref().map_or(0, |ret| self.item_type(ret).primitive_size());
        // Arguments occupy the low end of the frame and are caller-initialized.
        for arg in item.sig.args.iter() {
            frame.insert(arg.binding_id.unwrap(), frame.arg_pos, LocalOrigin::Argument);
            self.init_state.initialize(arg.binding_id.unwrap());
            frame.arg_pos += self.item_type(arg).primitive_size() as StackAddress;
        }

        // Locals start after the arguments plus two stack-address-sized slots
        // (presumably return address and previous frame pointer — TODO confirm).
        frame.var_pos = frame.arg_pos + size_of::<StackAddress>() as StackAddress * 2;
        let arg_size = frame.arg_pos;
        let ret_size = frame.ret_size;
        // Walk the body to assign local slot offsets, then reserve their space.
        self.create_stack_frame_block(item.block.as_ref().unwrap(), &mut frame);
        let var_size = frame.var_pos - (frame.arg_pos + size_of::<StackAddress>() as StackAddress * 2);
        if var_size > 0 {
            self.writer.reserve(var_size as u8);
        }

        // Register the function's call info and patch call sites that were
        // emitted before the address was known.
        let function_id = item.function_id.unwrap();
        let call_info = CallInfo { addr: position, arg_size: frame.arg_pos };
        self.functions.insert(function_id, call_info);
        self.fix_targets(function_id, call_info);

        self.locals.push(frame);
        self.compile_block(item.block.as_ref().unwrap())?;
        self.init_state.pop();
        let mut frame = self.locals.pop();

        // All early exits (returning blocks) jump to this shared exit code.
        let exit_address = self.writer.position();
        while let Some(jmp_address) = frame.exit_placeholder.pop() {
            self.writer.overwrite(jmp_address, |w| w.jmp(exit_address));
        }

        // Keep the return value alive while reference-typed arguments are released.
        if let Some(ret) = &item.sig.ret {
            self.item_cnt(ret, true, HeapRefOp::Inc);
        }
        for arg in item.sig.args.iter() {
            let ty = self.item_type(arg);
            if ty.is_ref() {
                let local = frame.lookup(arg.binding_id.unwrap());
                comment!(self, "freeing argument {}", local.index);
                self.write_load(local.index as StackOffset, ty);
                self.write_cnt(self.get_constructor(ty), HeapRefOp::Dec);
            }
        }
        // Undo the extra count on the return value without freeing it.
        if let Some(ret) = &item.sig.ret {
            self.item_cnt(ret, true, HeapRefOp::DecNoFree);
        }

        comment!(self, "exiting fn {}", item.sig.ident.name);
        // Select the return instruction by the primitive size of the return value.
        match ret_size {
            0 => self.writer.ret0(arg_size),
            1 => self.writer.ret8(arg_size),
            2 => self.writer.ret16(arg_size),
            4 => self.writer.ret32(arg_size),
            8 => self.writer.ret64(arg_size),
            _ => unreachable!(),
        };

        Ok(())
    }
704
    /// Compiles a block, releasing block locals at the end and keeping a
    /// returned or resulting value alive across that release.
    fn compile_block(self: &mut Self, item: &ast::Block) -> CompileResult {
        self.init_state.push(BranchingKind::Single);
        for statement in item.statements.iter() {
            self.compile_statement(statement)?;
        }
        if let Some(returns) = &item.returns {
            comment!(self, "block returning");
            self.compile_expression(returns)?;
            // Temporarily bump the value's refcount so releasing block locals
            // cannot free it, then undo the bump without freeing.
            self.item_cnt(returns, true, HeapRefOp::Inc);
            self.decref_block_locals();
            self.item_cnt(returns, true, HeapRefOp::DecNoFree);
            // Jump to the function's shared exit code; target patched later.
            let exit_jump = self.writer.jmp(123);
            self.locals.add_exit_placeholder(exit_jump);
        } else if let Some(result) = &item.result {
            comment!(self, "block resulting");
            self.compile_expression(result)?;
            // Same keep-alive sequence for the block's result value.
            self.item_cnt(result, true, HeapRefOp::Inc);
            self.decref_block_locals();
            self.item_cnt(result, true, HeapRefOp::DecNoFree);
        } else {
            comment!(self, "block ending");
            self.decref_block_locals();
        }

        self.init_state.pop();
        Ok(())
    }
736
    /// Compiles a literal. Primitives are written inline; constant compound
    /// literals are constructed from a stored prototype, and non-constant ones
    /// are built on the stack and uploaded to the heap.
    fn compile_literal(self: &mut Self, item: &ast::Literal) -> CompileResult {
        use crate::frontend::ast::LiteralValue;
        comment!(self, "{}", item);
        let ty = self.item_type(item);
        match item.value {
            LiteralValue::Numeric(numeric) => self.write_literal_numeric(numeric, ty),
            LiteralValue::Bool(v) => {
                match ty {
                    // Booleans are stored as a single byte (1 or 0).
                    Type::bool => { if v { self.writer.one8(); } else { self.writer.zero8(); } },
                    _ => panic!("Unexpected boolean literal type: {:?}", ty)
                };
            },
            // Variants of enums backed by a primitive compile to their numeric value.
            LiteralValue::Variant(ref variant) if ty.as_enum().map_or(false, |e| e.primitive.is_some()) => {
                let enum_def = ty.as_enum().expect("Encountered non-enum type on enum variant");
                let enum_ty = self.type_by_id(enum_def.primitive.unwrap().0);
                let variant_value = enum_def.variant_value(&variant.ident.name).unwrap();
                self.write_literal_numeric(variant_value, enum_ty);
            },
            LiteralValue::Array(_) | LiteralValue::Struct(_) | LiteralValue::String(_) | LiteralValue::Variant(_) => {
                if item.value.is_const() {
                    // Fully constant: store a prototype in the const pool and
                    // construct runtime instances from it.
                    let constructor = self.get_constructor(ty);
                    let prototype = self.store_literal_prototype(item);
                    self.writer.construct(constructor, prototype);
                } else {
                    // Contains non-constant parts: build the value on the stack
                    // and upload it to the heap.
                    let type_id = item.type_id(self).unwrap();
                    let size = self.write_literal_prototype_builder(item)?;
                    self.writer.upload(size, *self.trait_implementor_indices.get(&type_id).unwrap_or(&0));
                }
            },
        }
        Ok(())
    }
773
    /// Compiles a unary operation: logical not, and pre/post increment/decrement
    /// of either a stack variable or a heap location.
    fn compile_unary_op(self: &mut Self, item: &ast::UnaryOp) -> CompileResult {
        use crate::frontend::ast::{UnaryOperator as UO, BinaryOperator};
        match item.op {
            UO::Not => {
                self.compile_expression(&item.expr)?;
                comment!(self, "{}", item);
                self.writer.not();
            }
            UO::IncBefore | UO::DecBefore | UO::IncAfter | UO::DecAfter => {
                if let ast::Expression::Variable(var) = &item.expr {
                    // In-place increment/decrement of a stack variable.
                    comment!(self, "{}", item);
                    let load_index = {
                        let binding_id = var.binding_id.expect("Unresolved binding encountered");
                        self.locals.lookup(binding_id).index
                    };
                    let exp_type = self.item_type(&item.expr);
                    match item.op {
                        UO::IncBefore => self.write_preinc(load_index as StackOffset, &exp_type),
                        UO::DecBefore => self.write_predec(load_index as StackOffset, &exp_type),
                        UO::IncAfter => self.write_postinc(load_index as StackOffset, &exp_type),
                        UO::DecAfter => self.write_postdec(load_index as StackOffset, &exp_type),
                        _ => panic!("Internal error in operator handling"),
                    };
                } else if let ast::Expression::BinaryOp(binary_op) = &item.expr {
                    // Increment/decrement of a heap location addressed by an
                    // index-write or member-access-write expression.
                    assert!(binary_op.op == BinaryOperator::IndexWrite || binary_op.op == BinaryOperator::AccessWrite, "Expected IndexWrite or AccessWrite operation");
                    self.compile_expression(&item.expr)?;
                    comment!(self, "{}", item);
                    let exp_type = self.item_type(&item.expr);
                    match item.op {
                        UO::IncBefore => self.write_heap_preinc(&exp_type),
                        UO::DecBefore => self.write_heap_predec(&exp_type),
                        UO::IncAfter => self.write_heap_postinc(&exp_type),
                        UO::DecAfter => self.write_heap_postdec(&exp_type),
                        _ => panic!("Internal error in operator handling"),
                    };
                } else {
                    panic!("Operator {:?} can not be used here", item.op);
                }
            },
        }
        Ok(())
    }
819
820 fn compile_binary_op_simple(self: &mut Self, item: &ast::BinaryOp) -> CompileResult {
822 use crate::frontend::ast::BinaryOperator as BO;
823 self.compile_expression(&item.left)?; comment!(self, "{}", item.op);
826 self.compile_expression(&item.right)?; let ty_result = self.item_type(item);
828 let ty_left = self.item_type(&item.left);
829 match item.op { BO::Add => self.write_add(ty_result),
832 BO::Sub => self.write_sub(ty_result),
833 BO::Mul => self.write_mul(ty_result),
834 BO::Div => self.write_div(ty_result),
835 BO::Rem => self.write_rem(ty_result),
836 BO::Greater => { self.write_swap(ty_left); self.write_lt(ty_left); },
838 BO::GreaterOrEq => { self.write_swap(ty_left); self.write_lte(ty_left); },
839 BO::Less => self.write_lt(ty_left),
840 BO::LessOrEq => self.write_lte(ty_left),
841 BO::Equal => self.write_eq(ty_left),
842 BO::NotEqual => self.write_neq(ty_left),
843 _ => unreachable!("Invalid simple-operation {:?} in compile_binary_op", item.op),
844 }
845 Ok(())
846 }
847
848 fn compile_binary_op_shortcircuiting(self: &mut Self, item: &ast::BinaryOp) -> CompileResult {
850 use crate::frontend::ast::BinaryOperator as BO;
851 match item.op {
852 BO::And => {
853 self.compile_expression(&item.left)?;
854 let exit_jump = self.writer.j0_nc(123); self.compile_expression(&item.right)?;
856 self.writer.and();
857 let exit_target = self.writer.position();
858 self.writer.overwrite(exit_jump, |w| w.j0_nc(exit_target));
859 },
860 BO::Or => {
861 self.compile_expression(&item.left)?;
862 let exit_jump = self.writer.jn0_nc(123); self.compile_expression(&item.right)?;
864 self.writer.or();
865 let exit_target = self.writer.position();
866 self.writer.overwrite(exit_jump, |w| w.jn0_nc(exit_target));
867 },
868 _ => unreachable!("Invalid shortcircuit-operation {:?} in compile_binary_op", item.op),
869 }
870 Ok(())
871 }
872
    /// Compiles binary operations that address heap data: indexing and member
    /// access, each in a reading and a writing flavor.
    fn compile_binary_op_offseting(self: &mut Self, item: &ast::BinaryOp) -> CompileResult {
        use crate::frontend::ast::BinaryOperator as BO;
        self.compile_expression(&item.left)?;
        self.compile_expression(&item.right)?;
        let result_type = self.item_type(item);
        let compare_type = self.item_type(&item.left);
        match item.op {
            BO::Index => {
                comment!(self, "[{}]", &item.right);
                // Read: fetch the element at the computed index.
                self.write_heap_fetch_element(compare_type, result_type);
            },
            BO::IndexWrite => {
                comment!(self, "[{}] (writing)", &item.right);
                // Write: compute the element address for a subsequent store.
                self.writer.index(result_type.primitive_size());
            },
            BO::Access => {
                comment!(self, ".{}", &item.right);
                // Read: fetch the member at its computed byte offset.
                let struct_ = compare_type.as_struct().unwrap();
                let offset = self.compute_member_offset(struct_, &item.right.as_member().unwrap().ident.name);
                self.write_heap_fetch_member(compare_type, result_type, offset);
            },
            BO::AccessWrite => {
                comment!(self, ".{} (writing)", &item.right);
                // Write: adjust the reference by the member's byte offset.
                let struct_ = compare_type.as_struct().unwrap();
                let offset = self.compute_member_offset(struct_, &item.right.as_member().unwrap().ident.name);
                self.write_member_offset(offset);

            },
            _ => unreachable!("Invalid offset-operation {:?} in compile_binary_op", item.op),
        }
        Ok(())
    }
910
911 fn compile_binary_op(self: &mut Self, item: &ast::BinaryOp) -> CompileResult {
913 if item.op.is_simple() {
914 self.compile_binary_op_simple(item)
915 } else if item.op.is_shortcircuit() {
916 self.compile_binary_op_shortcircuiting(item)
917 } else if item.op.is_offset() {
918 self.compile_binary_op_offseting(item)
919 } else {
920 unreachable!()
921 }
922 }
923
924 fn compile_cast(self: &mut Self, item: &ast::Cast) -> CompileResult {
926
927 self.compile_expression(&item.expr)?;
928 let from = self.item_type(&item.expr);
929 let to = self.item_type(&item.ty);
930 self.write_cast(from, to);
931 Ok(())
932 }
933
934}
935
impl<T> Compiler<T> where T: VMFunc<T> {

    /// Returns the resolved `Type` of the given AST item.
    ///
    /// Panics if the resolver left the item without a type-id; by the time the
    /// compiler runs, all types must be resolved.
    fn item_type(self: &Self, item: &impl Typeable) -> &Type {
        match item.type_id(self) {
            None => panic!("Unresolved type encountered"),
            Some(type_id) => self.type_by_id(type_id)
        }
    }

    /// Returns the const-pool address of the constructor for the given type,
    /// or 0 if no constructor was registered for it.
    fn get_constructor(self: &Self, ty: &Type) -> StackAddress {
        // Reverse lookup: find the type-id whose type equals `ty`.
        let type_id = self.id_mappings.types().find(|m| m.1 == ty).unwrap().0;
        *self.constructors.get(&type_id).unwrap_or(&0)
    }

    /// Returns whether `other` can be used where `target` is expected:
    /// matching function kind (by enum variant), identical return type,
    /// same arity, and pairwise-acceptable argument types.
    fn is_compatible_function(self: &Self, target: &Function, other: &Function) -> bool {
        if discriminant(&target.kind.unwrap()) != discriminant(&other.kind.unwrap()) {
            return false;
        }
        if target.ret_type != other.ret_type {
            return false;
        }
        if target.arg_type.len() != other.arg_type.len() {
            return false;
        }
        for (target_arg, other_arg) in target.arg_type.iter().zip(other.arg_type.iter()) {
            if !self.type_accepted_for(other_arg.unwrap(), target_arg.unwrap()) {
                return false;
            }
        }
        true
    }

    /// Computes the byte offset of a struct member by summing the primitive
    /// sizes of all fields preceding it in declaration order.
    fn compute_member_offset(self: &Self, struct_: &Struct, member_name: &str) -> StackAddress {
        let mut offset = 0;
        for (field_name, field_type_id) in struct_.fields.iter() {
            if field_name == member_name {
                break;
            }
            let field_type = self.type_by_id(field_type_id.expect("Unresolved struct field encountered"));
            offset += field_type.primitive_size() as StackAddress;
        }
        offset
    }

    /// Patches all placeholder call-sites recorded for `function_id` with the
    /// now-known call target, restoring the writer position afterwards.
    /// Counterpart to the placeholder registration in write_call().
    fn fix_targets(self: &mut Self, function_id: FunctionId, info: CallInfo) {
        if let Some(targets) = self.call_placeholder.remove(&function_id) {
            let backup_position = self.writer.position();
            for &target in targets.iter() {
                self.writer.set_position(target);
                self.writer.call(info.addr, info.arg_size);
            }
            self.writer.set_position(backup_position);
        }
    }

    /// Recursively registers stack-frame slots for bindings declared in blocks
    /// nested inside the given expression (block results, call arguments,
    /// assignment right-hand sides, unary/binary operands, if/else branches).
    fn create_stack_frame_exp(self: &Self, expression: &ast::Expression, frame: &mut StackFrame) {
        if let ast::Expression::Block(block) = expression {
            self.create_stack_frame_block(block, frame);
        } else if let ast::Expression::Call(call) = expression {
            for arg in &call.args {
                if let ast::Expression::Block(block) = arg {
                    self.create_stack_frame_block(block, frame);
                }
            }
        } else if let ast::Expression::Assignment(assignment) = expression {
            if let ast::Expression::Block(block) = &assignment.right {
                self.create_stack_frame_block(block, frame);
            }
        } else if let ast::Expression::BinaryOp(binary_op) = expression {
            if let ast::Expression::Block(block) = &binary_op.left {
                self.create_stack_frame_block(block, frame);
            }
            if let ast::Expression::Block(block) = &binary_op.right {
                self.create_stack_frame_block(block, frame);
            }
        } else if let ast::Expression::UnaryOp(unary_op) = expression {
            if let ast::Expression::Block(block) = &unary_op.expr {
                self.create_stack_frame_block(block, frame);
            }
        } else if let ast::Expression::IfBlock(if_block) = expression {
            self.create_stack_frame_block(&if_block.if_block, frame);
            if let Some(block) = &if_block.else_block {
                self.create_stack_frame_block(block, frame);
            }
        }
    }

    /// Registers stack-frame slots for all bindings declared in the block
    /// (including bindings in nested blocks/loops/branches), advancing
    /// `frame.var_pos` by each binding's primitive size.
    fn create_stack_frame_block(self: &Self, item: &ast::Block, frame: &mut StackFrame) {
        for statement in item.statements.iter() {
            if let ast::Statement::Binding(binding) = statement {
                frame.insert(binding.binding_id.unwrap(), frame.var_pos, LocalOrigin::Binding);
                frame.var_pos += self.item_type(binding).primitive_size() as StackAddress;
                if let Some(expression) = &binding.expr {
                    self.create_stack_frame_exp(expression, frame);
                }
            } else if let ast::Statement::ForLoop(for_loop) = statement {
                // The loop iteration variable also occupies a frame slot.
                frame.insert(for_loop.iter.binding_id.unwrap(), frame.var_pos, LocalOrigin::Binding);
                frame.var_pos += self.item_type(&for_loop.iter).primitive_size() as StackAddress;
                self.create_stack_frame_block(&for_loop.block, frame);
            } else if let ast::Statement::WhileLoop(while_loop) = statement {
                self.create_stack_frame_block(&while_loop.block, frame);
            } else if let ast::Statement::Block(block) = statement {
                self.create_stack_frame_block(&block, frame);
            } else if let ast::Statement::IfBlock(if_block) = statement {
                self.create_stack_frame_block(&if_block.if_block, frame);
                if let Some(block) = &if_block.else_block {
                    self.create_stack_frame_block(block, frame);
                }
            } else if let ast::Statement::Expression(expression) = statement {
                self.create_stack_frame_exp(&expression, frame);
            }
        }
        // The block's trailing result expression may also contain nested blocks.
        if let Some(result) = &item.result {
            self.create_stack_frame_exp(result, frame);
        }
    }

    /// Emits a refcount operation for the item's value if its type is
    /// heap-allocated (`nc` selects the non-checking opcode variant, see
    /// write_cnt_nc); no-op for non-reference types.
    fn item_cnt(self: &Self, item: &impl Typeable, nc: bool, op: HeapRefOp) {
        let ty = self.item_type(item);
        if ty.is_ref() {
            match nc {
                true => self.write_cnt_nc(self.get_constructor(ty), op),
                false => self.write_cnt(self.get_constructor(ty), op),
            };
        }
    }

    /// Emits refcount-decrements for every activated+initialized
    /// heap-reference local of the innermost stack frame (e.g. when a block
    /// goes out of scope).
    fn decref_block_locals(self: &mut Self) {
        // NOTE(review): the frame is moved out of self.locals so `self` stays
        // borrowable for code emission below, then restored — confirm intent.
        let frame = self.locals.pop();
        for (&binding_id, local) in frame.map.iter() {
            if self.init_state.activated(binding_id) && self.init_state.initialized(binding_id) {
                let type_id = self.binding_by_id(binding_id).type_id.unwrap();
                let ty = self.type_by_id(type_id);
                if ty.is_ref() {
                    comment!(self, "freeing local {}", local.index);
                    self.write_load(local.index as StackOffset, ty);
                    self.write_cnt(self.get_constructor(ty), HeapRefOp::Dec);
                }
            }
        }
        self.locals.push(frame);
    }
}
1090
impl<T> Compiler<T> where T: VMFunc<T> {

    /// Writes a constructor descriptor for the given type into the const pool
    /// and returns the pool position where it starts.
    ///
    /// Layout: a `Constructor` tag followed by a type-specific payload;
    /// variable-length payloads are prefixed with their byte length via a
    /// placeholder that is patched once the payload has been written.
    fn store_constructor(self: &Self, type_id: TypeId) -> StackAddress {
        // Writes a length-prefixed section: emits a placeholder length (123),
        // runs `inner` to write the payload, then patches the actual length.
        let store_len = |inner: &mut dyn FnMut()| {
            let len_position = self.writer.const_len();
            self.writer.store_const(123 as ItemIndex);
            let inner_position = self.writer.const_len();
            inner();
            let inner_len = self.writer.const_len() - inner_position;
            self.writer.update_const(len_position, inner_len as ItemIndex);
        };
        let position = self.writer.const_len();
        match self.type_by_id(type_id) {
            Type::Array(array) => {
                // Tag + length-prefixed element constructor (recursive).
                self.writer.store_const(Constructor::Array);
                store_len(&mut || {
                    self.store_constructor(array.type_id.expect("Unresolved array element type"));
                });
            }
            Type::Struct(structure) => {
                // Tag + (implementor index, field count, per-field constructors).
                self.writer.store_const(Constructor::Struct);
                store_len(&mut || {
                    self.writer.store_const(*self.trait_implementor_indices.get(&type_id).unwrap_or(&0));
                    self.writer.store_const(structure.fields.len() as ItemIndex);
                    for field in &structure.fields {
                        self.store_constructor(field.1.expect("Unresolved struct field type"));
                    }
                });
            }
            Type::String => {
                self.writer.store_const(Constructor::String);
            }
            Type::Enum(enumeration) => {
                // Tag + (implementor index, variant count, variant-offset table,
                // then per-variant field data).
                self.writer.store_const(Constructor::Enum);
                store_len(&mut || {
                    self.writer.store_const(*self.trait_implementor_indices.get(&type_id).unwrap_or(&0));
                    self.writer.store_const(enumeration.variants.len() as ItemIndex);
                    // Reserve one placeholder offset slot per variant (123 is a
                    // dummy value, patched below once real offsets are known).
                    let variant_offsets_pos = self.writer.const_len();
                    for _ in &enumeration.variants {
                        self.writer.store_const(123 as StackAddress);
                    }
                    // Write each variant's field count + field constructors,
                    // remembering where each variant's data starts.
                    let mut variant_offsets = Vec::with_capacity(enumeration.variants.len());
                    for (_, fields) in &enumeration.variants {
                        let num_fields = fields.as_data().map_or(0, |f| f.len());
                        let variant_offset = self.writer.store_const(num_fields as ItemIndex);
                        variant_offsets.push(variant_offset);
                        if num_fields > 0 {
                            for field in fields.as_data().unwrap() {
                                self.store_constructor(field.expect("Unresolved enum field type"));
                            }
                        }
                    }
                    // Patch the offset table with the recorded per-variant positions.
                    for (index, &variant_offset) in variant_offsets.iter().enumerate() {
                        let const_position = variant_offsets_pos + (index as StackAddress) * size_of::<StackAddress>() as StackAddress;
                        self.writer.update_const(const_position, variant_offset as StackAddress);
                    }
                });
            }
            Type::Trait(_) => unimplemented!("trait constructor"),
            ty @ _ => {
                // Primitives: tag + byte size, no recursion.
                self.writer.store_const(Constructor::Primitive);
                self.writer.store_const(ty.primitive_size() as ItemIndex);
            }
        }
        position
    }

    /// Writes the compile-time representation (prototype) of a literal into
    /// the const pool and returns its start position. Composite literals are
    /// written recursively in declaration/field order.
    fn store_literal_prototype(self: &Self, item: &ast::Literal) -> StackAddress {
        use crate::frontend::ast::LiteralValue;
        let ty = self.item_type(item);
        let pos = self.writer.const_len();
        match &item.value {
            &LiteralValue::Numeric(int) => {
                self.store_numeric_prototype(int, ty);
            },
            &LiteralValue::Bool(boolean) => {
                match ty {
                    Type::bool => self.writer.store_const(if boolean { 1u8 } else { 0u8 }),
                    _ => panic!("Unexpected boolean literal type: {:?}", ty)
                };
            },
            LiteralValue::String(string_literal) => {
                self.writer.store_const(string_literal.as_str());
            },
            LiteralValue::Array(array_literal) => {
                // Element count followed by each element's prototype.
                self.writer.store_const(array_literal.elements.len() as ItemIndex);
                for element in &array_literal.elements {
                    self.store_literal_prototype(element.as_literal().unwrap());
                }
            },
            LiteralValue::Struct(struct_literal) => {
                // Fields are written in struct-definition order, not literal order.
                let struct_def = ty.as_struct().expect("Expected struct, got something else");
                for (name, _) in struct_def.fields.iter() {
                    let field = struct_literal.fields.get(&name[..]).expect("Missing struct field");
                    self.store_literal_prototype(field.as_literal().unwrap());
                }
            },
            LiteralValue::Variant(variant) => {
                // Data-less variant: only the variant index is stored.
                let enum_def = ty.as_enum().expect("Encountered non-enum type on enum variant");
                let index_type = Type::unsigned(size_of::<VariantIndex>());
                let variant_index = enum_def.variant_index(&variant.ident.name).unwrap();
                self.store_numeric_prototype(Numeric::Unsigned(variant_index as u64), &index_type);
            },
        };
        pos
    }

    /// Writes a numeric literal into the const pool at the exact width of
    /// `ty` and returns its pool position.
    ///
    /// Note that `Unsigned` values are also accepted for signed target types
    /// (non-negative literals are parsed as unsigned).
    fn store_numeric_prototype(self: &Self, numeric: Numeric, ty: &Type) -> StackAddress {
        match numeric {
            Numeric::Signed(v) => {
                match ty {
                    Type::i8 => self.writer.store_const(v as i8),
                    Type::i16 => self.writer.store_const(v as i16),
                    Type::i32 => self.writer.store_const(v as i32),
                    Type::i64 => self.writer.store_const(v as i64),
                    _ => panic!("Unexpected signed integer literal type: {:?}", ty)
                }
            },
            Numeric::Unsigned(v) => {
                match ty {
                    Type::i8 | Type::u8 => self.writer.store_const(v as u8),
                    Type::i16 | Type::u16 => self.writer.store_const(v as u16),
                    Type::i32 | Type::u32 => self.writer.store_const(v as u32),
                    Type::i64 | Type::u64 => self.writer.store_const(v as u64),
                    _ => panic!("Unexpected unsigned integer literal type: {:?}", ty)
                }
            },
            Numeric::Float(v) => {
                match ty {
                    Type::f32 => self.writer.store_const(v as f32),
                    Type::f64 => self.writer.store_const(v as f64),
                    _ => panic!("Unexpected float literal type: {:?}", ty)
                }
            },
        }
    }
}
1236
1237impl<T> Compiler<T> where T: VMFunc<T> {
1239
    /// Emits the shortest instruction sequence that pushes the given numeric
    /// literal: dedicated zero/one/fill opcodes, small immediates, or a
    /// const-pool load as the general fallback. Guard order matters: earlier,
    /// more specific arms shadow later ones.
    fn write_literal_numeric(self: &Self, numeric: Numeric, ty: &Type) {
        match numeric { Numeric::Unsigned(0) if ty.is_integer() && ty.primitive_size() == 1 => { self.writer.zero8(); }
            Numeric::Unsigned(0) if ty.is_integer() && ty.primitive_size() == 2 => { self.writer.zero16(); }
            Numeric::Unsigned(0) if ty.is_integer() && ty.primitive_size() == 4 => { self.writer.zero32(); }
            Numeric::Unsigned(0) if ty.is_integer() && ty.primitive_size() == 8 => { self.writer.zero64(); }

            Numeric::Unsigned(1) if ty.is_integer() && ty.primitive_size() == 1 => { self.writer.one8(); }
            Numeric::Unsigned(1) if ty.is_integer() && ty.primitive_size() == 2 => { self.writer.one16(); }
            Numeric::Unsigned(1) if ty.is_integer() && ty.primitive_size() == 4 => { self.writer.one32(); }
            Numeric::Unsigned(1) if ty.is_integer() && ty.primitive_size() == 8 => { self.writer.one64(); }

            // -1: all-bits-set fill opcodes.
            Numeric::Signed(-1) if ty.is_signed() && ty.primitive_size() == 1 => { self.writer.fill8(); }
            Numeric::Signed(-1) if ty.is_signed() && ty.primitive_size() == 2 => { self.writer.fill16(); }
            Numeric::Signed(-1) if ty.is_signed() && ty.primitive_size() == 4 => { self.writer.fill32(); }
            Numeric::Signed(-1) if ty.is_signed() && ty.primitive_size() == 8 => { self.writer.fill64(); }

            // Small immediates: 8-bit types always, 32-bit types when the value
            // fits in one byte (literalu32/literals32 take a compact u8/i8 operand).
            // NOTE(review): no compact forms for 16/64-bit types — those fall
            // through to the const-pool path below; confirm this is intended.
            Numeric::Unsigned(val) if ty.is_integer() && ty.primitive_size() == 1 => { self.writer.literali8(val as u8); }
            Numeric::Unsigned(val) if ty.is_integer() && ty.primitive_size() == 4 && val <= u8::MAX as u64 => { self.writer.literalu32(val as u8); }

            Numeric::Signed(val) if ty.is_signed() && ty.primitive_size() == 1 => { self.writer.literali8((val as i8) as u8); }
            Numeric::Signed(val) if ty.is_signed() && ty.primitive_size() == 4 && val >= i8::MIN as i64 && val <= i8::MAX as i64 => { self.writer.literals32(val as i8); }

            // General case: store the value in the const pool and load it.
            _ if ty.is_integer() || ty.is_float() => {
                let address = self.store_numeric_prototype(numeric, ty);
                self.write_const(address, ty);
            },
            _ => panic!("Unexpected numeric literal type: {:?}", ty),
        }
    }
1272
    /// Emits code that places a (possibly composite) literal's data on the
    /// stack (or constructs it on the heap for strings) and returns the total
    /// primitive size in bytes of what was produced.
    ///
    /// Scalar numerics/bools never reach this function — they are written via
    /// write_literal_numeric instead.
    fn write_literal_prototype_builder(self: &mut Self, item: &ast::Literal) -> CompileResult<StackAddress> {
        use crate::frontend::ast::LiteralValue;
        let ty = self.item_type(item);
        Ok(match &item.value {
            LiteralValue::Numeric(_) | LiteralValue::Bool(_) => unreachable!("Invalid prototype type"),
            LiteralValue::String(_) => {
                // Strings are heap objects: construct from const-pool prototype.
                let constructor = self.get_constructor(ty);
                let prototype = self.store_literal_prototype(item);
                self.writer.construct(constructor, prototype);
                Type::String.primitive_size() as StackAddress
            },
            LiteralValue::Array(array_literal) => {
                // Elements are compiled in order; result size = count * element size.
                for element in &array_literal.elements {
                    self.compile_expression(element)?;
                }
                let array_ty = self.item_type(item).as_array().expect("Expected array type, got something else");
                array_literal.elements.len() as StackAddress * self.type_by_id(array_ty.type_id.unwrap()).primitive_size() as StackAddress
            },
            LiteralValue::Struct(struct_literal) => {
                // Fields are compiled in struct-definition order, not literal order.
                let struct_def = ty.as_struct().expect("Expected struct, got something else");
                let fields: Vec<_> = struct_def.fields.iter().map(|(name, _)| struct_literal.fields.get(name).expect("Missing struct field")).collect();
                for field in fields {
                    self.compile_expression(field)?;
                }
                let struct_ty = self.item_type(item).as_struct().expect("Expected struct type, got something else");
                struct_ty.fields.iter().fold(0, |acc, f| acc + self.type_by_id(f.1.unwrap()).primitive_size() as StackAddress)
            },
            LiteralValue::Variant(variant) => {
                // Data-less variant: push just the variant index.
                let enum_def = self.item_type(item).as_enum().expect("Encountered non-enum type on enum variant");
                let index_type = Type::unsigned(size_of::<VariantIndex>());
                let variant_index = enum_def.variant_index(&variant.ident.name).unwrap();
                self.write_literal_numeric(Numeric::Unsigned(variant_index as u64), &index_type);
                index_type.primitive_size() as StackAddress
            },
        })
    }
1314
    /// Emits conversion code from type `from` to type `to`.
    ///
    /// Integer casts narrower/wider than 64 bit are routed through 64-bit
    /// intermediates where the VM only provides 64-bit float conversion
    /// opcodes. Identical types emit nothing; any other combination is a
    /// compiler bug.
    fn write_cast(self: &Self, from: &Type, to: &Type) {
        // Signed -> unsigned-ish target: clamp negatives first so the
        // subsequent bit-level conversion can't produce huge values.
        if from.is_signed() && !to.is_signed() && !to.is_float() && !to.is_string() {
            self.write_zclamp(from);
        }
        if from.is_integer() && to.is_integer() {
            self.write_integer_cast(from, to);
        } else if from.is_float() && to.is_float() {
            // f32<->f64 is handled by the float/int cast table.
            self.write_float_integer_cast(from, to);
        } else if from.is_float() && to.is_integer() {
            // Float -> 64-bit integer, then narrow if needed.
            let temp_to = if to.is_signed() { &Type::i64 } else { &Type::u64 };
            self.write_float_integer_cast(from, temp_to);
            if to.primitive_size() != 8 {
                self.write_integer_cast(temp_to, to);
            }
        } else if from.is_integer() && to.is_float() {
            // Widen integer to 64 bit if needed, then convert to float.
            let temp_from = if from.is_signed() { &Type::i64 } else { &Type::u64 };
            if from.primitive_size() != 8 {
                self.write_integer_cast(from, temp_from);
            }
            self.write_float_integer_cast(temp_from, to);
        } else if from.is_integer() && to.is_string() {
            // Widen to 64 bit, then use the VM's to-string opcodes.
            let temp_from = if from.is_signed() { &Type::i64 } else { &Type::u64 };
            if from.primitive_size() != 8 {
                self.write_integer_cast(from, temp_from);
            }
            match temp_from {
                Type::i64 => self.writer.i64_to_string(),
                Type::u64 => self.writer.u64_to_string(),
                _ => unreachable!(),
            };
        } else if from == &Type::f32 && to.is_string() {
            self.writer.f32_to_string();
        } else if from == &Type::f64 && to.is_string() {
            self.writer.f64_to_string();
        } else if let Some(Enum { primitive: Some((primitive, _)), .. }) = from.as_enum() {
            // Enum with a primitive representation: cast via that primitive.
            let from = self.type_by_id(*primitive);
            self.write_cast(from, to);
        } else if from != to {
            unreachable!("Invalid cast {:?} to {:?}", from, to);
        }
    }
1357
1358 fn write_float_integer_cast(self: &Self, from: &Type, to: &Type) {
1360 match (from, to) {
1361 (Type::i64, Type::f32) => self.writer.i64_to_f32(),
1362 (Type::u64, Type::f32) => self.writer.u64_to_f32(),
1363 (Type::f64, Type::f32) => self.writer.f64_to_f32(),
1364
1365 (Type::i64, Type::f64) => self.writer.i64_to_f64(),
1366 (Type::u64, Type::f64) => self.writer.u64_to_f64(),
1367 (Type::f32, Type::f64) => self.writer.f32_to_f64(),
1368
1369 (Type::f32, Type::i64) => self.writer.f32_to_i64(),
1370 (Type::f64, Type::i64) => self.writer.f64_to_i64(),
1371
1372 (Type::f32, Type::u64) => self.writer.f32_to_u64(),
1373 (Type::f64, Type::u64) => self.writer.f64_to_u64(),
1374 _ => unreachable!("Invalid float/int cast {:?} to {:?}", from, to),
1375 };
1376 }
1377
    /// Emits the trim/extend opcode converting between two integer types.
    ///
    /// Narrowing (or same-width unsigned -> signed, which must clamp values
    /// above the signed maximum — NOTE(review): confirm against the VM's
    /// trim semantics) uses trim opcodes; widening uses extend opcodes.
    /// Same-width casts that don't change signedness that way emit nothing.
    fn write_integer_cast(self: &Self, from: &Type, to: &Type) {
        // Sizes in bits, selected by the source width.
        let from_size = (from.primitive_size() * 8) as u8;
        let to_size = (to.primitive_size() * 8) as u8;
        if to_size < from_size || (to_size == from_size && !from.is_signed() && to.is_signed()) {
            if to.is_signed() {
                match from_size {
                    64 => self.writer.trims64(to_size),
                    32 => self.writer.trims32(to_size),
                    16 => self.writer.trims16(to_size),
                    _ => unreachable!("Invalid integer cast {:?} to {:?}", from, to),
                };
            } else {
                match from_size {
                    64 => self.writer.trimu64(to_size),
                    32 => self.writer.trimu32(to_size),
                    16 => self.writer.trimu16(to_size),
                    _ => unreachable!("Invalid integer cast {:?} to {:?}", from, to),
                };
            }
        } else if to_size > from_size {
            // Widening: sign-extend if the source is signed, else zero-extend.
            if from.is_signed() {
                match from_size {
                    32 => self.writer.extends32(to_size),
                    16 => self.writer.extends16(to_size),
                    8 => self.writer.extends8(to_size),
                    _ => unreachable!("Invalid integer cast {:?} to {:?}", from, to),
                };
            } else {
                match from_size {
                    32 => self.writer.extendu32(to_size),
                    16 => self.writer.extendu16(to_size),
                    8 => self.writer.extendu8(to_size),
                    _ => unreachable!("Invalid integer cast {:?} to {:?}", from, to),
                };
            }
        }
    }
1416
    /// Emits a refcount opcode (non-checking `_nc` variant) for the
    /// constructor at the given const-pool address; the macro picks the
    /// 8/16/stack-address-sized operand encoding.
    fn write_cnt_nc(self: &Self, constructor: StackAddress, op: HeapRefOp) {
        select_unsigned_opcode!(self, cnt_8_nc, cnt_16_nc, cnt_sa_nc, constructor, op);
    }
1421
    /// Emits a refcount opcode for the constructor at the given const-pool
    /// address; the macro picks the 8/16/stack-address-sized operand encoding.
    fn write_cnt(self: &Self, constructor: StackAddress, op: HeapRefOp) {
        select_unsigned_opcode!(self, cnt_8, cnt_16, cnt_sa, constructor, op);
    }
1426
    /// Emits an opcode that advances the heap reference on the stack by the
    /// given member byte offset. A zero offset is a no-op and emits nothing.
    fn write_member_offset(self: &Self, offset: StackAddress) {
        if offset > 0 {
            select_signed_opcode!(self, offsetx_8, offsetx_16, offsetx_sa, offset as StackOffset);
        }
    }
1433
    /// Emits an opcode that pushes the const-pool value at `index`, selected
    /// by the type's byte size.
    ///
    /// NOTE(review): there is no 1-byte variant here — single-byte values
    /// appear to be emitted as immediates instead (see write_literal_numeric);
    /// confirm size 1 can never reach this point.
    fn write_const(self: &Self, index: StackAddress, ty: &Type) {
        match ty.primitive_size() {
            2 => select_unsigned_opcode!(self, const16_8, const16_16, const16_sa, index),
            4 => select_unsigned_opcode!(self, const32_8, const32_16, const32_sa, index),
            8 => select_unsigned_opcode!(self, const64_8, const64_16, const64_sa, index),
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
        };
    }
1444
1445 fn write_heap_fetch(self: &Self, ty: &Type) {
1447 match ty.primitive_size() {
1448 1 => { self.writer.heap_fetch8(); },
1449 2 => { self.writer.heap_fetch16(); },
1450 4 => { self.writer.heap_fetch32(); },
1451 8 => { self.writer.heap_fetch64(); },
1452 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
1454 }
1455 }
1456
1457 fn write_heap_put(self: &Self, ty: &Type) -> StackAddress {
1459 match ty.primitive_size() {
1460 1 => self.writer.heap_put8(),
1461 2 => self.writer.heap_put16(),
1462 4 => self.writer.heap_put32(),
1463 8 => self.writer.heap_put64(),
1464 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
1465 }
1466 }
1467
1468 fn write_heap_putx(self: &Self, ty: &Type, is_new_heap_ref: bool) -> StackAddress {
1472 if ty.is_ref() {
1473 let constructor = self.get_constructor(ty);
1474 if !is_new_heap_ref {
1475 self.writer.heap_putx_replace(constructor)
1476 } else {
1477 self.writer.heap_putx_new(constructor)
1478 }
1479 } else {
1480 self.write_heap_put(ty)
1481 }
1482 }
1483
1484 fn write_heap_fetch_member(self: &Self, container_type: &Type, result_type: &Type, offset: StackAddress) {
1486 let constructor = self.get_constructor(container_type);
1487 match result_type.primitive_size() {
1488 1 => { self.writer.heap_fetch_member8(offset, constructor); },
1489 2 => { self.writer.heap_fetch_member16(offset, constructor); },
1490 4 => { self.writer.heap_fetch_member32(offset, constructor); },
1491 8 => { self.writer.heap_fetch_member64(offset, constructor); },
1492 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, result_type),
1494 }
1495 }
1496
    /// Emits an indexed element fetch from a heap container, sized by the
    /// element's type.
    fn write_heap_fetch_element(self: &Self, container_type: &Type, result_type: &Type) {
        let constructor = self.get_constructor(container_type);
        match result_type.primitive_size() {
            1 => { self.writer.heap_fetch_element8(constructor); },
            2 => { self.writer.heap_fetch_element16(constructor); },
            4 => { self.writer.heap_fetch_element32(constructor); },
            8 => { self.writer.heap_fetch_element64(constructor); },
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, result_type),
        }
    }
1509
    /// Emits a fetch of the last element of a heap container (non-checking
    /// `_nc` variant), sized by the element's type.
    fn write_heap_tail_element_nc(self: &Self, container_type: &Type, result_type: &Type) {
        let constructor = self.get_constructor(container_type);
        match result_type.primitive_size() {
            1 => { self.writer.heap_tail_element8_nc(constructor); },
            2 => { self.writer.heap_tail_element16_nc(constructor); },
            4 => { self.writer.heap_tail_element32_nc(constructor); },
            8 => { self.writer.heap_tail_element64_nc(constructor); },
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, result_type),
        }
    }
1521
    /// Emits a store of the stack top into the local at `local.index`, sized
    /// by the value's type; the macro picks the narrowest index encoding.
    fn write_store(self: &Self, local: Local, ty: &Type) -> StackAddress {
        match ty.primitive_size() {
            1 => select_signed_opcode!(self, store8_8, store8_16, store8_sa, local.index as StackOffset),
            2 => select_signed_opcode!(self, store16_8, store16_16, store16_sa, local.index as StackOffset),
            4 => select_signed_opcode!(self, store32_8, store32_16, store32_sa, local.index as StackOffset),
            8 => select_signed_opcode!(self, store64_8, store64_16, store64_sa, local.index as StackOffset),
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
        }
    }
1532
1533 fn write_storex(self: &Self, local: Local, item: &impl Typeable, binding_id: BindingId) -> StackAddress {
1537 let ty = self.item_type(item);
1538 if ty.is_ref() {
1539 let constructor = self.get_constructor(ty);
1540 if self.init_state.initialized(binding_id) {
1541 self.writer.storex_replace(local.index as StackOffset, constructor)
1542 } else {
1543 self.writer.storex_new(local.index as StackOffset, constructor)
1544 }
1545 } else {
1546 self.write_store(local, ty)
1547 }
1548 }
1549
    /// Emits a load of the local at `index` onto the stack, sized by the
    /// value's type. 32-bit loads of the well-known argument slots ARG1-ARG3
    /// use dedicated single-byte opcodes.
    fn write_load(self: &Self, index: StackOffset, ty: &Type) {
        match ty.primitive_size() {
            1 => select_signed_opcode!(self, load8_8, load8_16, load8_sa, index),
            2 => select_signed_opcode!(self, load16_8, load16_16, load16_sa, index),
            4 => match index {
                ARG1 => self.writer.load_arg1(),
                ARG2 => self.writer.load_arg2(),
                ARG3 => self.writer.load_arg3(),
                _ => select_signed_opcode!(self, load32_8, load32_16, load32_sa, index),
            },
            8 => select_signed_opcode!(self, load64_8, load64_16, load64_sa, index),
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
        };
    }
1566
1567 fn write_discard(self: &Self, ty: &Type) {
1569 comment!(self, "discarding result");
1570 if ty.is_ref() {
1571 let constructor = self.get_constructor(ty);
1572 self.write_cnt_nc(constructor, HeapRefOp::Free);
1573 }
1574 match ty.primitive_size() {
1575 0 => 0,
1576 1 => self.writer.discard8(),
1577 2 => self.writer.discard16(),
1578 4 => self.writer.discard32(),
1579 8 => self.writer.discard64(),
1580 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
1582 };
1583 }
1584
    /// Emits a swap of the two topmost stack values, sized by the type.
    fn write_swap(self: &Self, ty: &Type) {
        match ty.primitive_size() {
            1 => self.writer.swap8(),
            2 => self.writer.swap16(),
            4 => self.writer.swap32(),
            8 => self.writer.swap64(),
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
        };
    }
1596
    /// Emits a duplicate of the topmost stack value, sized by the type.
    fn write_clone(self: &Self, ty: &Type) -> StackAddress {
        match ty.primitive_size() {
            1 => self.writer.clone8(),
            2 => self.writer.clone16(),
            4 => self.writer.clone32(),
            8 => self.writer.clone64(),
            size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
        }
    }
1608
    /// Emits a duplicate of the topmost stack value sized as a heap address
    /// (i.e. duplicates a heap reference without touching its refcount).
    fn write_clone_ref(self: &Self) -> StackAddress {
        match size_of::<crate::HeapAddress>() {
            1 => self.writer.clone8(),
            2 => self.writer.clone16(),
            4 => self.writer.clone32(),
            8 => self.writer.clone64(),
            size @ _ => unreachable!("Unsupported size {} for heap address", size),
        }
    }
1619
1620 fn write_call(self: &mut Self, function_id: FunctionId) -> StackAddress {
1622 let target = if let Some(&target) = self.functions.get(&function_id) {
1623 target
1624 } else {
1625 let call_position = self.writer.position();
1626 self.call_placeholder.entry(function_id).or_insert(Vec::new()).push(call_position);
1627 CallInfo::PLACEHOLDER
1628 };
1629 self.writer.call(target.addr, target.arg_size)
1630 }
1631
    /// Emits the implementation of a builtin method call on the given type.
    ///
    /// Currently only array builtins exist: `len` computes the element count
    /// from the heap byte size (shift right by log2 of the element size); the
    /// remaining builtins dispatch to element-size-specific opcodes via the
    /// select_builtin! macro (the `x` variants handle reference elements).
    fn write_builtin(self: &Self, builtin: BuiltinGroup, ty: &Type) {
        let constructor = self.get_constructor(ty);
        #[allow(unreachable_patterns)]
        match ty {
            &Type::Array(Array { type_id }) => {
                let inner_ty = self.type_by_id(type_id.unwrap());
                match builtin {
                    BuiltinGroup::ArrayLen => {
                        self.writer.heap_size(constructor);
                        // byte size -> element count: shift by log2(element size)
                        self.writer.shrsa(match inner_ty.primitive_size() {
                            1 => 0,
                            2 => 1,
                            4 => 2,
                            8 => 3,
                            _ => unreachable!("Unsupported inner size for type {} for builtin group {:?}", ty, builtin),
                        });
                    }
                    BuiltinGroup::ArrayPush => select_builtin!(self, inner_ty, array_push8, array_push16, array_push32, array_push64, array_pushx),
                    BuiltinGroup::ArrayPop => select_builtin!(self, inner_ty, array_pop8, array_pop16, array_pop32, array_pop64, array_popx),
                    BuiltinGroup::ArrayTruncate => select_builtin!(self, inner_ty, array_truncate8, array_truncate16, array_truncate32, array_truncate64, array_truncatex),
                    BuiltinGroup::ArrayRemove => select_builtin!(self, inner_ty, array_remove8, array_remove16, array_remove32, array_remove64, array_removex),
                    _ => unreachable!("Unsupported type {} for builtin group {:?}", ty, builtin),
                }
            }
            _ => unreachable!("Unsupported type {}", ty),
        }
    }
1660
    /// Emits a zclamp opcode for the type — presumably clamps negative values
    /// to zero ahead of a signed -> unsigned cast (see write_cast); confirm
    /// against the VM opcode implementation.
    fn write_zclamp(self: &Self, ty: &Type) {
        match ty {
            Type::f32 => self.writer.zclampf32(),
            Type::f64 => self.writer.zclampf64(),
            Type::i8 => self.writer.zclampi8(),
            Type::i16 => self.writer.zclampi16(),
            Type::i32 => self.writer.zclampi32(),
            Type::i64 => self.writer.zclampi64(),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1673
    /// Emits a decrement-by-1 opcode sized by the integer type.
    fn write_dec(self: &Self, ty: &Type) -> StackAddress {
        match ty {
            Type::i64 | Type::u64 => self.writer.deci64(1),
            Type::i32 | Type::u32 => self.writer.deci32(1),
            Type::i16 | Type::u16 => self.writer.deci16(1),
            Type::i8 | Type::u8 => self.writer.deci8(1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        }
    }
1684
    /// Emits a pre-increment of the local at `index`.
    /// Increment is encoded as a decrement by -1 — the writer only exposes
    /// predec-style opcodes.
    fn write_preinc(self: &Self, index: StackOffset, ty: &Type) {
        match ty {
            Type::i64 | Type::u64 => self.writer.predeci64(index, -1),
            Type::i32 | Type::u32 => self.writer.predeci32(index, -1),
            Type::i16 | Type::u16 => self.writer.predeci16(index, -1),
            Type::i8 | Type::u8 => self.writer.predeci8(index, -1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1695
    /// Emits a pre-decrement (by 1) of the local at `index`.
    fn write_predec(self: &Self, index: StackOffset, ty: &Type) {
        match ty {
            Type::i64 | Type::u64 => self.writer.predeci64(index, 1),
            Type::i32 | Type::u32 => self.writer.predeci32(index, 1),
            Type::i16 | Type::u16 => self.writer.predeci16(index, 1),
            Type::i8 | Type::u8 => self.writer.predeci8(index, 1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1706
    /// Emits a post-increment of the local at `index`.
    /// Increment is encoded as a decrement by -1 — the writer only exposes
    /// postdec-style opcodes.
    fn write_postinc(self: &Self, index: StackOffset, ty: &Type) {
        match ty {
            Type::i64 | Type::u64 => self.writer.postdeci64(index, -1),
            Type::i32 | Type::u32 => self.writer.postdeci32(index, -1),
            Type::i16 | Type::u16 => self.writer.postdeci16(index, -1),
            Type::i8 | Type::u8 => self.writer.postdeci8(index, -1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1717
    /// Emits a post-decrement (by 1) of the local at `index`.
    fn write_postdec(self: &Self, index: StackOffset, ty: &Type) {
        match ty {
            Type::i64 | Type::u64 => self.writer.postdeci64(index, 1),
            Type::i32 | Type::u32 => self.writer.postdeci32(index, 1),
            Type::i16 | Type::u16 => self.writer.postdeci16(index, 1),
            Type::i8 | Type::u8 => self.writer.postdeci8(index, 1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1728
    /// Emits a pre-increment of a heap-addressed integer value (encoded as a
    /// decrement by -1, matching the stack-local variants above).
    fn write_heap_preinc(self: &Self, ty: &Type) -> StackAddress {
        match ty {
            Type::i64 | Type::u64 => self.writer.heap_predeci64(-1),
            Type::i32 | Type::u32 => self.writer.heap_predeci32(-1),
            Type::i16 | Type::u16 => self.writer.heap_predeci16(-1),
            Type::i8 | Type::u8 => self.writer.heap_predeci8(-1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        }
    }
1739
    /// Emits a pre-decrement (by 1) of a heap-addressed integer value.
    fn write_heap_predec(self: &Self, ty: &Type) -> StackAddress {
        match ty {
            Type::i64 | Type::u64 => self.writer.heap_predeci64(1),
            Type::i32 | Type::u32 => self.writer.heap_predeci32(1),
            Type::i16 | Type::u16 => self.writer.heap_predeci16(1),
            Type::i8 | Type::u8 => self.writer.heap_predeci8(1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        }
    }
1750
    /// Emits a post-increment of a heap-addressed integer value (encoded as a
    /// decrement by -1).
    fn write_heap_postinc(self: &Self, ty: &Type) -> StackAddress {
        match ty {
            Type::i64 | Type::u64 => self.writer.heap_postdeci64(-1),
            Type::i32 | Type::u32 => self.writer.heap_postdeci32(-1),
            Type::i16 | Type::u16 => self.writer.heap_postdeci16(-1),
            Type::i8 | Type::u8 => self.writer.heap_postdeci8(-1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        }
    }
1761
    /// Emits a post-decrement (by 1) of a heap-addressed integer value.
    fn write_heap_postdec(self: &Self, ty: &Type) -> StackAddress {
        match ty {
            Type::i64 | Type::u64 => self.writer.heap_postdeci64(1),
            Type::i32 | Type::u32 => self.writer.heap_postdeci32(1),
            Type::i16 | Type::u16 => self.writer.heap_postdeci16(1),
            Type::i8 | Type::u8 => self.writer.heap_postdeci8(1),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        }
    }
1772
    /// Emits a subtraction opcode sized/typed by the operand type.
    fn write_sub(self: &Self, ty: &Type) {
        match ty {
            Type::i8 | Type::u8 => self.writer.subi8(),
            Type::i16 | Type::u16 => self.writer.subi16(),
            Type::i32 | Type::u32 => self.writer.subi32(),
            Type::i64 | Type::u64 => self.writer.subi64(),
            Type::f32 => self.writer.subf32(),
            Type::f64 => self.writer.subf64(),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1785
    /// Emits an addition opcode sized/typed by the operand type.
    /// `+` on strings emits concatenation.
    fn write_add(self: &Self, ty: &Type) {
        match ty {
            Type::i8 | Type::u8 => self.writer.addi8(),
            Type::i16 | Type::u16 => self.writer.addi16(),
            Type::i32 | Type::u32 => self.writer.addi32(),
            Type::i64 | Type::u64 => self.writer.addi64(),
            Type::f32 => self.writer.addf32(),
            Type::f64 => self.writer.addf64(),
            Type::String => self.writer.string_concatx(),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1799
    /// Emits a multiplication opcode sized/typed by the operand type.
    fn write_mul(self: &Self, ty: &Type) {
        match ty {
            Type::i8 | Type::u8 => self.writer.muli8(),
            Type::i16 | Type::u16 => self.writer.muli16(),
            Type::i32 | Type::u32 => self.writer.muli32(),
            Type::i64 | Type::u64 => self.writer.muli64(),
            Type::f32 => self.writer.mulf32(),
            Type::f64 => self.writer.mulf64(),
            _ => unreachable!("Unsupported operation for type {:?}", ty),
        };
    }
1812
1813 fn write_div(self: &Self, ty: &Type) {
1815 match ty {
1816 Type::i8 => self.writer.divs8(),
1817 Type::u8 => self.writer.divu8(),
1818 Type::i16 => self.writer.divs16(),
1819 Type::u16 => self.writer.divu16(),
1820 Type::i32 => self.writer.divs32(),
1821 Type::u32 => self.writer.divu32(),
1822 Type::i64 => self.writer.divs64(),
1823 Type::u64 => self.writer.divu64(),
1824 Type::f32 => self.writer.divf32(),
1825 Type::f64 => self.writer.divf64(),
1826 _ => unreachable!("Unsupported operation for type {:?}", ty),
1827 };
1828 }
1829
1830 fn write_rem(self: &Self, ty: &Type) {
1832 match ty {
1833 Type::i8 => self.writer.rems8(),
1834 Type::u8 => self.writer.remu8(),
1835 Type::i16 => self.writer.rems16(),
1836 Type::u16 => self.writer.remu16(),
1837 Type::i32 => self.writer.rems32(),
1838 Type::u32 => self.writer.remu32(),
1839 Type::i64 => self.writer.rems64(),
1840 Type::u64 => self.writer.remu64(),
1841 Type::f32 => self.writer.remf32(),
1842 Type::f64 => self.writer.remf64(),
1843 _ => unreachable!("Unsupported operation for type {:?}", ty),
1844 };
1845 }
1846
1847 fn write_eq(self: &Self, ty: &Type) {
1849 if ty.is_primitive() {
1850 match ty.primitive_size() {
1851 1 => self.writer.ceq8(),
1852 2 => self.writer.ceq16(),
1853 4 => self.writer.ceq32(),
1854 8 => self.writer.ceq64(),
1855 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
1856 };
1857 } else if ty.is_string() {
1858 self.writer.string_ceq();
1859 } else {
1860 unimplemented!("general heap compare not yet implemented");
1861 }
1862 }
1863
1864 fn write_neq(self: &Self, ty: &Type) {
1866 if ty.is_primitive() {
1867 match ty.primitive_size() {
1868 1 => self.writer.cneq8(),
1869 2 => self.writer.cneq16(),
1870 4 => self.writer.cneq32(),
1871 8 => self.writer.cneq64(),
1872 size @ _ => unreachable!("Unsupported size {} for type {:?}", size, ty),
1873 };
1874 } else if ty.is_string() {
1875 self.writer.string_cneq();
1876 } else {
1877 unimplemented!("general heap compare not yet implemented");
1878 }
1879 }
1880
1881 fn write_lt(self: &Self, ty: &Type) {
1883 if ty.is_primitive() {
1884 match ty {
1885 Type::i8 => self.writer.clts8(),
1886 Type::u8 => self.writer.cltu8(),
1887 Type::i16 => self.writer.clts16(),
1888 Type::u16 => self.writer.cltu16(),
1889 Type::i32 => self.writer.clts32(),
1890 Type::u32 => self.writer.cltu32(),
1891 Type::i64 => self.writer.clts64(),
1892 Type::u64 => self.writer.cltu64(),
1893 Type::f32 => self.writer.cltf32(),
1894 Type::f64 => self.writer.cltf64(),
1895 _ => unreachable!("Unsupported operation for type {:?}", ty),
1896 };
1897 } else if ty.is_string() {
1898 self.writer.string_clt();
1899 } else {
1900 panic!("unsupported type")
1901 }
1902 }
1903
1904 fn write_lte(self: &Self, ty: &Type) {
1906 if ty.is_primitive() {
1907 match ty {
1908 Type::i8 => self.writer.cltes8(),
1909 Type::u8 => self.writer.clteu8(),
1910 Type::i16 => self.writer.cltes16(),
1911 Type::u16 => self.writer.clteu16(),
1912 Type::i32 => self.writer.cltes32(),
1913 Type::u32 => self.writer.clteu32(),
1914 Type::i64 => self.writer.cltes64(),
1915 Type::u64 => self.writer.clteu64(),
1916 Type::f32 => self.writer.cltef32(),
1917 Type::f64 => self.writer.cltef64(),
1918 _ => unreachable!("Unsupported operation for type {:?}", ty),
1919 };
1920 } else if ty.is_string() {
1921 self.writer.string_clte();
1922 } else {
1923 panic!("unsupported type")
1924 }
1925 }
1926}
1927
1928impl<T> Compiler<T> {
1930
1931 fn vtable_function_offset(self: &Self, function_id: FunctionId) -> StackAddress {
1933 let trait_function_id = *self.trait_function_implementors.get(&function_id).unwrap_or(&function_id);
1934 let function_index = *self.trait_function_indices.get(&trait_function_id).expect("Invalid trait function id");
1935 self.trait_vtable_base + (function_index as usize * size_of::<StackAddress>() * self.trait_implementor_indices.len()) as StackAddress
1936 }
1937
1938 fn filter_trait_functions(id_mappings: &IdMappings) -> Vec<(TypeId, &String, FunctionId)> {
1940 id_mappings.traits()
1941 .flat_map(|(type_id, trt)| {
1942 trt.provided.iter().map(move |(function_name, function_id)| (type_id, function_name, function_id.unwrap()))
1943 .chain(trt.required.iter().map(move |(function_name, function_id)| (type_id, function_name, function_id.unwrap())))
1944 })
1945 .collect()
1946 }
1947
1948 fn enumerate_trait_function_indices(trait_functions: &Vec<(TypeId, &String, FunctionId)>) -> UnorderedMap<FunctionId, ItemIndex> {
1950 let mut trait_function_indices = UnorderedMap::new();
1951 for (index, &(_, _, function_id)) in trait_functions.iter().enumerate() {
1952 trait_function_indices.insert(function_id, index as ItemIndex);
1953 }
1954 trait_function_indices
1955 }
1956
1957 fn enumerate_trait_implementor_indices(trait_implementors: &Vec<(TypeId, &Map<TypeId, ImplTrait>)>) -> UnorderedMap<TypeId, ItemIndex> {
1959 let mut trait_implementor_indices = UnorderedMap::new();
1960 for (index, &(type_id, _)) in trait_implementors.iter().enumerate() {
1961 trait_implementor_indices.insert(type_id, index as ItemIndex);
1962 }
1963 trait_implementor_indices
1964 }
1965
1966 fn map_trait_function_implementors(trait_functions: &Vec<(TypeId, &String, FunctionId)>, trait_implementors: &Vec<(TypeId, &Map<TypeId, ImplTrait>)>) -> UnorderedMap<FunctionId, FunctionId> {
1968 let mut trait_function_implementors = UnorderedMap::new();
1969 for &(trait_type_id, function_name, trait_function_id) in trait_functions.iter() {
1970 for &(_, implementor_traits) in trait_implementors.iter() {
1971 if let Some(impl_trait) = implementor_traits.get(&trait_type_id) {
1972 if let Some(&implementor_function_id) = impl_trait.functions.get(function_name) {
1973 trait_function_implementors.insert(implementor_function_id.expect("Unresolved implementor function"), trait_function_id);
1974 }
1975 }
1976 }
1977 }
1978 trait_function_implementors
1979 }
1980
1981 fn select_trait_function_implementations(trait_functions: &Vec<(TypeId, &String, FunctionId)>, trait_implementors: &Vec<(TypeId, &Map<TypeId, ImplTrait>)>) -> Vec<(usize, Option<FunctionId>)> {
1987 let mut trait_implementation_mapping = Vec::new();
1988 for &(trait_type_id, function_name, trait_function_id) in trait_functions.iter() {
1989 for (implementor_index, &(_, implementor_traits)) in trait_implementors.iter().enumerate() {
1990 trait_implementation_mapping.push((implementor_index, match implementor_traits.get(&trait_type_id) {
1991 Some(impl_trait) => *impl_trait.functions.get(function_name).unwrap_or(&Some(trait_function_id)),
1992 None => None,
1993 }));
1994 }
1995 }
1996 trait_implementation_mapping
1997 }
1998}
1999
2000impl<T> TypeContainer for Compiler<T> {
2003 fn type_by_id(self: &Self, type_id: TypeId) -> &Type {
2004 let index: usize = type_id.into();
2005 &self.id_mappings.type_map[index]
2006 }
2007 fn type_by_id_mut(self: &mut Self, type_id: TypeId) -> &mut Type {
2008 let index: usize = type_id.into();
2009 &mut self.id_mappings.type_map[index]
2010 }
2011 fn type_flat_name(self: &Self, _type_id: TypeId) -> Option<&String> {
2012 None }
2014}
2015
#[cfg(feature="compiler")]
impl<T> BindingContainer for Compiler<T> {
    /// Immutable access to the binding registered under `binding_id`.
    fn binding_by_id(self: &Self, binding_id: BindingId) -> &Binding {
        let index: usize = binding_id.into();
        &self.id_mappings.binding_map[index]
    }
    /// Mutable access to the binding registered under `binding_id`.
    fn binding_by_id_mut(self: &mut Self, binding_id: BindingId) -> &mut Binding {
        let index: usize = binding_id.into();
        &mut self.id_mappings.binding_map[index]
    }
}
2027}