1use std::{collections::HashMap, fmt::Display, io::{Write, self}};
2
3use {
4 codegem::ir::{Operation, Terminator, Value, Linkage},
5 codegem::regalloc::RegisterAllocator,
6};
7
8use codegem::{arch::{Instr, InstructionSelector, Location, VCode, VCodeGenerator, VReg, Function}, ir::Type};
9
// Dense indices naming the AArch64 physical registers. Index 0 is the zero
// register (xzr/wzr), indices 1..=31 are x0..x30 in order (x16/x17 are the
// scratch registers ip0/ip1, x29 is the frame pointer, x30 the link
// register), and index 32 is the stack pointer. The numbering is
// deliberately consecutive: `register()` below maps index i in 1..=31 to
// register name i - 1.
pub const AA64_REGISTER_ZERO: usize = 0;
pub const AA64_REGISTER_X0 : usize = 1;
pub const AA64_REGISTER_X1 : usize = 2;
pub const AA64_REGISTER_X2 : usize = 3;
pub const AA64_REGISTER_X3 : usize = 4;
pub const AA64_REGISTER_X4 : usize = 5;
pub const AA64_REGISTER_X5 : usize = 6;
pub const AA64_REGISTER_X6 : usize = 7;
pub const AA64_REGISTER_X7 : usize = 8;
pub const AA64_REGISTER_X8 : usize = 9;
pub const AA64_REGISTER_X9 : usize = 10;
pub const AA64_REGISTER_X10 : usize = 11;
pub const AA64_REGISTER_X11 : usize = 12;
pub const AA64_REGISTER_X12 : usize = 13;
pub const AA64_REGISTER_X13 : usize = 14;
pub const AA64_REGISTER_X14 : usize = 15;
pub const AA64_REGISTER_X15 : usize = 16;
// x16/x17 are the intra-procedure-call scratch registers; ip0 is reused by
// `mandatory_transforms` as the spill scratch register.
pub const AA64_REGISTER_IP0 : usize = 17;
pub const AA64_REGISTER_IP1 : usize = 18;
pub const AA64_REGISTER_X18 : usize = 19;
pub const AA64_REGISTER_X19 : usize = 20;
pub const AA64_REGISTER_X20 : usize = 21;
pub const AA64_REGISTER_X21 : usize = 22;
pub const AA64_REGISTER_X22 : usize = 23;
pub const AA64_REGISTER_X23 : usize = 24;
pub const AA64_REGISTER_X24 : usize = 25;
pub const AA64_REGISTER_X25 : usize = 26;
pub const AA64_REGISTER_X26 : usize = 27;
pub const AA64_REGISTER_X27 : usize = 28;
pub const AA64_REGISTER_X28 : usize = 29;
pub const AA64_REGISTER_FP : usize = 30;
pub const AA64_REGISTER_LR : usize = 31;
pub const AA64_REGISTER_SP : usize = 32;
43
/// Operand width selector: `B64` prints as the 64-bit `x` register prefix,
/// `B32` as the 32-bit `w` prefix (see the `Display` impl).
///
/// `Copy` is derived in addition to `Clone` — the enum is a plain
/// two-variant tag, so callers need not `.clone()` it.
#[derive(Clone, Copy)]
pub enum AA64RegSizes {
    B64, B32
}
48
49impl Display for AA64RegSizes {
50 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
51 match self {
52 AA64RegSizes::B64 => write!(f, "x"),
53 AA64RegSizes::B32 => write!(f, "w"),
54 }
55 }
56}
57
/// Two-operand ALU operations; the `Display` impl yields the AArch64
/// mnemonic for each (note `Div` prints `udiv`, i.e. unsigned division).
pub enum AA64AluOp {
    Add,
    Sub,
    Mul,
    Div,
    // Logical shift left.
    Lsl,
    // Logical shift right.
    Lsr,
    And,
    Orr,
    // Exclusive or.
    Eor,
}
69
70impl Display for AA64AluOp {
71 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
72 match self {
73 AA64AluOp::Add => write!(f, "add"),
74 AA64AluOp::Sub => write!(f, "sub"),
75 AA64AluOp::Mul => write!(f, "mul"),
76 AA64AluOp::Div => write!(f, "udiv"),
77 AA64AluOp::Lsl => write!(f, "lsl"),
78 AA64AluOp::Lsr => write!(f, "lsr"),
79 AA64AluOp::And => write!(f, "and"),
80 AA64AluOp::Orr => write!(f, "orr"),
81 AA64AluOp::Eor => write!(f, "eor"),
82 }
83 }
84}
85
/// Comparison conditions, printed as the AArch64 condition suffixes
/// (eq, ne, gt, ge, lt, le) used with `cset`.
pub enum AA64CompOp {
    EQ, NE,
    GT, GE,
    LT, LE
}
91
92impl Display for AA64CompOp {
93 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
94 match self {
95 AA64CompOp::EQ => write!(f, "eq"),
96 AA64CompOp::NE => write!(f, "ne"),
97 AA64CompOp::GT => write!(f, "gt"),
98 AA64CompOp::GE => write!(f, "ge"),
99 AA64CompOp::LT => write!(f, "lt"),
100 AA64CompOp::LE => write!(f, "le")
101 }
102 }
103}
104
/// The AArch64 machine instructions produced by this backend.
pub enum AA64Instruction {
    /// Placeholder for an SSA phi node; replaced by register moves in
    /// `post_function_generation` and never emitted as assembly.
    PhiPlaceholder {
        rd: VReg,
        options: Vec<(Location, Value)>,
    },

    /// Load a 64-bit constant into `rd` (emitted as `ldr rd, =value`).
    Integer {
        rd: VReg,
        value: u64,
        size: AA64RegSizes
    },

    /// Multiply-subtract, emitted as `msub rd1, rd2, rx, ry`,
    /// i.e. rd1 = ry - rd2 * rx.
    MSub {
        rd1: VReg,
        rd2: VReg,
        rx : VReg,
        ry : VReg,
        size: AA64RegSizes
    },

    /// Three-register ALU operation: rd = rx <op> ry.
    AluOp {
        op: AA64AluOp,
        rd: VReg,
        rx: VReg,
        ry: VReg,
        size: AA64RegSizes
    },

    /// Register-immediate ALU operation: rd = rx <op> imm.
    AluOpImm {
        op: AA64AluOp,
        rd: VReg,
        rx: VReg,
        imm: i16,
        size: AA64RegSizes
    },

    /// Branch with link to `location`. `rd` is the register receiving the
    /// return address (the link register for calls, the zero register when
    /// used as a plain jump); `clobbers` carries the virtual registers
    /// pinned to the argument registers for calls.
    Bl {
        rd: VReg,
        location: Location,
        clobbers: Vec<VReg>,
    },

    /// Compare rx and ry, branch to `location` if they differ
    /// (emitted as a `cmp` + `bne` pair).
    Bne {
        rx: VReg,
        ry: VReg,
        location: Location,
        size: AA64RegSizes
    },

    /// Return from the current function; `write_instruction` also emits the
    /// function's pre-return epilogue before the `ret`.
    Ret,

    /// Load: rd = memory[rx + imm], emitted as `ldr rd, [rx, #imm]`.
    Load {
        rd: VReg,
        imm: i16,
        rx: VReg,
        size: AA64RegSizes
    },

    /// Store: memory[ry + imm] = rx, emitted as `str rx, [ry, #imm]`.
    Store {
        rx: VReg,
        imm: i16,
        ry: VReg,
        size: AA64RegSizes
    },

    /// Compare rx with ry, setting condition flags (`cmp`).
    Compare {
        rx: VReg,
        ry: VReg,
        size: AA64RegSizes
    },

    /// Set rd to 1 or 0 according to condition `cnd` (`cset`).
    CondSet {
        rd: VReg,
        cnd: AA64CompOp,
        size: AA64RegSizes
    },
}
182
impl Display for AA64Instruction {
    /// Placeholder: prints the literal text "unimplemented!" for every
    /// instruction. Actual assembly emission happens in `write_instruction`;
    /// this impl presumably only satisfies a `Display` bound on the `Instr`
    /// trait — TODO confirm against codegem.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "unimplemented!")
    }
}
188
189impl Instr for AA64Instruction {
190 fn get_regs() -> Vec<VReg> {
191 vec![
192 VReg::RealRegister(AA64_REGISTER_X0),
193 VReg::RealRegister(AA64_REGISTER_X1),
194 VReg::RealRegister(AA64_REGISTER_X2),
195 VReg::RealRegister(AA64_REGISTER_X3),
196 VReg::RealRegister(AA64_REGISTER_X4),
197 VReg::RealRegister(AA64_REGISTER_X5),
198 VReg::RealRegister(AA64_REGISTER_X6),
199 VReg::RealRegister(AA64_REGISTER_X7),
200 VReg::RealRegister(AA64_REGISTER_X8),
201 VReg::RealRegister(AA64_REGISTER_X9),
202 VReg::RealRegister(AA64_REGISTER_X10),
203 VReg::RealRegister(AA64_REGISTER_X11),
204 VReg::RealRegister(AA64_REGISTER_X12),
205 VReg::RealRegister(AA64_REGISTER_X13),
206 VReg::RealRegister(AA64_REGISTER_X14),
207 VReg::RealRegister(AA64_REGISTER_X15),
208 VReg::RealRegister(AA64_REGISTER_IP0),
209 VReg::RealRegister(AA64_REGISTER_IP1),
210 VReg::RealRegister(AA64_REGISTER_X18),
211 VReg::RealRegister(AA64_REGISTER_X19),
212 VReg::RealRegister(AA64_REGISTER_X20),
213 VReg::RealRegister(AA64_REGISTER_X21),
214 VReg::RealRegister(AA64_REGISTER_X22),
215 VReg::RealRegister(AA64_REGISTER_X23),
216 VReg::RealRegister(AA64_REGISTER_X24),
217 VReg::RealRegister(AA64_REGISTER_X25),
218 VReg::RealRegister(AA64_REGISTER_X26),
219 VReg::RealRegister(AA64_REGISTER_X27),
220 VReg::RealRegister(AA64_REGISTER_X28),
221 VReg::RealRegister(AA64_REGISTER_FP),
222 VReg::RealRegister(AA64_REGISTER_LR),
223 ]
224 }
225
226 fn get_arg_regs() -> Vec<VReg> {
227 vec![
228 VReg::RealRegister(AA64_REGISTER_X0),
229 VReg::RealRegister(AA64_REGISTER_X1),
230 VReg::RealRegister(AA64_REGISTER_X2),
231 VReg::RealRegister(AA64_REGISTER_X3),
232 VReg::RealRegister(AA64_REGISTER_X4),
233 VReg::RealRegister(AA64_REGISTER_X5),
234 VReg::RealRegister(AA64_REGISTER_X6),
235 VReg::RealRegister(AA64_REGISTER_X7),
236 ]
237 }
238
239 fn collect_registers<A>(&self, alloc: &mut A)
240 where
241 A: RegisterAllocator,
242 {
243 match self {
244 AA64Instruction::PhiPlaceholder { .. } => (),
245
246 AA64Instruction::Integer { rd, .. } => {
247 alloc.add_def(*rd);
248 }
249
250 AA64Instruction::MSub { rd1, rd2, rx, ry, .. } => {
251 alloc.add_def(*rd1);
252 alloc.add_def(*rd2);
253 alloc.add_use(*rx);
254 alloc.add_use(*ry);
255 }
256
257 AA64Instruction::AluOp { rd, rx, ry, .. } => {
258 alloc.add_def(*rd);
259 alloc.add_use(*rx);
260 alloc.add_use(*ry);
261 }
262
263 AA64Instruction::AluOpImm { rd, rx, .. } => {
264 alloc.add_def(*rd);
265 alloc.add_use(*rx);
266 }
267
268 AA64Instruction::Bl { clobbers, .. } => {
269 for (clobber, arg) in clobbers.iter().zip(AA64Instruction::get_arg_regs().into_iter()) {
270 alloc.add_use(*clobber);
271 alloc.force_same(*clobber, arg);
272 }
273 }
274
275 AA64Instruction::Bne { rx, ry, .. } => {
276 alloc.add_use(*rx);
277 alloc.add_use(*ry);
278 }
279
280 AA64Instruction::Ret => (),
281 AA64Instruction::Load { .. } => (),
282 AA64Instruction::Store { .. } => (),
283
284 AA64Instruction::Compare { rx, ry, .. } => {
285 alloc.add_use(*rx);
286 alloc.add_use(*ry);
287 },
288
289 AA64Instruction::CondSet { rd, .. } => {
290 alloc.add_use(*rd);
291 }
292 }
293 }
294
295 fn apply_reg_allocs(&mut self, alloc: &HashMap<VReg, VReg>) {
296 match self {
297 AA64Instruction::PhiPlaceholder { .. } => (),
298
299 AA64Instruction::Integer { rd, .. } => {
300 if let Some(new) = alloc.get(rd) {
301 *rd = *new;
302 }
303 }
304
305 AA64Instruction::MSub { rd1, rd2, rx, ry, .. } => {
306 if let Some(new) = alloc.get(rd1) {
307 *rd1 = *new;
308 }
309 if let Some(new) = alloc.get(rd2) {
310 *rd2 = *new;
311 }
312 if let Some(new) = alloc.get(rx) {
313 *rx = *new;
314 }
315 if let Some(new) = alloc.get(ry) {
316 *ry = *new;
317 }
318 }
319
320 AA64Instruction::AluOp { rd, rx, ry, .. } => {
321 if let Some(new) = alloc.get(rd) {
322 *rd = *new;
323 }
324 if let Some(new) = alloc.get(rx) {
325 *rx = *new;
326 }
327 if let Some(new) = alloc.get(ry) {
328 *ry = *new;
329 }
330 }
331
332 AA64Instruction::AluOpImm { rd, rx, .. } => {
333 if let Some(new) = alloc.get(rd) {
334 *rd = *new;
335 }
336 if let Some(new) = alloc.get(rx) {
337 *rx = *new;
338 }
339 }
340
341 AA64Instruction::Bl { rd, .. } => {
342 if let Some(new) = alloc.get(rd) {
343 *rd = *new;
344 }
345 }
346
347 AA64Instruction::Bne { rx, ry, .. } => {
348 if let Some(new) = alloc.get(rx) {
349 *rx = *new;
350 }
351 if let Some(new) = alloc.get(ry) {
352 *ry = *new;
353 }
354 }
355
356 AA64Instruction::Ret => (),
357
358 AA64Instruction::Load { .. } => (),
359
360 AA64Instruction::Store { .. } => (),
361
362 AA64Instruction::Compare { rx, ry, .. } => {
363 if let Some(new) = alloc.get(rx) {
364 *rx = *new;
365 }
366 if let Some(new) = alloc.get(ry) {
367 *ry = *new;
368 }
369 },
370
371 AA64Instruction::CondSet { rd, .. } => {
372 if let Some(new) = alloc.get(rd) {
373 *rd = *new;
374 }
375 },
376 }
377 }
378
379 fn mandatory_transforms(vcode: &mut VCode<Self>) {
380 for func in vcode.functions.iter_mut() {
381 for labelled in func.labels.iter_mut() {
382 let mut swaps = Vec::new();
383 #[derive(Copy, Clone)]
384 enum SwapType {
385 Rd,
386 Rx,
387 Ry,
388 }
389 use SwapType::*;
390
391 for (i, instruction) in labelled.instructions.iter_mut().enumerate() {
392 match instruction {
393 AA64Instruction::PhiPlaceholder { .. } => (),
394
395 AA64Instruction::Integer { rd, .. } => {
396 if let VReg::Spilled(spill) = *rd {
397 swaps.push((i, spill, Rd));
398 *rd = VReg::RealRegister(AA64_REGISTER_IP0);
399 }
400 }
401
402 AA64Instruction::MSub { rd1, rd2, rx, ry, .. } => {
403 if let VReg::Spilled(spill) = *rx {
404 swaps.push((i, spill, Rx));
405 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
406 }
407 if let VReg::Spilled(spill) = *ry {
408 swaps.push((i, spill, Ry));
409 *ry = VReg::RealRegister(AA64_REGISTER_IP0);
410 }
411 if let VReg::Spilled(spill) = *rd1 {
412 swaps.push((i, spill, Rd));
413 *rd1 = VReg::RealRegister(AA64_REGISTER_IP0);
414 }
415 if let VReg::Spilled(spill) = *rd2 {
416 swaps.push((i, spill, Rd));
417 *rd2 = VReg::RealRegister(AA64_REGISTER_IP0);
418 }
419 }
420
421 AA64Instruction::AluOp { rd, rx, ry, .. } => {
422 if let VReg::Spilled(spill) = *rx {
423 swaps.push((i, spill, Rx));
424 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
425 }
426 if let VReg::Spilled(spill) = *ry {
427 swaps.push((i, spill, Ry));
428 *ry = VReg::RealRegister(AA64_REGISTER_IP0);
429 }
430 if let VReg::Spilled(spill) = *rd {
431 swaps.push((i, spill, Rd));
432 *rd = VReg::RealRegister(AA64_REGISTER_IP0);
433 }
434 }
435
436 AA64Instruction::AluOpImm { rd, rx, .. } => {
437 if let VReg::Spilled(spill) = *rx {
438 swaps.push((i, spill, Rx));
439 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
440 }
441 if let VReg::Spilled(spill) = *rd {
442 swaps.push((i, spill, Rd));
443 *rd = VReg::RealRegister(AA64_REGISTER_IP0);
444 }
445 }
446
447 AA64Instruction::Bl { .. } => (),
448
449 AA64Instruction::Bne { rx, ry, .. } => {
450 if let VReg::Spilled(spill) = *rx {
451 swaps.push((i, spill, Rx));
452 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
453 }
454 if let VReg::Spilled(spill) = *ry {
455 swaps.push((i, spill, Ry));
456 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
457 }
458 }
459
460 AA64Instruction::Ret => (),
461
462 AA64Instruction::Load { .. } => (),
463
464 AA64Instruction::Store { .. } => (),
465
466 AA64Instruction::Compare { rx, ry, .. } => {
467 if let VReg::Spilled(spill) = *rx {
468 swaps.push((i, spill, Rx));
469 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
470 }
471 if let VReg::Spilled(spill) = *ry {
472 swaps.push((i, spill, Ry));
473 *rx = VReg::RealRegister(AA64_REGISTER_IP0);
474 }
475 },
476
477 AA64Instruction::CondSet { rd, .. } => {
478 if let VReg::Spilled(spill) = *rd {
479 swaps.push((i, spill, Rd));
480 *rd = VReg::RealRegister(AA64_REGISTER_IP0);
481 }
482 }
483 }
484 }
485
486 for (index, spill, swap_type) in swaps.into_iter().rev() {
487 match swap_type {
488 Rd => {
489 labelled.instructions.insert(index + 1, AA64Instruction::Store {
490 rx: VReg::RealRegister(AA64_REGISTER_IP0),
491 imm: spill as i16 * -8,
492 ry: VReg::RealRegister(AA64_REGISTER_FP),
493 size: AA64RegSizes::B64,
494 });
495 }
496
497 Rx => {
498 labelled.instructions.insert(index, AA64Instruction::Load {
499 rd: VReg::RealRegister(AA64_REGISTER_IP0),
500 imm: spill as i16 * -8,
501 rx: VReg::RealRegister(AA64_REGISTER_FP),
502 size: AA64RegSizes::B64,
503 });
504 }
505
506 Ry => {
507 labelled.instructions.insert(index, AA64Instruction::Load {
508 rd: VReg::RealRegister(AA64_REGISTER_IP0),
509 imm: spill as i16 * -8,
510 rx: VReg::RealRegister(AA64_REGISTER_FP),
511 size: AA64RegSizes::B64,
512 });
513 }
514 }
515 }
516 }
517 }
518 }
519
520 fn emit_assembly(file: &mut impl Write, vcode: &VCode<Self>) -> io::Result<()> {
521 for func in vcode.functions.iter() {
522 match func.linkage {
523 Linkage::External => {
524 writeln!(file, ".extern {}", func.name)?;
525 continue;
526 }
527
528 Linkage::Private => (),
529
530 Linkage::Public => {
531 writeln!(file, ".global {}", func.name)?;
532 }
533 }
534
535 writeln!(file, "{}:", func.name)?;
536 for instruction in func.pre_labels.iter() {
537 write_instruction(file, vcode, func, instruction)?;
538 }
539 for (i, labelled) in func.labels.iter().enumerate() {
540 writeln!(file, ".{}.L{}:", func.name, i)?;
541 for instruction in labelled.instructions.iter() {
542 write_instruction(file, vcode, func, instruction)?;
543 }
544 }
545
546 writeln!(file)?;
547 }
548 Ok(())
549 }
550}
551
552fn write_instruction(file: &mut impl Write, vcode: &VCode<AA64Instruction>, func: &Function<AA64Instruction>, instruction: &AA64Instruction) -> io::Result<()> {
553 match instruction {
554 AA64Instruction::PhiPlaceholder { .. } => (),
555
556 AA64Instruction::Integer { rd, value , size} => {
557 writeln!(file, " ldr {}, ={}", register(*rd, size.clone()), value)?;
558 }
559
560 AA64Instruction::MSub { rd1, rd2, rx, ry, size } => {
561 writeln!(file, " msub {}, {}, {}, {}",
562 register(*rd1, size.clone()), register(*rd2, size.clone()),
563 register(*rx , size.clone()), register(*ry , size.clone())
564 )?;
565 }
566
567 AA64Instruction::AluOp { op, rd, rx, ry, size } => {
568 writeln!(file, " {} {}, {}, {}", op, register(*rd, size.clone()), register(*rx, size.clone()), register(*ry, size.clone()))?;
569 }
570
571 AA64Instruction::AluOpImm { op: AA64AluOp::Sub, rd, rx, imm, size } => {
572 writeln!(file, " addi {}, {}, {}", register(*rd, size.clone()), register(*rx, size.clone()), -imm)?;
573 }
574
575 AA64Instruction::AluOpImm { op, rd, rx, imm, size } => {
576 writeln!(file, " {} {}, {}, {}", op, register(*rd, size.clone()), register(*rx, size.clone()), imm)?;
577 }
578
579 AA64Instruction::Bl { rd: _, location, .. } => {
580 match *location {
581 Location::InternalLabel(_) => {
582 writeln!(file, " bl .{}{}", func.name, location)?;
583 }
584 Location::Function(f) => {
585 writeln!(file, " bl {}", vcode.functions[f].name)?;
586 }
587 }
588 }
589
590 AA64Instruction::Bne { rx, ry, location, size } => {
591 match *location {
592 Location::InternalLabel(_) => {
593 writeln!(file, " cmp {}, {}", register(*rx, size.clone()), register(*ry, size.clone()))?;
594 writeln!(file, " bne .{}{}", func.name, location)?;
595 }
596 Location::Function(f) => {
597 writeln!(file, " cmp {}, {}", register(*rx, size.clone()), register(*ry, size.clone()))?;
598 writeln!(file, " bne {}", vcode.functions[f].name)?;
599 }
600 }
601 }
602
603 AA64Instruction::Ret => {
604 for instruction in func.pre_return.iter() {
605 write_instruction(file, vcode, func, instruction)?;
606 }
607
608 writeln!(file, " ret")?;
609 }
610
611 AA64Instruction::Load { rd, imm, rx, size } => {
612 writeln!(file, " ldr {}, [{}, #{}]", register(*rd, size.clone()), register(*rx, size.clone()), imm)?;
613 }
614
615 AA64Instruction::Store { rx, imm, ry, size } => {
616 writeln!(file, " str {}, [{}, #{}]", register(*rx, size.clone()), register(*ry, size.clone()), imm)?;
617 }
618
619 AA64Instruction::Compare { rx, ry, size } => {
620 writeln!(file, " cmp {}, {}", register(*rx, size.clone()), register(*ry, size.clone()))?;
621 }
622
623 AA64Instruction::CondSet { rd, cnd, size } => {
624 writeln!(file, " cset {}, {}", register(*rd, size.clone()), cnd)?;
625 }
626 }
627
628 Ok(())
629}
630
631fn auto_size(t: &Type) -> AA64RegSizes {
632 match t {
633 Type::Integer(_, v) => match v {
634 0..=32 => AA64RegSizes::B32,
635 _ => AA64RegSizes::B64
636 }
637 _ => AA64RegSizes::B64
638 }
639}
640
641fn register(reg: VReg, s: AA64RegSizes) -> String {
642 match reg {
643 VReg::RealRegister(reg) => {
644 match reg {
645 AA64_REGISTER_ZERO => format!("{}zr", s),
646 AA64_REGISTER_X0 => format!("{}0", s),
647 AA64_REGISTER_X1 => format!("{}1", s),
648 AA64_REGISTER_X2 => format!("{}2", s),
649 AA64_REGISTER_X3 => format!("{}3", s),
650 AA64_REGISTER_X4 => format!("{}4", s),
651 AA64_REGISTER_X5 => format!("{}5", s),
652 AA64_REGISTER_X6 => format!("{}6", s),
653 AA64_REGISTER_X7 => format!("{}7", s),
654 AA64_REGISTER_X8 => format!("{}8", s),
655 AA64_REGISTER_X9 => format!("{}9", s),
656 AA64_REGISTER_X10 => format!("{}10", s),
657 AA64_REGISTER_X11 => format!("{}11", s),
658 AA64_REGISTER_X12 => format!("{}12", s),
659 AA64_REGISTER_X13 => format!("{}13", s),
660 AA64_REGISTER_X14 => format!("{}14", s),
661 AA64_REGISTER_X15 => format!("{}15", s),
662 AA64_REGISTER_IP0 => format!("{}16", s),
663 AA64_REGISTER_IP1 => format!("{}17", s),
664 AA64_REGISTER_X18 => format!("{}18", s),
665 AA64_REGISTER_X19 => format!("{}19", s),
666 AA64_REGISTER_X20 => format!("{}20", s),
667 AA64_REGISTER_X21 => format!("{}21", s),
668 AA64_REGISTER_X22 => format!("{}22", s),
669 AA64_REGISTER_X23 => format!("{}23", s),
670 AA64_REGISTER_X24 => format!("{}24", s),
671 AA64_REGISTER_X25 => format!("{}25", s),
672 AA64_REGISTER_X26 => format!("{}26", s),
673 AA64_REGISTER_X27 => format!("{}27", s),
674 AA64_REGISTER_X28 => format!("{}28", s),
675 AA64_REGISTER_FP => format!("{}29", s),
676 AA64_REGISTER_LR => format!("{}30", s),
677 AA64_REGISTER_SP => format!("sp"),
678 _ => unreachable!(),
679 }
680 }
681
682 VReg::Virtual(_) => unreachable!(),
683
684 VReg::Spilled(s) => format!("#-{}(fp)", 8 * s),
685 }
686}
687
/// Instruction selector lowering codegem IR into `AA64Instruction`s.
#[derive(Default)]
pub struct AA64Selector;
690
691impl InstructionSelector for AA64Selector {
692 type Instruction = AA64Instruction;
693
    /// Emit the function prologue (instructions placed before the first
    /// label) and queue the matching epilogue (instructions placed before
    /// every return).
    ///
    /// Prologue: allocate a 16-byte frame record with the old fp saved at
    /// [sp, #8] and lr at [sp, #0], point fp at it, then push the
    /// callee-saved registers below it. The epilogue mirrors this in
    /// reverse order.
    fn select_pre_function_instructions(&mut self, gen: &mut VCodeGenerator<Self::Instruction, Self>) {
        // sp -= 16: room for the fp/lr pair.
        gen.push_prelabel_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_SP),
            rx: VReg::RealRegister(AA64_REGISTER_SP),
            imm: -16,
            size: AA64RegSizes::B64,
        });
        // Save the caller's frame pointer at [sp, #8].
        gen.push_prelabel_instruction(AA64Instruction::Store {
            rx: VReg::RealRegister(AA64_REGISTER_FP),
            imm: 8,
            ry: VReg::RealRegister(AA64_REGISTER_SP),
            size: AA64RegSizes::B64,
        });
        // Save the link register at [sp, #0].
        gen.push_prelabel_instruction(AA64Instruction::Store {
            rx: VReg::RealRegister(AA64_REGISTER_LR),
            imm: 0,
            ry: VReg::RealRegister(AA64_REGISTER_SP),
            size: AA64RegSizes::B64,
        });
        // fp = sp: fp now points at the saved lr/fp pair, and spill slots
        // are addressed at negative offsets from it.
        gen.push_prelabel_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_FP),
            rx: VReg::RealRegister(AA64_REGISTER_SP),
            imm: 0,
            size: AA64RegSizes::B64,
        });

        // Callee-saved registers preserved across this function.
        // NOTE(review): the zero register at the end presumably pads the
        // save area to 12 slots (96 bytes) to keep sp 16-byte aligned —
        // confirm; storing/loading xzr is otherwise a no-op.
        let callee_saved_regs = [
            AA64_REGISTER_X19,
            AA64_REGISTER_X20,
            AA64_REGISTER_X21,
            AA64_REGISTER_X22,
            AA64_REGISTER_X23,
            AA64_REGISTER_X24,
            AA64_REGISTER_X25,
            AA64_REGISTER_X26,
            AA64_REGISTER_X27,
            AA64_REGISTER_X28,
            AA64_REGISTER_FP,
            AA64_REGISTER_ZERO
        ];
        // sp -= 8 * count, then store each callee-saved register into its
        // slot above the new sp.
        gen.push_prelabel_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_SP),
            rx: VReg::RealRegister(AA64_REGISTER_SP),
            imm: -(callee_saved_regs.len() as i16 * 8),
            size: AA64RegSizes::B64,
        });
        for (i, &reg) in callee_saved_regs.iter().enumerate() {
            gen.push_prelabel_instruction(AA64Instruction::Store {
                rx: VReg::RealRegister(reg),
                imm: (i as i16) * 8,
                ry: VReg::RealRegister(AA64_REGISTER_SP),
                size: AA64RegSizes::B64,
            });
        }
        // Epilogue, part 1: reload the callee-saved registers and pop them.
        for (i, &reg) in callee_saved_regs.iter().enumerate() {
            gen.push_prereturn_instruction(AA64Instruction::Load {
                rd: VReg::RealRegister(reg),
                imm: (i as i16) * 8,
                rx: VReg::RealRegister(AA64_REGISTER_SP),
                size: AA64RegSizes::B64,
            });
        }
        gen.push_prereturn_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_SP),
            rx: VReg::RealRegister(AA64_REGISTER_SP),
            imm: callee_saved_regs.len() as i16 * 8,
            size: AA64RegSizes::B64,
        });

        // Epilogue, part 2: sp = fp, reload lr from [fp, #0] and the old fp
        // from [fp, #8] (fp is reloaded last since it is the base register),
        // then pop the 16-byte frame record.
        gen.push_prereturn_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_SP),
            rx: VReg::RealRegister(AA64_REGISTER_FP),
            imm: 0,
            size: AA64RegSizes::B64,
        });
        gen.push_prereturn_instruction(AA64Instruction::Load {
            rd: VReg::RealRegister(AA64_REGISTER_LR),
            imm: 0,
            rx: VReg::RealRegister(AA64_REGISTER_FP),
            size: AA64RegSizes::B64,
        });
        gen.push_prereturn_instruction(AA64Instruction::Load {
            rd: VReg::RealRegister(AA64_REGISTER_FP),
            imm: 8,
            rx: VReg::RealRegister(AA64_REGISTER_FP),
            size: AA64RegSizes::B64,
        });
        gen.push_prereturn_instruction(AA64Instruction::AluOpImm {
            op: AA64AluOp::Add,
            rd: VReg::RealRegister(AA64_REGISTER_SP),
            rx: VReg::RealRegister(AA64_REGISTER_SP),
            imm: 16,
            size: AA64RegSizes::B64,
        });
    }
795
796 fn select_instr(
797 &mut self,
798 gen: &mut VCodeGenerator<Self::Instruction, Self>,
799 result: Option<Value>,
800 op: Operation,
801 ) {
802 let rd = match result {
803 Some(val) => {
804 gen.get_vreg(val)
805 }
806
807 None => VReg::RealRegister(AA64_REGISTER_ZERO),
808 };
809
810 match op {
811 Operation::Identity(value) => {
812 let rx = gen.get_vreg(value);
813 gen.push_instruction(AA64Instruction::AluOp { op: AA64AluOp::Add, rd, rx, ry: VReg::RealRegister(AA64_REGISTER_ZERO), size: AA64RegSizes::B64 });
814 }
815
816 Operation::Integer(typ, mut value) => {
817 while value.len() < 8 {
819 value.push(0);
820 }
821 let value = u64::from_le_bytes(value[..8].try_into().unwrap());
822 gen.push_instruction(AA64Instruction::Integer { rd, value, size: auto_size(&typ) });
823 }
824
825 Operation::Add(a, b)
826 | Operation::Sub(a, b)
827 | Operation::Mul(a, b)
828 | Operation::Div(a, b)
829 | Operation::Bsl(a, b)
830 | Operation::Bsr(a, b)
831 | Operation::BitAnd(a, b)
832 | Operation::BitOr (a, b)
833 | Operation::BitXor(a, b) => {
834 let rx = gen.get_vreg(a);
835 let ry = gen.get_vreg(b);
836
837 gen.push_instruction(AA64Instruction::AluOp {
838 op: match op {
839 Operation::Add(_, _) => AA64AluOp::Add,
840 Operation::Sub(_, _) => AA64AluOp::Sub,
841 Operation::Mul(_, _) => AA64AluOp::Mul,
842 Operation::Div(_, _) => AA64AluOp::Div,
843 Operation::Bsl(_, _) => AA64AluOp::Lsl,
844 Operation::Bsr(_, _) => AA64AluOp::Lsr,
845 Operation::BitAnd(_, _) => AA64AluOp::And,
846 Operation::BitOr(_, _) => AA64AluOp::Orr,
847 Operation::BitXor(_, _) => AA64AluOp::Eor,
848 _ => unreachable!(),
849 }, rd, rx, ry, size: AA64RegSizes::B64 });
850 }
851
852 Operation::Mod(a, b) => {
853 let rx = gen.get_vreg(a);
854 let ry = gen.get_vreg(b);
855 let rt = gen.new_unassociated_vreg();
856 gen.push_instruction(AA64Instruction::AluOp {
857 op: AA64AluOp::Div,
858 rd: rt, rx, ry, size: AA64RegSizes::B64
859 });
860 gen.push_instruction(AA64Instruction::MSub {
861 rd1: rd, rd2: rd, rx: ry, ry: rx, size: AA64RegSizes::B64
862 });
863 }
864
865 Operation::Eq(a, b) => {
866 let rx = gen.get_vreg(b);
867 let ry = gen.get_vreg(a);
868 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
869 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::EQ, size: AA64RegSizes::B64 });
870 }
871
872 Operation::Ne(a, b) => {
873 let rx = gen.get_vreg(b);
874 let ry = gen.get_vreg(a);
875 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
876 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::NE, size: AA64RegSizes::B64 });
877 }
878
879 Operation::Lt(a, b) => {
880 let rx = gen.get_vreg(b);
881 let ry = gen.get_vreg(a);
882 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
883 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::LT, size: AA64RegSizes::B64 });
884 }
885
886 Operation::Le(a, b) => {
887 let rx = gen.get_vreg(b);
888 let ry = gen.get_vreg(a);
889 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
890 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::LE, size: AA64RegSizes::B64 });
891 }
892
893 Operation::Gt(a, b) => {
894 let rx = gen.get_vreg(b);
895 let ry = gen.get_vreg(a);
896 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
897 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::GT, size: AA64RegSizes::B64 });
898 }
899
900 Operation::Ge(a, b) => {
901 let rx = gen.get_vreg(b);
902 let ry = gen.get_vreg(a);
903 gen.push_instruction(AA64Instruction::Compare { rx, ry, size: AA64RegSizes::B64 });
904 gen.push_instruction(AA64Instruction::CondSet { rd, cnd: AA64CompOp::GE, size: AA64RegSizes::B64 });
905 }
906
907 Operation::Phi(mapping) => {
908 gen.push_instruction(AA64Instruction::PhiPlaceholder {
909 rd,
910 options: mapping
911 .into_iter()
912 .filter_map(|(b, v)| {
913 if let Some(&l) = gen.label_map().get(&b) {
914 Some((Location::InternalLabel(l), v))
915 } else {
916 None
917 }
918 })
919 .collect(),
920 });
921 }
922
923 Operation::GetVar(_) => unreachable!(),
924 Operation::SetVar(_, _) => unreachable!(),
925
926 Operation::Call(f, args) => {
927 if let Some(&f) = gen.func_map().get(&f) {
928 let mut save_regs = AA64Instruction::get_arg_regs();
930 save_regs.push(VReg::RealRegister(AA64_REGISTER_X9));
931 save_regs.push(VReg::RealRegister(AA64_REGISTER_X10));
932 save_regs.push(VReg::RealRegister(AA64_REGISTER_X11));
933 save_regs.push(VReg::RealRegister(AA64_REGISTER_X12));
934 save_regs.push(VReg::RealRegister(AA64_REGISTER_X13));
935 save_regs.push(VReg::RealRegister(AA64_REGISTER_X14));
936 save_regs.push(VReg::RealRegister(AA64_REGISTER_X15));
937 save_regs.push(VReg::RealRegister(AA64_REGISTER_ZERO));
938 gen.push_instruction(AA64Instruction::AluOpImm {
939 op: AA64AluOp::Add,
940 rd: VReg::RealRegister(AA64_REGISTER_SP),
941 rx: VReg::RealRegister(AA64_REGISTER_SP),
942 imm: -(save_regs.len() as i16 * 8),
943 size: AA64RegSizes::B64
944 });
945 for (i, &rx) in save_regs.iter().enumerate() {
946 gen.push_instruction(AA64Instruction::Store {
947 rx,
948 imm: i as i16 * 8,
949 ry: VReg::RealRegister(AA64_REGISTER_SP),
950 size: AA64RegSizes::B64
951 });
952 }
953
954 let clobbers: Vec<_> = args.into_iter().map(|v| {
955 let clobber = gen.new_unassociated_vreg();
956
957 let rx = gen.get_vreg(v);
958 gen.push_instruction(AA64Instruction::AluOp {
959 op: AA64AluOp::Add,
960 rd: clobber,
961 rx,
962 ry: VReg::RealRegister(AA64_REGISTER_ZERO),
963 size: AA64RegSizes::B64
964 });
965
966 clobber
967 }).collect();
968 gen.push_instruction(AA64Instruction::Bl {
969 rd: VReg::RealRegister(AA64_REGISTER_LR),
970 location: Location::Function(f),
971 clobbers,
972 });
973
974 gen.push_instruction(AA64Instruction::AluOp {
975 op: AA64AluOp::Add,
976 rd,
977 rx: VReg::RealRegister(AA64_REGISTER_X0),
978 ry: VReg::RealRegister(AA64_REGISTER_ZERO),
979 size: AA64RegSizes::B64
980 });
981
982 let rd_ = rd;
984 for (i, &rd) in save_regs.iter().enumerate() {
985 if rd == rd_ {
986 continue;
987 }
988 gen.push_instruction(AA64Instruction::Load {
989 rd,
990 imm: i as i16 * 8,
991 rx: VReg::RealRegister(AA64_REGISTER_SP),
992 size: AA64RegSizes::B64
993 });
994 }
995 gen.push_instruction(AA64Instruction::AluOpImm {
996 op: AA64AluOp::Add,
997 rd: VReg::RealRegister(AA64_REGISTER_SP),
998 rx: VReg::RealRegister(AA64_REGISTER_SP),
999 imm: (save_regs.len() as i16 * 8),
1000 size: AA64RegSizes::B64
1001 });
1002 }
1003 }
1004
1005 Operation::CallIndirect(_, _) => todo!(),
1006 Operation::Load(_) => todo!(),
1007 Operation::Store(_, _) => todo!(),
1008
1009 Operation::Bitcast(_, v) => {
1010 let rx = gen.get_vreg(v);
1011 gen.push_instruction(AA64Instruction::AluOpImm {
1012 op: AA64AluOp::Add,
1013 rd,
1014 rx,
1015 imm: 0,
1016 size: AA64RegSizes::B64
1017 });
1018 },
1019
1020 Operation::BitExtend(_, v) => {
1021 let rx = gen.get_vreg(v);
1022 gen.push_instruction(AA64Instruction::AluOpImm {
1023 op: AA64AluOp::Add,
1024 rd,
1025 rx,
1026 imm: 0,
1027 size: AA64RegSizes::B64
1028 });
1029 },
1030 Operation::BitReduce(t, v) => {
1031 let mask = match t {
1032 codegem::ir::Type::Integer(_, n) => 1 << n - 1,
1033 _ => panic!()
1034 };
1035
1036 let rx = gen.get_vreg(v);
1037 gen.push_instruction(AA64Instruction::AluOpImm {
1038 op: AA64AluOp::And,
1039 rd,
1040 rx,
1041 imm: mask,
1042 size: AA64RegSizes::B64
1043 });
1044 },
1045 }
1046 }
1047
    /// Lower an IR block terminator into branch/return instructions.
    fn select_term(&mut self, gen: &mut VCodeGenerator<Self::Instruction, Self>, op: Terminator) {
        match op {
            Terminator::NoTerminator => (),

            Terminator::ReturnVoid => {
                gen.push_instruction(AA64Instruction::Ret);
            }

            // Move the return value into x0, then return.
            Terminator::Return(v) => {
                let rx = gen.get_vreg(v);
                gen.push_instruction(AA64Instruction::AluOpImm {
                    op: AA64AluOp::Add,
                    rd: VReg::RealRegister(AA64_REGISTER_X0),
                    rx,
                    imm: 0,
                    size: AA64RegSizes::B64
                });
                gen.push_instruction(AA64Instruction::Ret);
            }

            // Unconditional jump.
            // NOTE(review): this reuses Bl, which write_instruction emits as
            // `bl` — that writes the link register even though rd here is
            // the zero register. A plain `b` would avoid clobbering x30;
            // confirm this is intended (lr is saved/restored by the
            // prologue/epilogue).
            Terminator::Jump(label) => {
                if let Some(&label) = gen.label_map().get(&label) {
                    gen.push_instruction(AA64Instruction::Bl {
                        rd: VReg::RealRegister(AA64_REGISTER_ZERO),
                        location: Location::InternalLabel(label),
                        clobbers: Vec::new(),
                    });
                }
            }

            // Conditional branch: jump to l1 when v != 0, otherwise fall
            // through to the unconditional jump to l2 (same `bl` caveat as
            // above).
            Terminator::Branch(v, l1, l2) => {
                let rx = gen.get_vreg(v);
                if let Some(&l1) = gen.label_map().get(&l1) {
                    gen.push_instruction(AA64Instruction::Bne {
                        rx,
                        ry: VReg::RealRegister(AA64_REGISTER_ZERO),
                        location: Location::InternalLabel(l1),
                        size: AA64RegSizes::B64
                    });
                }
                if let Some(&l2) = gen.label_map().get(&l2) {
                    gen.push_instruction(AA64Instruction::Bl {
                        rd: VReg::RealRegister(AA64_REGISTER_ZERO),
                        location: Location::InternalLabel(l2),
                        clobbers: Vec::new(),
                    });
                }
            }
        }
    }
1098
    /// Replace every `PhiPlaceholder` with explicit register moves.
    ///
    /// For each phi, a move (`add rd, rx, xzr`) is inserted into every
    /// predecessor block listed in the phi's options, just before that
    /// block's final instruction (assumed to be its terminating branch).
    fn post_function_generation(&mut self, func: &mut Function<Self::Instruction>, gen: &mut VCodeGenerator<Self::Instruction, Self>) {
        // First collect the positions of all placeholders...
        let mut v = Vec::new();
        for (i, labelled) in func.labels.iter().enumerate() {
            for (j, instr) in labelled.instructions.iter().enumerate() {
                if let AA64Instruction::PhiPlaceholder { .. } = instr {
                    v.push((i, j));
                }
            }
        }

        // ...then remove them in reverse order so the indices collected
        // above stay valid while we mutate the instruction lists.
        for (label_index, instr_index) in v.into_iter().rev() {
            let phi = func.labels[label_index].instructions.remove(instr_index);
            if let AA64Instruction::PhiPlaceholder { rd, options } = phi {
                for (label, v) in options {
                    if let Location::InternalLabel(label) = label {
                        let rx = gen.get_vreg(v);
                        let labelled = &mut func.labels[label];
                        // Insert before the block's last instruction, which
                        // is expected to be its branch.
                        labelled.instructions.insert(
                            labelled.instructions.len() - 1,
                            AA64Instruction::AluOp {
                                op: AA64AluOp::Add,
                                rd,
                                rx,
                                ry: VReg::RealRegister(AA64_REGISTER_ZERO),
                                size: AA64RegSizes::B64
                            },
                        );
                    }
                }
            }
        }
    }
1131
    /// No target-specific pass is needed after code generation for this
    /// backend.
    fn post_generation(&mut self, _vcode: &mut VCode<Self::Instruction>) { }
1133}