use crate::entity::SecondaryMap;
use crate::ir;
use crate::ir::SourceLoc;
use crate::machinst::*;
use crate::settings;
use regalloc::Function as RegallocFunction;
use regalloc::Set as RegallocSet;
use regalloc::{
BlockIx, InstIx, Range, RegAllocResult, RegClass, RegUsageCollector, RegUsageMapper,
};
use alloc::boxed::Box;
use alloc::vec::Vec;
use log::debug;
use smallvec::SmallVec;
use std::fmt;
use std::iter;
use std::string::String;
/// Index of an instruction within a `VCode` body.
pub type InsnIndex = u32;
/// Index of a basic block within a `VCode` body.
pub type BlockIndex = u32;
/// VCodeInst wraps all requirements for a MachInst to be in VCode: it must be
/// a `MachInst` and it must be emittable both to a `MachSection` (actual code
/// bytes) and to a `MachSectionSize` (a size-only dry run).
pub trait VCodeInst: MachInst + MachInstEmit<MachSection> + MachInstEmit<MachSectionSize> {}
// Blanket impl: any instruction type meeting the bounds is a `VCodeInst`.
impl<I: MachInst + MachInstEmit<MachSection> + MachInstEmit<MachSectionSize>> VCodeInst for I {}
/// A function body in machine-instruction form, in "virtual-register" style:
/// instructions refer to virtual registers before register allocation, and to
/// real registers afterward. Holds block structure, source locations, and the
/// ABI implementation used to generate prologue/epilogue code.
pub struct VCode<I: VCodeInst> {
    // Function liveins (registers live at entry), provided by the ABI.
    liveins: RegallocSet<RealReg>,
    // Function liveouts (registers live at return), provided by the ABI.
    liveouts: RegallocSet<RealReg>,
    // Controlling type for each virtual register, indexed by vreg index.
    vreg_types: Vec<Type>,
    // Flat list of all lowered instructions.
    insts: Vec<I>,
    // Source location for each instruction; parallel to `insts`.
    srclocs: Vec<SourceLoc>,
    // Index of the entry block.
    entry: BlockIndex,
    // Half-open instruction ranges [start, end) for each block.
    block_ranges: Vec<(InsnIndex, InsnIndex)>,
    // Half-open index ranges into `block_succs` for each block.
    block_succ_range: Vec<(usize, usize)>,
    // Flattened successor lists for all blocks.
    block_succs: Vec<BlockIndex>,
    // Mapping from IR-level block to VCode block index.
    block_by_bb: SecondaryMap<ir::Block, BlockIndex>,
    // Reverse mapping: VCode block index back to IR-level block.
    bb_by_block: Vec<ir::Block>,
    // Final block ordering, computed after register allocation.
    final_block_order: Vec<BlockIndex>,
    // Code offset of each block in the final ordering; filled by
    // `finalize_branches`.
    final_block_offsets: Vec<CodeOffset>,
    // Total code size, computed by `finalize_branches`.
    code_size: CodeOffset,
    // ABI implementation: prologue/epilogue generation, spill slots, flags.
    abi: Box<dyn ABIBody<I = I>>,
}
/// Builder for a `VCode` body. Lowering proceeds in reverse (last IR
/// instruction first), so instructions are staged per IR instruction, then
/// per block, and un-reversed as they are flushed into the `VCode`.
pub struct VCodeBuilder<I: VCodeInst> {
    // The VCode under construction.
    vcode: VCode<I>,
    // Instructions of the current basic block, in reverse order.
    bb_insns: SmallVec<[(I, SourceLoc); 32]>,
    // Instructions of the current IR instruction, in reverse order.
    ir_inst_insns: SmallVec<[(I, SourceLoc); 4]>,
    // Start of the successor list for the current block within
    // `vcode.block_succs`.
    succ_start: usize,
    // Source location attached to newly pushed instructions.
    cur_srcloc: SourceLoc,
}
impl<I: VCodeInst> VCodeBuilder<I> {
    /// Create a new builder wrapping a fresh, empty `VCode` for the given ABI.
    pub fn new(abi: Box<dyn ABIBody<I = I>>) -> VCodeBuilder<I> {
        VCodeBuilder {
            vcode: VCode::new(abi),
            bb_insns: SmallVec::new(),
            ir_inst_insns: SmallVec::new(),
            succ_start: 0,
            cur_srcloc: SourceLoc::default(),
        }
    }

    /// Mutable access to the underlying ABI implementation.
    pub fn abi(&mut self) -> &mut dyn ABIBody<I = I> {
        &mut *self.vcode.abi
    }

    /// Record the controlling type of a virtual register, growing the type
    /// table with a placeholder (I8) for any not-yet-seen registers below it.
    pub fn set_vreg_type(&mut self, vreg: VirtualReg, ty: Type) {
        let idx = vreg.get_index();
        if self.vcode.vreg_types.len() <= idx {
            self.vcode.vreg_types.resize(idx + 1, ir::types::I8);
        }
        self.vcode.vreg_types[idx] = ty;
    }

    /// The IR-block to VCode-block mapping built by `init_bb_map`.
    pub fn blocks_by_bb(&self) -> &SecondaryMap<ir::Block, BlockIndex> {
        &self.vcode.block_by_bb
    }

    /// Initialize the two-way IR-block <-> VCode-block mapping and return
    /// the number of blocks assigned.
    pub fn init_bb_map(&mut self, blocks: &[ir::Block]) -> BlockIndex {
        for (index, bb) in blocks.iter().enumerate() {
            self.vcode.block_by_bb[*bb] = index as BlockIndex;
            self.vcode.bb_by_block.push(*bb);
        }
        blocks.len() as BlockIndex
    }

    /// Look up the VCode block index for an IR block.
    pub fn bb_to_bindex(&self, bb: ir::Block) -> BlockIndex {
        self.vcode.block_by_bb[bb]
    }

    /// Mark the given VCode block as the function entry.
    pub fn set_entry(&mut self, block: BlockIndex) {
        self.vcode.entry = block;
    }

    /// Finish the current IR instruction: flush its staged machine
    /// instructions into the current block. Popping reverses the staged
    /// order (lowering emits in reverse), restoring reverse-block order.
    pub fn end_ir_inst(&mut self) {
        while let Some(pair) = self.ir_inst_insns.pop() {
            self.bb_insns.push(pair);
        }
    }

    /// Finish the current basic block and return its index. All staged IR
    /// instructions must have been flushed with `end_ir_inst` first.
    pub fn end_bb(&mut self) -> BlockIndex {
        assert!(self.ir_inst_insns.is_empty());
        let bindex = self.vcode.block_ranges.len() as BlockIndex;
        let insn_start = self.vcode.insts.len() as InsnIndex;
        // `bb_insns` holds the block's instructions in reverse; popping
        // restores forward order as they move into the VCode.
        while let Some((insn, loc)) = self.bb_insns.pop() {
            self.vcode.insts.push(insn);
            self.vcode.srclocs.push(loc);
        }
        let insn_end = self.vcode.insts.len() as InsnIndex;
        self.vcode.block_ranges.push((insn_start, insn_end));
        // Close off this block's slice of the flattened successor list.
        let succ_end = self.vcode.block_succs.len();
        self.vcode
            .block_succ_range
            .push((self.succ_start, succ_end));
        self.succ_start = succ_end;
        bindex
    }

    /// Stage one machine instruction for the current IR instruction,
    /// recording any successor edges implied by its terminator.
    pub fn push(&mut self, insn: I) {
        match insn.is_term() {
            MachTerminator::Uncond(target) => self.vcode.block_succs.push(target),
            MachTerminator::Cond(taken, not_taken) => {
                self.vcode.block_succs.push(taken);
                self.vcode.block_succs.push(not_taken);
            }
            MachTerminator::Indirect(targets) => {
                self.vcode.block_succs.extend(targets.iter().cloned());
            }
            MachTerminator::None | MachTerminator::Ret => {}
        }
        self.ir_inst_insns.push((insn, self.cur_srcloc));
    }

    /// Set the source location attached to subsequently pushed instructions.
    pub fn set_srcloc(&mut self, srcloc: SourceLoc) {
        self.cur_srcloc = srcloc;
    }

    /// Consume the builder and return the finished `VCode`. All staged
    /// instructions must have been flushed.
    pub fn build(self) -> VCode<I> {
        assert!(self.ir_inst_insns.is_empty());
        assert!(self.bb_insns.is_empty());
        self.vcode
    }
}
fn block_ranges(indices: &[InstIx], len: usize) -> Vec<(usize, usize)> {
let v = indices
.iter()
.map(|iix| iix.get() as usize)
.chain(iter::once(len))
.collect::<Vec<usize>>();
v.windows(2).map(|p| (p[0], p[1])).collect()
}
/// A move is redundant (and can be elided) if its source and destination are
/// the same register.
fn is_redundant_move<I: VCodeInst>(insn: &I) -> bool {
    match insn.is_move() {
        Some((dst, src)) => dst.to_reg() == src,
        None => false,
    }
}
/// If `block` consists of nothing but a single unconditional jump, return the
/// jump target; otherwise return `None`. Used to redirect edges around
/// trivial forwarding blocks.
fn is_trivial_jump_block<I: VCodeInst>(vcode: &VCode<I>, block: BlockIndex) -> Option<BlockIndex> {
    let range = vcode.block_insns(BlockIx::new(block));
    debug!(
        "is_trivial_jump_block: block {} has len {}",
        block,
        range.len()
    );
    // More than one instruction: not a pure forwarding block.
    if range.len() != 1 {
        return None;
    }
    let only = range.first();
    let term = vcode.get_insn(only).is_term();
    debug!(
        " -> only insn is: {:?} with terminator {:?}",
        vcode.get_insn(only),
        term
    );
    if let MachTerminator::Uncond(target) = term {
        Some(target)
    } else {
        None
    }
}
impl<I: VCodeInst> VCode<I> {
    /// Create a new, empty VCode for the given ABI.
    fn new(abi: Box<dyn ABIBody<I = I>>) -> VCode<I> {
        VCode {
            liveins: abi.liveins(),
            liveouts: abi.liveouts(),
            vreg_types: vec![],
            insts: vec![],
            srclocs: vec![],
            entry: 0,
            block_ranges: vec![],
            block_succ_range: vec![],
            block_succs: vec![],
            block_by_bb: SecondaryMap::with_default(0),
            bb_by_block: vec![],
            final_block_order: vec![],
            final_block_offsets: vec![],
            code_size: 0,
            abi,
        }
    }

    /// Compilation flags, as provided by the ABI.
    pub fn flags(&self) -> &settings::Flags {
        self.abi.flags()
    }

    /// The controlling type of the given virtual register.
    pub fn vreg_type(&self, vreg: VirtualReg) -> Type {
        self.vreg_types[vreg.get_index()]
    }

    /// Index of the entry block.
    pub fn entry(&self) -> BlockIndex {
        self.entry
    }

    /// Number of basic blocks.
    pub fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    /// Stack frame size, delegated to the ABI.
    pub fn frame_size(&self) -> u32 {
        self.abi.frame_size()
    }

    /// Successor blocks of the given block.
    pub fn succs(&self, block: BlockIndex) -> &[BlockIndex] {
        let (start, end) = self.block_succ_range[block as usize];
        &self.block_succs[start..end]
    }

    /// Rebuild the instruction list from the register allocator's output:
    /// blocks are visited in final order, the prologue is inserted at the
    /// entry block, redundant moves are elided, and every return is replaced
    /// by the epilogue. Source locations are carried over from the original
    /// instructions each allocated instruction derives from.
    pub fn replace_insns_from_regalloc(&mut self, result: RegAllocResult<Self>) {
        self.final_block_order = compute_final_block_order(self);
        // Hand the allocation results the ABI needs for prologue/epilogue
        // generation: spill-slot count and clobbered registers.
        self.abi.set_num_spillslots(result.num_spill_slots as usize);
        self.abi
            .set_clobbered(result.clobbered_registers.map(|r| Writable::from_reg(*r)));
        // Per-block instruction ranges in the regalloc's output.
        let block_ranges: Vec<(usize, usize)> =
            block_ranges(result.target_map.elems(), result.insns.len());
        let mut final_insns = vec![];
        let mut final_block_ranges = vec![(0, 0); self.num_blocks()];
        let mut final_srclocs = vec![];
        for block in &self.final_block_order {
            let (start, end) = block_ranges[*block as usize];
            let final_start = final_insns.len() as InsnIndex;
            if *block == self.entry {
                // The entry block begins with the prologue; prologue
                // instructions carry no source location.
                let prologue = self.abi.gen_prologue();
                let len = prologue.len();
                final_insns.extend(prologue.into_iter());
                final_srclocs.extend(iter::repeat(SourceLoc::default()).take(len));
            }
            for i in start..end {
                let insn = &result.insns[i];
                // Elide self-moves; these only become visible once real
                // registers are assigned.
                if is_redundant_move(insn) {
                    continue;
                }
                // Map this instruction back to the original it came from (if
                // any) to recover its source location.
                let orig_iix = result.orig_insn_map[InstIx::new(i as u32)];
                let srcloc = if orig_iix.is_invalid() {
                    SourceLoc::default()
                } else {
                    self.srclocs[orig_iix.get() as usize]
                };
                // Replace each return with the full epilogue (which itself
                // ends in a return).
                let is_ret = insn.is_term() == MachTerminator::Ret;
                if is_ret {
                    let epilogue = self.abi.gen_epilogue();
                    let len = epilogue.len();
                    final_insns.extend(epilogue.into_iter());
                    final_srclocs.extend(iter::repeat(srcloc).take(len));
                } else {
                    final_insns.push(insn.clone());
                    final_srclocs.push(srcloc);
                }
            }
            let final_end = final_insns.len() as InsnIndex;
            final_block_ranges[*block as usize] = (final_start, final_end);
        }
        debug_assert!(final_insns.len() == final_srclocs.len());
        self.insts = final_insns;
        self.srclocs = final_srclocs;
        self.block_ranges = final_block_ranges;
    }

    /// Redirect branches around trivial jump-only blocks, then drop any block
    /// that becomes unreferenced from the final block order.
    pub fn remove_redundant_branches(&mut self) {
        // For each block, the block it should be replaced with as a branch
        // target (itself, if it is not a trivial jump block).
        let block_rewrites: Vec<BlockIndex> = (0..self.num_blocks() as u32)
            .map(|bix| is_trivial_jump_block(self, bix).unwrap_or(bix))
            .collect();
        let mut refcounts: Vec<usize> = vec![0; self.num_blocks()];
        debug!(
            "remove_redundant_branches: block_rewrites = {:?}",
            block_rewrites
        );
        // The entry block is always referenced.
        refcounts[self.entry as usize] = 1;
        for block in 0..self.num_blocks() as u32 {
            for insn in self.block_insns(BlockIx::new(block)) {
                // Rewrite branch targets in place, then count references to
                // each (post-rewrite) target.
                self.get_insn_mut(insn)
                    .with_block_rewrites(&block_rewrites[..]);
                match self.get_insn(insn).is_term() {
                    MachTerminator::Uncond(bix) => {
                        refcounts[bix as usize] += 1;
                    }
                    MachTerminator::Cond(bix1, bix2) => {
                        refcounts[bix1 as usize] += 1;
                        refcounts[bix2 as usize] += 1;
                    }
                    MachTerminator::Indirect(blocks) => {
                        for block in blocks {
                            refcounts[*block as usize] += 1;
                        }
                    }
                    _ => {}
                }
            }
        }
        let deleted: Vec<bool> = refcounts.iter().map(|r| *r == 0).collect();
        // Remove now-unreferenced blocks from the final order.
        let block_order = std::mem::replace(&mut self.final_block_order, vec![]);
        self.final_block_order = block_order
            .into_iter()
            .filter(|b| !deleted[*b as usize])
            .collect();
        // Keep the successor lists consistent with the rewritten targets.
        for succ in &mut self.block_succs {
            let new_succ = block_rewrites[*succ as usize];
            *succ = new_succ;
        }
    }

    /// Resolve branches to concrete code offsets: inform instructions of
    /// their fallthrough blocks, then run two sizing passes — one to compute
    /// block offsets, and one to let instructions resolve those offsets
    /// (branch shortening may not shrink code further on the second pass, so
    /// offsets remain valid).
    pub fn finalize_branches(&mut self)
    where
        I: MachInstEmit<MachSectionSize>,
    {
        // Each block in the final order falls through to the next one, if
        // any. `windows(2)` naturally yields nothing for an empty or
        // single-block order; the previous `0..(len - 1)` range underflowed
        // when the final block order was empty.
        let mut block_fallthrough: Vec<Option<BlockIndex>> = vec![None; self.num_blocks()];
        for pair in self.final_block_order.windows(2) {
            block_fallthrough[pair[0] as usize] = Some(pair[1]);
        }
        // Tell every instruction its block's fallthrough so unnecessary
        // jumps can be dropped.
        for block in 0..self.num_blocks() {
            let next_block = block_fallthrough[block];
            let (start, end) = self.block_ranges[block];
            for iix in start..end {
                let insn = &mut self.insts[iix as usize];
                insn.with_fallthrough_block(next_block);
            }
        }
        let flags = self.abi.flags();
        // First sizing pass: compute the code offset of every block.
        let mut code_section = MachSectionSize::new(0);
        let mut block_offsets = vec![0; self.num_blocks()];
        for &block in &self.final_block_order {
            code_section.offset = I::align_basic_block(code_section.offset);
            block_offsets[block as usize] = code_section.offset;
            let (start, end) = self.block_ranges[block as usize];
            for iix in start..end {
                self.insts[iix as usize].emit(&mut code_section, flags);
            }
        }
        self.final_block_offsets = block_offsets;
        self.code_size = code_section.size();
        // Second sizing pass: give instructions the now-known block offsets
        // so branch targets can be resolved.
        let mut code_section = MachSectionSize::new(0);
        for &block in &self.final_block_order {
            code_section.offset = I::align_basic_block(code_section.offset);
            let (start, end) = self.block_ranges[block as usize];
            for iix in start..end {
                self.insts[iix as usize]
                    .with_block_offsets(code_section.offset, &self.final_block_offsets[..]);
                self.insts[iix as usize].emit(&mut code_section, flags);
            }
        }
    }

    /// Emit the finalized function body to machine-code sections, padding
    /// block-alignment gaps with nops and recording source-location ranges.
    /// Requires `finalize_branches` to have run (for `code_size` and block
    /// offsets).
    pub fn emit(&self) -> MachSections
    where
        I: MachInstEmit<MachSection>,
    {
        let mut sections = MachSections::new();
        let code_idx = sections.add_section(0, self.code_size);
        let code_section = sections.get_section(code_idx);
        let flags = self.abi.flags();
        let mut cur_srcloc = SourceLoc::default();
        for &block in &self.final_block_order {
            // Pad with nops up to the block's alignment boundary.
            let new_offset = I::align_basic_block(code_section.cur_offset_from_start());
            while new_offset > code_section.cur_offset_from_start() {
                let nop = I::gen_nop((new_offset - code_section.cur_offset_from_start()) as usize);
                nop.emit(code_section, flags);
            }
            assert_eq!(code_section.cur_offset_from_start(), new_offset);
            let (start, end) = self.block_ranges[block as usize];
            for iix in start..end {
                // Open/close source-location ranges as the location changes;
                // the default location is represented by no open range.
                let srcloc = self.srclocs[iix as usize];
                if srcloc != cur_srcloc {
                    if !cur_srcloc.is_default() {
                        code_section.end_srcloc();
                    }
                    if !srcloc.is_default() {
                        code_section.start_srcloc(srcloc);
                    }
                    cur_srcloc = srcloc;
                }
                self.insts[iix as usize].emit(code_section, flags);
            }
            if !cur_srcloc.is_default() {
                code_section.end_srcloc();
                cur_srcloc = SourceLoc::default();
            }
        }
        sections
    }

    /// Map a VCode block index back to its original IR block, if it has one.
    pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
        if (block as usize) < self.bb_by_block.len() {
            Some(self.bb_by_block[block as usize])
        } else {
            None
        }
    }
}
impl<I: VCodeInst> RegallocFunction for VCode<I> {
    type Inst = I;

    /// All instructions as one flat slice.
    fn insns(&self) -> &[I] {
        &self.insts
    }

    /// All instructions, mutably.
    fn insns_mut(&mut self) -> &mut [I] {
        &mut self.insts
    }

    /// Single instruction by index.
    fn get_insn(&self, insn: InstIx) -> &I {
        &self.insts[insn.get() as usize]
    }

    /// Single instruction by index, mutably.
    fn get_insn_mut(&mut self, insn: InstIx) -> &mut I {
        &mut self.insts[insn.get() as usize]
    }

    /// Range covering all block indices.
    fn blocks(&self) -> Range<BlockIx> {
        Range::new(BlockIx::new(0), self.block_ranges.len())
    }

    /// The function's entry block.
    fn entry_block(&self) -> BlockIx {
        BlockIx::new(self.entry)
    }

    /// Instruction range of the given block.
    fn block_insns(&self, block: BlockIx) -> Range<InstIx> {
        let (first, last) = self.block_ranges[block.get() as usize];
        // `Range::new` takes a start index and a length.
        Range::new(InstIx::new(first), (last - first) as usize)
    }

    /// Successors of the given block, as regalloc block indices.
    fn block_succs(&self, block: BlockIx) -> Vec<BlockIx> {
        let (start, end) = self.block_succ_range[block.get() as usize];
        self.block_succs[start..end]
            .iter()
            .map(|&succ| BlockIx::new(succ))
            .collect()
    }

    /// Whether the instruction is a return terminator.
    fn is_ret(&self, insn: InstIx) -> bool {
        self.insts[insn.get() as usize].is_term() == MachTerminator::Ret
    }

    /// Collect register uses/defs/mods of an instruction.
    fn get_regs(insn: &I, collector: &mut RegUsageCollector) {
        insn.get_regs(collector)
    }

    /// Rewrite an instruction's registers per the allocator's mapping.
    fn map_regs(insn: &mut I, mapper: &RegUsageMapper) {
        insn.map_regs(mapper);
    }

    /// If the instruction is a register-to-register move, return (dst, src).
    fn is_move(&self, insn: &I) -> Option<(Writable<Reg>, Reg)> {
        insn.is_move()
    }

    /// Estimated virtual-register count, from the size of the type table.
    fn get_vreg_count_estimate(&self) -> Option<usize> {
        Some(self.vreg_types.len())
    }

    /// Spill-slot size for a register class, based on the vreg's type.
    fn get_spillslot_size(&self, regclass: RegClass, vreg: VirtualReg) -> u32 {
        self.abi.get_spillslot_size(regclass, self.vreg_type(vreg))
    }

    /// Generate a spill of `from_reg` into `to_slot`.
    fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, vreg: VirtualReg) -> I {
        self.abi.gen_spill(to_slot, from_reg, self.vreg_type(vreg))
    }

    /// Generate a reload of `from_slot` into `to_reg`.
    fn gen_reload(&self, to_reg: Writable<RealReg>, from_slot: SpillSlot, vreg: VirtualReg) -> I {
        self.abi.gen_reload(to_reg, from_slot, self.vreg_type(vreg))
    }

    /// Generate a register-to-register move of the vreg's type.
    fn gen_move(&self, to_reg: Writable<RealReg>, from_reg: RealReg, vreg: VirtualReg) -> I {
        let ty = self.vreg_type(vreg);
        I::gen_move(to_reg.map(|r| r.to_reg()), from_reg.to_reg(), ty)
    }

    /// A nop taking no encoding space.
    fn gen_zero_len_nop(&self) -> I {
        I::gen_zero_len_nop()
    }

    /// Try to fold a reload of `reg` from `slot` directly into `insn`.
    fn maybe_direct_reload(&self, insn: &I, reg: VirtualReg, slot: SpillSlot) -> Option<I> {
        insn.maybe_direct_reload(reg, slot)
    }

    /// Registers live on function entry.
    fn func_liveins(&self) -> RegallocSet<RealReg> {
        self.liveins.clone()
    }

    /// Registers live on function return.
    fn func_liveouts(&self) -> RegallocSet<RealReg> {
        self.liveouts.clone()
    }
}
impl<I: VCodeInst> fmt::Debug for VCode<I> {
    /// Dump entry block, final order, and every block's successors and
    /// instructions (via their `Debug` impls).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VCode_Debug {{")?;
        writeln!(f, " Entry block: {}", self.entry)?;
        writeln!(f, " Final block order: {:?}", self.final_block_order)?;
        for (block, &(start, end)) in self.block_ranges.iter().enumerate() {
            writeln!(f, "Block {}:", block)?;
            for succ in self.succs(block as BlockIndex) {
                writeln!(f, " (successor: Block {})", succ)?;
            }
            writeln!(f, " (instruction range: {} .. {})", start, end)?;
            for inst in start..end {
                writeln!(f, " Inst {}: {:?}", inst, self.insts[inst as usize])?;
            }
        }
        writeln!(f, "}}")?;
        Ok(())
    }
}
impl<I: VCodeInst + ShowWithRRU> ShowWithRRU for VCode<I> {
    /// Pretty-print the VCode with registers shown via the given universe.
    /// Blocks appear in final order first; blocks absent from the final
    /// order (e.g. deleted ones) follow, marked as omitted.
    fn show_rru(&self, mb_rru: Option<&RealRegUniverse>) -> String {
        use std::fmt::Write;
        // Final-order blocks first...
        let mut display_order: Vec<usize> = self
            .final_block_order
            .iter()
            .map(|bix| {
                assert!((*bix as usize) < self.num_blocks());
                *bix as usize
            })
            .collect();
        // ...then every block not already listed.
        let extras: Vec<usize> = (0..self.num_blocks())
            .filter(|bix| !display_order.contains(bix))
            .collect();
        display_order.extend(extras);
        let mut s = String::new();
        writeln!(&mut s, "VCode_ShowWithRRU {{{{").unwrap();
        writeln!(&mut s, " Entry block: {}", self.entry).unwrap();
        writeln!(&mut s, " Final block order: {:?}", self.final_block_order).unwrap();
        for (i, &block) in display_order.iter().enumerate().take(self.num_blocks()) {
            // Blocks past the final-order prefix are not emitted.
            let omitted =
                if !self.final_block_order.is_empty() && i >= self.final_block_order.len() {
                    "** OMITTED **"
                } else {
                    ""
                };
            writeln!(&mut s, "Block {}: {}", block, omitted).unwrap();
            if let Some(bb) = self.bindex_to_bb(block as BlockIndex) {
                writeln!(&mut s, " (original IR block: {})", bb).unwrap();
            }
            for succ in self.succs(block as BlockIndex) {
                writeln!(&mut s, " (successor: Block {})", succ).unwrap();
            }
            let (start, end) = self.block_ranges[block];
            writeln!(&mut s, " (instruction range: {} .. {})", start, end).unwrap();
            for inst in start..end {
                writeln!(
                    &mut s,
                    " Inst {}: {}",
                    inst,
                    self.insts[inst as usize].show_rru(mb_rru)
                )
                .unwrap();
            }
        }
        writeln!(&mut s, "}}}}").unwrap();
        s
    }
}