use crate::fx::FxHashMap;
use crate::fx::FxHashSet;
use crate::ir::RelSourceLoc;
use crate::ir::{self, types, Constant, ConstantData, DynamicStackSlot, LabelValueLoc, ValueLabel};
use crate::machinst::*;
use crate::timing;
use crate::trace;
use crate::ValueLocRange;
use regalloc2::{
Edit, Function as RegallocFunction, InstOrEdit, InstRange, Operand, OperandKind, PReg, PRegSet,
RegClass, VReg,
};
use alloc::vec::Vec;
use cranelift_entity::{entity_impl, Keys, PrimaryMap};
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::fmt;
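/// Index referring to an instruction in VCode.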
pub type InsnIndex = regalloc2::Inst;
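/// Index referring to a basic block in VCode.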
pub type BlockIndex = regalloc2::Block;
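/// VCodeInst wraps the requirements for a concrete instruction type to live
/// in VCode: it must implement both `MachInst` and `MachInstEmit`.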
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}
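/// A function in "VCode" (virtual-register code) form: a CFG of basic blocks
/// containing lowered machine instructions that refer to virtual registers
/// prior to register allocation. Operands for all instructions are stored in
/// the single contiguous `operands` array, with a per-instruction
/// `(start, end)` range in `operand_ranges`, as regalloc2 expects.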
pub struct VCode<I: VCodeInst> {
vreg_types: Vec<Type>,
have_ref_values: bool,
insts: Vec<I>,
operands: Vec<Operand>,
operand_ranges: Vec<(u32, u32)>,
clobbers: FxHashMap<InsnIndex, PRegSet>,
is_move: FxHashMap<InsnIndex, (Operand, Operand)>,
srclocs: Vec<RelSourceLoc>,
entry: BlockIndex,
block_ranges: Vec<(InsnIndex, InsnIndex)>,
block_succ_range: Vec<(u32, u32)>,
block_pred_range: Vec<(u32, u32)>,
block_succs_preds: Vec<regalloc2::Block>,
block_params_range: Vec<(u32, u32)>,
block_params: Vec<regalloc2::VReg>,
branch_block_args: Vec<regalloc2::VReg>,
branch_block_arg_range: Vec<(u32, u32)>,
branch_block_arg_succ_range: Vec<(u32, u32)>,
vreg_aliases: FxHashMap<regalloc2::VReg, regalloc2::VReg>,
block_order: BlockLoweringOrder,
pub(crate) abi: Callee<I::ABIMachineSpec>,
emit_info: I::Info,
reftyped_vregs: Vec<VReg>,
reftyped_vregs_set: FxHashSet<VReg>,
constants: VCodeConstants,
debug_value_labels: Vec<(VReg, InsnIndex, InsnIndex, u32)>,
pub(crate) sigs: SigSet,
}
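/// The result of `VCode::emit`: the machine-code buffer plus metadata about
/// the emitted function (basic-block offsets and edges, per-instruction
/// offsets, optional disassembly, value-label ranges, stack-slot offsets,
/// frame size, and required code alignment).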
pub struct EmitResult<I: VCodeInst> {
pub buffer: MachBuffer<I>,
pub bb_offsets: Vec<CodeOffset>,
pub bb_edges: Vec<(CodeOffset, CodeOffset)>,
pub inst_offsets: Vec<CodeOffset>,
pub func_body_len: CodeOffset,
pub disasm: Option<String>,
pub sized_stackslot_offsets: PrimaryMap<StackSlot, u32>,
pub dynamic_stackslot_offsets: PrimaryMap<DynamicStackSlot, u32>,
pub value_labels_ranges: ValueLabelsRanges,
pub frame_size: u32,
pub alignment: u32,
}
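/// A builder for a VCode function body. Blocks and instructions are added in
/// the order given by `direction` (currently backward only); `build()`
/// reverses everything into forward program order and resolves
/// virtual-register aliases before the VCode is handed to the register
/// allocator.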
pub struct VCodeBuilder<I: VCodeInst> {
pub(crate) vcode: VCode<I>,
direction: VCodeBuildDirection,
block_start: usize,
succ_start: usize,
block_params_start: usize,
branch_block_arg_succ_start: usize,
cur_srcloc: RelSourceLoc,
debug_info: FxHashMap<ValueLabel, Vec<(InsnIndex, InsnIndex, VReg)>>,
}
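/// Direction in which a VCodeBuilder builds VCode: only the backward
/// direction is implemented so far.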
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VCodeBuildDirection {
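/// Backward-build pass: the producer calls `push()` in reverse program
/// order within each block, and `build()` reverses the result.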
Backward,
}
impl<I: VCodeInst> VCodeBuilder<I> {
pub fn new(
sigs: SigSet,
abi: Callee<I::ABIMachineSpec>,
emit_info: I::Info,
block_order: BlockLoweringOrder,
constants: VCodeConstants,
direction: VCodeBuildDirection,
) -> VCodeBuilder<I> {
let vcode = VCode::new(sigs, abi, emit_info, block_order, constants);
VCodeBuilder {
vcode,
direction,
block_start: 0,
succ_start: 0,
block_params_start: 0,
branch_block_arg_succ_start: 0,
cur_srcloc: Default::default(),
debug_info: FxHashMap::default(),
}
}
pub fn init_abi(&mut self, temps: Vec<Writable<Reg>>) {
self.vcode.abi.init(&self.vcode.sigs, temps);
}
pub fn abi(&self) -> &Callee<I::ABIMachineSpec> {
&self.vcode.abi
}
pub fn abi_mut(&mut self) -> &mut Callee<I::ABIMachineSpec> {
&mut self.vcode.abi
}
pub fn sigs(&self) -> &SigSet {
&self.vcode.sigs
}
pub fn sigs_mut(&mut self) -> &mut SigSet {
&mut self.vcode.sigs
}
pub fn block_order(&self) -> &BlockLoweringOrder {
&self.vcode.block_order
}
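/// Set the type of a virtual register. Reference-typed (`R32`/`R64`) vregs
/// are additionally tracked so the register allocator can keep references
/// findable at safepoints.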
pub fn set_vreg_type(&mut self, vreg: VirtualReg, ty: Type) {
if self.vcode.vreg_types.len() <= vreg.index() {
self.vcode
.vreg_types
.resize(vreg.index() + 1, ir::types::I8);
}
self.vcode.vreg_types[vreg.index()] = ty;
if is_reftype(ty) {
let vreg: VReg = vreg.into();
if self.vcode.reftyped_vregs_set.insert(vreg) {
self.vcode.reftyped_vregs.push(vreg);
}
self.vcode.have_ref_values = true;
}
}
pub fn get_vreg_type(&self, vreg: VirtualReg) -> Type {
self.vcode.vreg_types[vreg.index()]
}
pub fn set_entry(&mut self, block: BlockIndex) {
self.vcode.entry = block;
}
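/// End the current basic block. Must be called after all instructions,
/// successors, and block params for the block have been added; records the
/// instruction, successor, block-param, and branch-arg ranges for it.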
pub fn end_bb(&mut self) {
let start_idx = self.block_start;
let end_idx = self.vcode.insts.len();
self.block_start = end_idx;
self.vcode
.block_ranges
.push((InsnIndex::new(start_idx), InsnIndex::new(end_idx)));
let succ_end = self.vcode.block_succs_preds.len();
self.vcode
.block_succ_range
.push((self.succ_start as u32, succ_end as u32));
self.succ_start = succ_end;
let block_params_end = self.vcode.block_params.len();
self.vcode
.block_params_range
.push((self.block_params_start as u32, block_params_end as u32));
self.block_params_start = block_params_end;
let branch_block_arg_succ_end = self.vcode.branch_block_arg_range.len();
self.vcode.branch_block_arg_succ_range.push((
self.branch_block_arg_succ_start as u32,
branch_block_arg_succ_end as u32,
));
self.branch_block_arg_succ_start = branch_block_arg_succ_end;
}
pub fn add_block_param(&mut self, param: VirtualReg, ty: Type) {
self.set_vreg_type(param, ty);
self.vcode.block_params.push(param.into());
}
fn add_branch_args_for_succ(&mut self, args: &[Reg]) {
let start = self.vcode.branch_block_args.len();
self.vcode
.branch_block_args
.extend(args.iter().map(|&arg| VReg::from(arg)));
let end = self.vcode.branch_block_args.len();
self.vcode
.branch_block_arg_range
.push((start as u32, end as u32));
}
pub fn push(&mut self, insn: I) {
self.vcode.insts.push(insn);
self.vcode.srclocs.push(self.cur_srcloc);
}
pub fn add_succ(&mut self, block: BlockIndex, args: &[Reg]) {
self.vcode.block_succs_preds.push(block);
self.add_branch_args_for_succ(args);
}
pub fn set_srcloc(&mut self, srcloc: RelSourceLoc) {
self.cur_srcloc = srcloc;
}
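/// Attach a value label to `reg` at the current point: the label is recorded
/// as living in `reg` from the end of its previous range (or instruction 0)
/// up to the current instruction index.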
pub fn add_value_label(&mut self, reg: Reg, label: ValueLabel) {
let inst = InsnIndex::new(self.vcode.insts.len());
let labels = self.debug_info.entry(label).or_insert_with(|| vec![]);
let last = labels
.last()
.map(|(_start, end, _vreg)| *end)
.unwrap_or(InsnIndex::new(0));
labels.push((last, inst, reg.into()));
}
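/// Declare that vreg `from` is an alias of `to`. Alias chains are fully
/// resolved when operands are collected in `build()`.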
pub fn set_vreg_alias(&mut self, from: Reg, to: Reg) {
let from = from.into();
let resolved_to = self.resolve_vreg_alias(to.into());
assert_ne!(resolved_to, from);
self.vcode.vreg_aliases.insert(from, resolved_to);
}
pub fn resolve_vreg_alias(&self, from: regalloc2::VReg) -> regalloc2::VReg {
Self::resolve_vreg_alias_impl(&self.vcode.vreg_aliases, from)
}
fn resolve_vreg_alias_impl(
aliases: &FxHashMap<regalloc2::VReg, regalloc2::VReg>,
from: regalloc2::VReg,
) -> regalloc2::VReg {
let mut vreg = from;
while let Some(to) = aliases.get(&vreg) {
vreg = *to;
}
vreg
}
pub fn constants(&mut self) -> &mut VCodeConstants {
&mut self.vcode.constants
}
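/// Compute predecessor lists from the already-recorded successor lists:
/// collect (succ, pred) edges, sort them, and append each block's preds to
/// the shared `block_succs_preds` pool with ranges in `block_pred_range`.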
fn compute_preds_from_succs(&mut self) {
let mut succ_pred_edges: Vec<(BlockIndex, BlockIndex)> =
Vec::with_capacity(self.vcode.block_succs_preds.len());
for (pred, &(start, end)) in self.vcode.block_succ_range.iter().enumerate() {
let pred = BlockIndex::new(pred);
for i in start..end {
let succ = BlockIndex::new(self.vcode.block_succs_preds[i as usize].index());
succ_pred_edges.push((succ, pred));
}
}
succ_pred_edges.sort_unstable();
let mut i = 0;
for succ in 0..self.vcode.num_blocks() {
let succ = BlockIndex::new(succ);
let start = self.vcode.block_succs_preds.len();
while i < succ_pred_edges.len() && succ_pred_edges[i].0 == succ {
let pred = succ_pred_edges[i].1;
self.vcode.block_succs_preds.push(pred);
i += 1;
}
let end = self.vcode.block_succs_preds.len();
self.vcode.block_pred_range.push((start as u32, end as u32));
}
}
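/// Reverse the per-block and per-instruction sequences built in backward
/// order into forward program order, translating all recorded
/// instruction-index endpoints accordingly.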
fn reverse_and_finalize(&mut self) {
let n_insts = self.vcode.insts.len();
if n_insts == 0 {
return;
}
self.vcode.block_ranges.reverse();
self.vcode.block_params_range.reverse();
self.vcode.block_succ_range.reverse();
self.vcode.insts.reverse();
self.vcode.srclocs.reverse();
self.vcode.branch_block_arg_succ_range.reverse();
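// To translate an instruction-index *endpoint* from reversed to forward
// order, compute `n_insts - i`, not `n_insts - 1 - i`: under reversal an
// inclusive start becomes an exclusive end and vice versa, so endpoints
// are shifted by one relative to individual instruction indices.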
let translate = |inst: InsnIndex| InsnIndex::new(n_insts - inst.index());
for tuple in &mut self.vcode.block_ranges {
let (start, end) = *tuple;
*tuple = (translate(end), translate(start));
}
for (label, tuples) in &self.debug_info {
for &(start, end, vreg) in tuples {
let vreg = self.resolve_vreg_alias(vreg);
let fwd_start = translate(end);
let fwd_end = translate(start);
self.vcode
.debug_value_labels
.push((vreg, fwd_start, fwd_end, label.as_u32()));
}
}
self.vcode
.debug_value_labels
.sort_unstable_by_key(|(vreg, _, _, _)| *vreg);
}
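/// Collect each instruction's operands into the flat `operands` array,
/// resolving vreg aliases along the way. Clobbers are kept out-of-band in a
/// per-instruction map, and move instructions are recorded so regalloc2 can
/// perform move elision.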
fn collect_operands(&mut self) {
for (i, insn) in self.vcode.insts.iter().enumerate() {
let vreg_aliases = &self.vcode.vreg_aliases;
let mut op_collector = OperandCollector::new(&mut self.vcode.operands, |vreg| {
Self::resolve_vreg_alias_impl(vreg_aliases, vreg)
});
insn.get_operands(&mut op_collector);
let (ops, clobbers) = op_collector.finish();
self.vcode.operand_ranges.push(ops);
if clobbers != PRegSet::default() {
self.vcode.clobbers.insert(InsnIndex::new(i), clobbers);
}
if let Some((dst, src)) = insn.is_move() {
let src = Operand::reg_use(Self::resolve_vreg_alias_impl(vreg_aliases, src.into()));
let dst = Operand::reg_def(Self::resolve_vreg_alias_impl(
vreg_aliases,
dst.to_reg().into(),
));
self.vcode.is_move.insert(InsnIndex::new(i), (src, dst));
}
}
for arg in &mut self.vcode.branch_block_args {
let new_arg = Self::resolve_vreg_alias_impl(&self.vcode.vreg_aliases, *arg);
trace!("operandcollector: block arg {:?} -> {:?}", arg, new_arg);
*arg = new_arg;
}
}
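/// Called once the builder has added all blocks and instructions; performs
/// the backward-to-forward reversal if needed and returns the final VCode,
/// ready for register allocation.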
pub fn build(mut self) -> VCode<I> {
if self.direction == VCodeBuildDirection::Backward {
self.reverse_and_finalize();
}
self.collect_operands();
for reg in self.vcode.reftyped_vregs.iter_mut() {
*reg = Self::resolve_vreg_alias_impl(&self.vcode.vreg_aliases, *reg);
}
self.vcode.reftyped_vregs.sort();
self.vcode.reftyped_vregs.dedup();
self.compute_preds_from_succs();
self.vcode.debug_value_labels.sort_unstable();
self.vcode
}
}
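/// Is this type a reference type (held in `R32`/`R64`)?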
fn is_reftype(ty: Type) -> bool {
ty == types::R64 || ty == types::R32
}
impl<I: VCodeInst> VCode<I> {
fn new(
sigs: SigSet,
abi: Callee<I::ABIMachineSpec>,
emit_info: I::Info,
block_order: BlockLoweringOrder,
constants: VCodeConstants,
) -> VCode<I> {
let n_blocks = block_order.lowered_order().len();
VCode {
sigs,
vreg_types: vec![],
have_ref_values: false,
insts: Vec::with_capacity(10 * n_blocks),
operands: Vec::with_capacity(30 * n_blocks),
operand_ranges: Vec::with_capacity(10 * n_blocks),
clobbers: FxHashMap::default(),
is_move: FxHashMap::default(),
srclocs: Vec::with_capacity(10 * n_blocks),
entry: BlockIndex::new(0),
block_ranges: Vec::with_capacity(n_blocks),
block_succ_range: Vec::with_capacity(n_blocks),
block_succs_preds: Vec::with_capacity(2 * n_blocks),
block_pred_range: Vec::with_capacity(n_blocks),
block_params_range: Vec::with_capacity(n_blocks),
block_params: Vec::with_capacity(5 * n_blocks),
branch_block_args: Vec::with_capacity(10 * n_blocks),
branch_block_arg_range: Vec::with_capacity(2 * n_blocks),
branch_block_arg_succ_range: Vec::with_capacity(n_blocks),
block_order,
abi,
emit_info,
reftyped_vregs: vec![],
reftyped_vregs_set: FxHashSet::default(),
constants,
debug_value_labels: vec![],
vreg_aliases: FxHashMap::with_capacity_and_hasher(10 * n_blocks, Default::default()),
}
}
pub fn num_blocks(&self) -> usize {
self.block_ranges.len()
}
pub fn succs(&self, block: BlockIndex) -> &[BlockIndex] {
let (start, end) = self.block_succ_range[block.index()];
&self.block_succs_preds[start as usize..end as usize]
}
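/// Compute the set of physical registers clobbered by this function: every
/// register written by a regalloc edit (move destination), by a def/mod
/// operand's allocation, or listed in an instruction's clobber set, for
/// instructions that opt into clobber accounting via
/// `is_included_in_clobbers`.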
fn compute_clobbers(&self, regalloc: &regalloc2::Output) -> Vec<Writable<RealReg>> {
let mut clobbered = vec![];
let mut clobbered_set = FxHashSet::default();
for edit in &regalloc.edits {
let Edit::Move { to, .. } = edit.1;
if let Some(preg) = to.as_reg() {
let reg = RealReg::from(preg);
if clobbered_set.insert(reg) {
clobbered.push(Writable::from_reg(reg));
}
}
}
for (i, (start, end)) in self.operand_ranges.iter().enumerate() {
if !self.insts[i].is_included_in_clobbers() {
continue;
}
let start = *start as usize;
let end = *end as usize;
let operands = &self.operands[start..end];
let allocs = &regalloc.allocs[start..end];
for (operand, alloc) in operands.iter().zip(allocs.iter()) {
if operand.kind() == OperandKind::Use {
continue;
}
if let Some(preg) = alloc.as_reg() {
let reg = RealReg::from(preg);
if clobbered_set.insert(reg) {
clobbered.push(Writable::from_reg(reg));
}
}
}
for preg in self
.clobbers
.get(&InsnIndex::new(i))
.cloned()
.unwrap_or_default()
{
let reg = RealReg::from(preg);
if clobbered_set.insert(reg) {
clobbered.push(Writable::from_reg(reg));
}
}
}
clobbered
}
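/// Emit the instructions to a `MachBuffer`, applying the register
/// allocator's output (edits and allocations) along the way. Returns the
/// machine code plus metadata: basic-block offsets and edges, optional
/// disassembly, per-instruction offsets, value-label ranges, and frame
/// layout information.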
pub fn emit(
mut self,
regalloc: &regalloc2::Output,
want_disasm: bool,
want_metadata: bool,
) -> EmitResult<I>
where
I: VCodeInst,
{
use core::fmt::Write;
let _tt = timing::vcode_emit();
let mut buffer = MachBuffer::new();
let mut bb_starts: Vec<Option<CodeOffset>> = vec![];
buffer.reserve_labels_for_blocks(self.num_blocks());
buffer.reserve_labels_for_constants(&self.constants);
let mut final_order: SmallVec<[BlockIndex; 16]> = smallvec![];
let mut cold_blocks: SmallVec<[BlockIndex; 16]> = smallvec![];
for block in 0..self.num_blocks() {
let block = BlockIndex::new(block);
if self.block_order.is_cold(block) {
cold_blocks.push(block);
} else {
final_order.push(block);
}
}
final_order.extend(cold_blocks.clone());
let clobbers = self.compute_clobbers(regalloc);
self.abi.set_num_spillslots(regalloc.num_spillslots);
self.abi.set_clobbered(clobbers);
let prologue_insts = self.abi.gen_prologue(&self.sigs);
let mut cur_srcloc = None;
let mut last_offset = None;
let mut inst_offsets = vec![];
let mut state = I::State::new(&self.abi);
let mut disasm = String::new();
if !self.debug_value_labels.is_empty() {
inst_offsets.resize(self.insts.len(), 0);
}
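// Count the regalloc-inserted move edits per block up front, so that the
// worst-case code size of each block (for branch-island placement below)
// can be bounded.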
let mut ra_edits_per_block: SmallVec<[u32; 64]> = smallvec![];
let mut edit_idx = 0;
for block in 0..self.num_blocks() {
let end_inst = self.block_ranges[block].1;
let start_edit_idx = edit_idx;
while edit_idx < regalloc.edits.len() && regalloc.edits[edit_idx].0.inst() < end_inst {
edit_idx += 1;
}
let end_edit_idx = edit_idx;
ra_edits_per_block.push((end_edit_idx - start_edit_idx) as u32);
}
let is_forward_edge_cfi_enabled = self.abi.is_forward_edge_cfi_enabled();
for (block_order_idx, &block) in final_order.iter().enumerate() {
trace!("emitting block {:?}", block);
let new_offset = I::align_basic_block(buffer.cur_offset());
while new_offset > buffer.cur_offset() {
let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
nop.emit(&[], &mut buffer, &self.emit_info, &mut Default::default());
}
assert_eq!(buffer.cur_offset(), new_offset);
let do_emit = |inst: &I,
allocs: &[Allocation],
disasm: &mut String,
buffer: &mut MachBuffer<I>,
state: &mut I::State| {
if want_disasm && !inst.is_args() {
let mut s = state.clone();
writeln!(disasm, " {}", inst.pretty_print_inst(allocs, &mut s)).unwrap();
}
inst.emit(allocs, buffer, &self.emit_info, state);
};
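// The entry block additionally gets the prologue, emitted under a
// default (function-start) source location.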
if block == self.entry {
trace!(" -> entry block");
buffer.start_srcloc(Default::default());
state.pre_sourceloc(Default::default());
for inst in &prologue_insts {
do_emit(&inst, &[], &mut disasm, &mut buffer, &mut state);
}
buffer.end_srcloc();
}
buffer.bind_label(MachLabel::from_block(block));
if want_disasm {
writeln!(&mut disasm, "block{}:", block.index()).unwrap();
}
if want_metadata {
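// Track basic-block starts. If the buffer's offset has not advanced
// past previously recorded starts (which can happen when MachBuffer
// branch optimization elides empty blocks), clear those entries.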
let cur_offset = buffer.cur_offset();
if last_offset.is_some() && cur_offset <= last_offset.unwrap() {
for i in (0..bb_starts.len()).rev() {
if bb_starts[i].is_some() && cur_offset > bb_starts[i].unwrap() {
break;
}
bb_starts[i] = None;
}
}
bb_starts.push(Some(cur_offset));
last_offset = Some(cur_offset);
}
if let Some(block_start) = I::gen_block_start(
self.block_order.is_indirect_branch_target(block),
is_forward_edge_cfi_enabled,
) {
do_emit(&block_start, &[], &mut disasm, &mut buffer, &mut state);
}
for inst_or_edit in regalloc.block_insts_and_edits(&self, block) {
match inst_or_edit {
InstOrEdit::Inst(iix) => {
if !self.debug_value_labels.is_empty() {
if !self.block_order.is_cold(block) {
inst_offsets[iix.index()] = buffer.cur_offset();
}
}
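// Skip moves from the pre-regalloc program: regalloc2 has already
// folded them into its own move handling, and any still-needed
// moves come back out as edits.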
if self.insts[iix.index()].is_move().is_some() {
continue;
}
let srcloc = self.srclocs[iix.index()];
if cur_srcloc != Some(srcloc) {
if cur_srcloc.is_some() {
buffer.end_srcloc();
}
buffer.start_srcloc(srcloc);
cur_srcloc = Some(srcloc);
}
state.pre_sourceloc(cur_srcloc.unwrap_or_default());
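// If this instruction is a safepoint, collect the reference-typed
// spill slots live across it into a stack map. `safepoint_slots` is
// sorted by program point; the comparator below never returns
// `Equal`, so the binary search always "fails" and `unwrap_err`
// yields the index of the first entry at this instruction.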
if self.insts[iix.index()].is_safepoint() {
let mut safepoint_slots: SmallVec<[SpillSlot; 8]> = smallvec![];
let safepoint_slots_start = regalloc
.safepoint_slots
.binary_search_by(|(progpoint, _alloc)| {
if progpoint.inst() >= iix {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Less
}
})
.unwrap_err();
for (_, alloc) in regalloc.safepoint_slots[safepoint_slots_start..]
.iter()
.take_while(|(progpoint, _)| progpoint.inst() == iix)
{
let slot = alloc.as_stack().unwrap();
safepoint_slots.push(slot);
}
if !safepoint_slots.is_empty() {
let stack_map = self
.abi
.spillslots_to_stack_map(&safepoint_slots[..], &state);
state.pre_safepoint(stack_map);
}
}
let allocs = regalloc.inst_allocs(iix);
if self.insts[iix.index()].is_term() == MachTerminator::Ret {
for inst in self.abi.gen_epilogue() {
do_emit(&inst, &[], &mut disasm, &mut buffer, &mut state);
}
} else {
do_emit(
&self.insts[iix.index()],
allocs,
&mut disasm,
&mut buffer,
&mut state,
);
}
}
InstOrEdit::Edit(Edit::Move { from, to }) => {
match (from.as_reg(), to.as_reg()) {
(Some(from), Some(to)) => {
let from_rreg = Reg::from(from);
let to_rreg = Writable::from_reg(Reg::from(to));
debug_assert_eq!(from.class(), to.class());
let ty = I::canonical_type_for_rc(from.class());
let mv = I::gen_move(to_rreg, from_rreg, ty);
do_emit(&mv, &[], &mut disasm, &mut buffer, &mut state);
}
(Some(from), None) => {
let to = to.as_stack().unwrap();
let from_rreg = RealReg::from(from);
let spill = self.abi.gen_spill(to, from_rreg);
do_emit(&spill, &[], &mut disasm, &mut buffer, &mut state);
}
(None, Some(to)) => {
let from = from.as_stack().unwrap();
let to_rreg = Writable::from_reg(RealReg::from(to));
let reload = self.abi.gen_reload(to_rreg, from);
do_emit(&reload, &[], &mut disasm, &mut buffer, &mut state);
}
(None, None) => {
panic!("regalloc2 should have eliminated stack-to-stack moves!");
}
}
}
}
}
if cur_srcloc.is_some() {
buffer.end_srcloc();
cur_srcloc = None;
}
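// Do we need an island to anchor pending constants or branch veneers?
// Bound the worst-case size of the next block, including
// regalloc-inserted moves, and emit an island now if the buffer
// requires one.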
if block_order_idx < final_order.len() - 1 {
let next_block = final_order[block_order_idx + 1];
let next_block_range = self.block_ranges[next_block.index()];
let next_block_size =
(next_block_range.1.index() - next_block_range.0.index()) as u32;
let next_block_ra_insertions = ra_edits_per_block[next_block.index()];
let worst_case_next_bb =
I::worst_case_size() * (next_block_size + next_block_ra_insertions);
if buffer.island_needed(worst_case_next_bb) {
buffer.emit_island(worst_case_next_bb);
}
}
}
let mut alignment = 1;
for (constant, data) in self.constants.iter() {
alignment = data.alignment().max(alignment);
let label = buffer.get_label_for_constant(constant);
buffer.defer_constant(label, data.alignment(), data.as_slice(), u32::max_value());
}
let func_body_len = buffer.cur_offset();
let mut bb_edges = vec![];
let mut bb_offsets = vec![];
if want_metadata {
for block in 0..self.num_blocks() {
if bb_starts[block].is_none() {
continue;
}
let from = bb_starts[block].unwrap();
bb_offsets.push(from);
let succs = self.block_succs(BlockIndex::new(block));
for &succ in succs.iter() {
let to = buffer.resolve_label_offset(MachLabel::from_block(succ));
bb_edges.push((from, to));
}
}
}
let value_labels_ranges =
self.compute_value_labels_ranges(regalloc, &inst_offsets[..], func_body_len);
let frame_size = self.abi.frame_size();
EmitResult {
buffer,
bb_offsets,
bb_edges,
inst_offsets,
func_body_len,
disasm: if want_disasm { Some(disasm) } else { None },
sized_stackslot_offsets: self.abi.sized_stackslot_offsets().clone(),
dynamic_stackslot_offsets: self.abi.dynamic_stackslot_offsets().clone(),
value_labels_ranges,
frame_size,
alignment,
}
}
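/// Translate the register allocator's debug locations (per-label, in
/// instruction-index space) into `ValueLabelsRanges` keyed by code offset,
/// keeping only values that live in registers.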
fn compute_value_labels_ranges(
&self,
regalloc: &regalloc2::Output,
inst_offsets: &[CodeOffset],
func_body_len: u32,
) -> ValueLabelsRanges {
if self.debug_value_labels.is_empty() {
return ValueLabelsRanges::default();
}
let mut value_labels_ranges: ValueLabelsRanges = HashMap::new();
for &(label, from, to, alloc) in &regalloc.debug_locations {
let ranges = value_labels_ranges
.entry(ValueLabel::from_u32(label))
.or_insert_with(|| vec![]);
let from_offset = inst_offsets[from.inst().index()];
let to_offset = if to.inst().index() == inst_offsets.len() {
func_body_len
} else {
inst_offsets[to.inst().index()]
};
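// A `to_offset` of zero means the offset was never recorded (e.g. a
// cold-block instruction); skip empty or unrecorded ranges.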
if to_offset == 0 || from_offset == to_offset {
continue;
}
let loc = if let Some(preg) = alloc.as_reg() {
LabelValueLoc::Reg(Reg::from(preg))
} else {
continue;
};
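// `from_offset` and `to_offset` are instruction *start* offsets;
// shift both endpoints forward by one byte so the range begins just
// past the previous instruction's end and includes the end of the
// last covered instruction.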
ranges.push(ValueLocRange {
loc,
start: from_offset + 1,
end: to_offset + 1,
});
}
value_labels_ranges
}
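/// Get the IR block for a given BlockIndex, if one exists; lowered blocks
/// that did not originate from a single IR block return `None`.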
pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
self.block_order.lowered_order()[block.index()].orig_block()
}
#[inline]
fn assert_no_vreg_aliases<'a>(&self, list: &'a [VReg]) -> &'a [VReg] {
for vreg in list {
self.assert_not_vreg_alias(*vreg);
}
list
}
#[inline]
fn assert_not_vreg_alias(&self, vreg: VReg) -> VReg {
debug_assert!(VCodeBuilder::<I>::resolve_vreg_alias_impl(&self.vreg_aliases, vreg) == vreg);
vreg
}
#[inline]
fn assert_operand_not_vreg_alias(&self, op: Operand) -> Operand {
self.assert_not_vreg_alias(op.vreg());
op
}
}
impl<I: VCodeInst> RegallocFunction for VCode<I> {
fn num_insts(&self) -> usize {
self.insts.len()
}
fn num_blocks(&self) -> usize {
self.block_ranges.len()
}
fn entry_block(&self) -> BlockIndex {
self.entry
}
fn block_insns(&self, block: BlockIndex) -> InstRange {
let (start, end) = self.block_ranges[block.index()];
InstRange::forward(start, end)
}
fn block_succs(&self, block: BlockIndex) -> &[BlockIndex] {
let (start, end) = self.block_succ_range[block.index()];
&self.block_succs_preds[start as usize..end as usize]
}
fn block_preds(&self, block: BlockIndex) -> &[BlockIndex] {
let (start, end) = self.block_pred_range[block.index()];
&self.block_succs_preds[start as usize..end as usize]
}
fn block_params(&self, block: BlockIndex) -> &[VReg] {
let (start, end) = self.block_params_range[block.index()];
let ret = &self.block_params[start as usize..end as usize];
self.assert_no_vreg_aliases(ret)
}
fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
let (succ_range_start, succ_range_end) = self.branch_block_arg_succ_range[block.index()];
let succ_ranges =
&self.branch_block_arg_range[succ_range_start as usize..succ_range_end as usize];
let (branch_block_args_start, branch_block_args_end) = succ_ranges[succ_idx];
let ret = &self.branch_block_args
[branch_block_args_start as usize..branch_block_args_end as usize];
self.assert_no_vreg_aliases(ret)
}
fn is_ret(&self, insn: InsnIndex) -> bool {
match self.insts[insn.index()].is_term() {
MachTerminator::Ret => true,
_ => false,
}
}
fn is_branch(&self, insn: InsnIndex) -> bool {
match self.insts[insn.index()].is_term() {
MachTerminator::Cond | MachTerminator::Uncond | MachTerminator::Indirect => true,
_ => false,
}
}
fn requires_refs_on_stack(&self, insn: InsnIndex) -> bool {
self.insts[insn.index()].is_safepoint()
}
fn is_move(&self, insn: InsnIndex) -> Option<(Operand, Operand)> {
let (a, b) = self.is_move.get(&insn)?;
Some((
self.assert_operand_not_vreg_alias(*a),
self.assert_operand_not_vreg_alias(*b),
))
}
fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
let (start, end) = self.operand_ranges[insn.index()];
let ret = &self.operands[start as usize..end as usize];
for op in ret {
self.assert_operand_not_vreg_alias(*op);
}
ret
}
fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
self.clobbers.get(&insn).cloned().unwrap_or_default()
}
fn num_vregs(&self) -> usize {
std::cmp::max(self.vreg_types.len(), first_user_vreg_index())
}
fn reftype_vregs(&self) -> &[VReg] {
self.assert_no_vreg_aliases(&self.reftyped_vregs[..])
}
fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
for (vreg, ..) in self.debug_value_labels.iter() {
self.assert_not_vreg_alias(*vreg);
}
&self.debug_value_labels[..]
}
fn is_pinned_vreg(&self, vreg: VReg) -> Option<PReg> {
pinned_vreg_to_preg(vreg)
}
fn spillslot_size(&self, regclass: RegClass) -> usize {
self.abi.get_spillslot_size(regclass) as usize
}
fn allow_multiple_vreg_defs(&self) -> bool {
true
}
}
impl<I: VCodeInst> fmt::Debug for VCode<I> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "VCode {{")?;
writeln!(f, " Entry block: {}", self.entry.index())?;
let mut state = Default::default();
let mut alias_keys = self.vreg_aliases.keys().cloned().collect::<Vec<_>>();
alias_keys.sort_unstable();
for key in alias_keys {
let dest = self.vreg_aliases.get(&key).unwrap();
writeln!(f, " {:?} := {:?}", Reg::from(key), Reg::from(*dest))?;
}
for block in 0..self.num_blocks() {
let block = BlockIndex::new(block);
writeln!(f, "Block {}:", block.index())?;
if let Some(bb) = self.bindex_to_bb(block) {
writeln!(f, " (original IR block: {})", bb)?;
}
for succ in self.succs(block) {
writeln!(f, " (successor: Block {})", succ.index())?;
}
let (start, end) = self.block_ranges[block.index()];
writeln!(
f,
" (instruction range: {} .. {})",
start.index(),
end.index()
)?;
for inst in start.index()..end.index() {
writeln!(
f,
" Inst {}: {}",
inst,
self.insts[inst].pretty_print_inst(&[], &mut state)
)?;
}
}
writeln!(f, "}}")?;
Ok(())
}
}
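/// Tracks the large constants used in VCode that will be emitted separately
/// from the `.text` section, keeping enough information to deduplicate
/// pool, well-known, and u64 constants.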
#[derive(Default)]
pub struct VCodeConstants {
constants: PrimaryMap<VCodeConstant, VCodeConstantData>,
pool_uses: HashMap<Constant, VCodeConstant>,
well_known_uses: HashMap<*const [u8], VCodeConstant>,
u64s: HashMap<[u8; 8], VCodeConstant>,
}
impl VCodeConstants {
pub fn with_capacity(expected_num_constants: usize) -> Self {
Self {
constants: PrimaryMap::with_capacity(expected_num_constants),
pool_uses: HashMap::with_capacity(expected_num_constants),
well_known_uses: HashMap::new(),
u64s: HashMap::new(),
}
}
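/// Insert a constant; using this method indicates that the constant will be
/// used and thus emitted into the final binary. Pool, well-known, and u64
/// constants are deduplicated; generated constants are always inserted
/// fresh.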
pub fn insert(&mut self, data: VCodeConstantData) -> VCodeConstant {
match data {
VCodeConstantData::Generated(_) => self.constants.push(data),
VCodeConstantData::Pool(constant, _) => match self.pool_uses.get(&constant) {
None => {
let vcode_constant = self.constants.push(data);
self.pool_uses.insert(constant, vcode_constant);
vcode_constant
}
Some(&vcode_constant) => vcode_constant,
},
VCodeConstantData::WellKnown(data_ref) => {
match self.well_known_uses.entry(data_ref as *const [u8]) {
Entry::Vacant(v) => {
let vcode_constant = self.constants.push(data);
v.insert(vcode_constant);
vcode_constant
}
Entry::Occupied(o) => *o.get(),
}
}
VCodeConstantData::U64(value) => match self.u64s.entry(value) {
Entry::Vacant(v) => {
let vcode_constant = self.constants.push(data);
v.insert(vcode_constant);
vcode_constant
}
Entry::Occupied(o) => *o.get(),
},
}
}
pub fn len(&self) -> usize {
self.constants.len()
}
pub fn keys(&self) -> Keys<VCodeConstant> {
self.constants.keys()
}
pub fn iter(&self) -> impl Iterator<Item = (VCodeConstant, &VCodeConstantData)> {
self.constants.iter()
}
}
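/// A use of a constant by one or more VCode instructions; see
/// [`VCodeConstants`].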
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VCodeConstant(u32);
entity_impl!(VCodeConstant);
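/// Identifies the different kinds of constant that can be inserted into
/// [`VCodeConstants`]: constants from the IR's constant pool, well-known
/// static data, backend-generated data, and raw u64 values.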
pub enum VCodeConstantData {
Pool(Constant, ConstantData),
WellKnown(&'static [u8]),
Generated(ConstantData),
U64([u8; 8]),
}
impl VCodeConstantData {
pub fn as_slice(&self) -> &[u8] {
match self {
VCodeConstantData::Pool(_, d) | VCodeConstantData::Generated(d) => d.as_slice(),
VCodeConstantData::WellKnown(d) => d,
VCodeConstantData::U64(value) => &value[..],
}
}
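/// Returns the alignment of the constant data, in bytes: 8 for data that
/// fits in 8 bytes, 16 otherwise.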
pub fn alignment(&self) -> u32 {
if self.as_slice().len() <= 8 {
8
} else {
16
}
}
}
#[cfg(test)]
mod test {
use super::*;
use std::mem::size_of;
#[test]
fn size_of_constant_structs() {
assert_eq!(size_of::<Constant>(), 4);
assert_eq!(size_of::<VCodeConstant>(), 4);
assert_eq!(size_of::<ConstantData>(), 24);
assert_eq!(size_of::<VCodeConstantData>(), 32);
assert_eq!(
size_of::<PrimaryMap<VCodeConstant, VCodeConstantData>>(),
24
);
}
}