use crate::entity::SecondaryMap;
use crate::inst_predicates::{has_lowering_side_effect, is_constant_64bit};
use crate::ir::pcc::{Fact, FactContext, PccError, PccResult};
use crate::ir::{
ArgumentPurpose, Block, BlockArg, Constant, ConstantData, DataFlowGraph, ExternalName,
Function, GlobalValue, GlobalValueData, Immediate, Inst, InstructionData, MemFlags,
RelSourceLoc, SigRef, Signature, Type, Value, ValueDef, ValueLabelAssignments, ValueLabelStart,
};
use crate::machinst::valueregs::InvalidSentinel;
use crate::machinst::{
ABIMachineSpec, BackwardsInsnIndex, BlockIndex, BlockLoweringOrder, CallArgList, CallInfo,
CallRetList, Callee, InsnIndex, LoweredBlock, MachLabel, Reg, Sig, SigSet, TryCallInfo, VCode,
VCodeBuilder, VCodeConstant, VCodeConstantData, VCodeConstants, VCodeInst, ValueRegs, Writable,
writable_value_regs,
};
use crate::settings::Flags;
use crate::{CodegenError, CodegenResult, trace};
use crate::{FxHashMap, FxHashSet};
use alloc::vec::Vec;
use core::fmt::Debug;
use cranelift_control::ControlPlane;
use smallvec::{SmallVec, smallvec};
use super::{VCodeBuildDirection, VRegAllocator};
/// The result of lowering one CLIF instruction: one `ValueRegs` per
/// instruction result (inline capacity of 2 covers most instructions).
pub type InstOutput = SmallVec<[ValueRegs<Reg>; 2]>;
/// An opaque "color" carried by each program point during the lowering
/// scan. The color is bumped after every side-effecting instruction, so
/// two points share a color exactly when no side effect lies between
/// them; this is what makes instruction sinking/merging safe to check.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct InstColor(u32);

impl InstColor {
    /// Wrap a raw color counter value.
    fn new(n: u32) -> InstColor {
        Self(n)
    }

    /// Return the raw color number.
    pub fn get(self) -> u32 {
        self.0
    }
}
/// A "non-register" view of an instruction input: the producing
/// instruction (when it may be merged into the consumer's lowering)
/// and/or the constant it produces, if known.
#[derive(Clone, Copy, Debug)]
pub struct NonRegInput {
    /// The instruction that produces this value, if lowering is allowed
    /// to reach through and match on it.
    pub inst: InputSourceInst,
    /// The 64-bit (or narrower, zero-extended) constant value produced
    /// by the source instruction, if it is a constant.
    pub constant: Option<u64>,
}
/// How (and whether) an input value's producing instruction may be used
/// during lowering of the consumer.
#[derive(Clone, Copy, Debug)]
pub enum InputSourceInst {
    /// The value has exactly one use, so the producer may be sunk
    /// (moved) into the consumer's lowering.
    UniqueUse(Inst, usize),
    /// The value has multiple uses; the producer may only be duplicated
    /// (merged), not sunk.
    Use(Inst, usize),
    /// The producer may not be used at all (e.g. side-effecting, or a
    /// multi-result "root" instruction); go through the register.
    None,
}
impl InputSourceInst {
    /// Return the producing instruction and its result index, if the
    /// producer is reachable at all (either uniquely or shared).
    pub fn as_inst(&self) -> Option<(Inst, usize)> {
        match *self {
            InputSourceInst::UniqueUse(inst, output_idx)
            | InputSourceInst::Use(inst, output_idx) => Some((inst, output_idx)),
            InputSourceInst::None => None,
        }
    }
}
/// A machine backend's entry points for lowering CLIF into `MInst`s.
pub trait LowerBackend {
    /// The machine-instruction type this backend produces.
    type MInst: VCodeInst;

    /// Lower a single non-branch instruction. Returns the output
    /// registers, or `None` if the instruction is unsupported.
    fn lower(&self, ctx: &mut Lower<Self::MInst>, inst: Inst) -> Option<InstOutput>;

    /// Lower a block-terminating branch group whose successor labels
    /// are `targets`. Returns `None` if unsupported.
    fn lower_branch(
        &self,
        ctx: &mut Lower<Self::MInst>,
        inst: Inst,
        targets: &[MachLabel],
    ) -> Option<()>;

    /// The register pinned for `get_pinned_reg`/`set_pinned_reg`, if
    /// this backend supports one. Default: none.
    fn maybe_pinned_reg(&self) -> Option<Reg> {
        None
    }

    /// Per-instruction flow state threaded through PCC fact checking.
    type FactFlowState: Default + Clone + Debug;

    /// Check proof-carrying-code facts on one VCode instruction.
    /// Default implementation reports the backend as unsupported.
    fn check_fact(
        &self,
        _ctx: &FactContext<'_>,
        _vcode: &mut VCode<Self::MInst>,
        _inst: InsnIndex,
        _state: &mut Self::FactFlowState,
    ) -> PccResult<()> {
        Err(PccError::UnimplementedBackend)
    }
}
/// Context and mutable state for lowering one CLIF function to VCode.
pub struct Lower<'func, I: VCodeInst> {
    /// The CLIF function being lowered.
    pub(crate) f: &'func Function,
    /// The VCode under construction (built backward).
    vcode: VCodeBuilder<I>,
    /// Allocator for virtual registers and their PCC facts.
    vregs: VRegAllocator<I>,
    /// Pre-assigned vregs for every CLIF value (params and results).
    value_regs: SecondaryMap<Value, ValueRegs<Reg>>,
    /// Vreg holding the struct-return pointer, if the signature has one.
    sret_reg: Option<ValueRegs<Reg>>,
    /// Color at the *end* of each block (see `InstColor`).
    block_end_colors: SecondaryMap<Block, InstColor>,
    /// Color at the *entry* of each side-effecting instruction.
    side_effect_inst_entry_colors: FxHashMap<Inst, InstColor>,
    /// Color at the current scan point; `None` outside a block scan.
    cur_scan_entry_color: Option<InstColor>,
    /// The CLIF instruction currently being lowered, if any.
    cur_inst: Option<Inst>,
    /// Instructions known to produce a 64-bit constant, with the value.
    inst_constants: FxHashMap<Inst, u64>,
    /// Static use counts (Unused/Once/Multiple) per CLIF value.
    value_ir_uses: SecondaryMap<Value, ValueUseState>,
    /// Dynamic counts of uses actually emitted during lowering.
    value_lowered_uses: SecondaryMap<Value, u32>,
    /// Instructions that were sunk into a consumer; skipped when reached.
    inst_sunk: FxHashSet<Inst>,
    /// Buffer of machine insts for the current IR inst (reversed on flush).
    ir_insts: Vec<I>,
    /// Pre-allocated return-value vregs for each `try_call`.
    try_call_rets: FxHashMap<Inst, SmallVec<[ValueRegs<Writable<Reg>>; 2]>>,
    /// Pre-allocated exception-payload vregs for each `try_call`.
    try_call_payloads: FxHashMap<Inst, SmallVec<[Writable<Reg>; 2]>>,
    /// The backend's pinned register, if any.
    pinned_reg: Option<Reg>,
    /// Compilation settings.
    flags: Flags,
}
/// Saturating static use count of a CLIF value: we only care whether a
/// value is used zero times, exactly once, or more than once, because
/// "exactly once" is what permits sinking the producer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ValueUseState {
    /// No uses observed.
    Unused,
    /// Exactly one use observed.
    Once,
    /// Two or more uses observed (saturated).
    Multiple,
}

impl ValueUseState {
    /// Record one more use, saturating at `Multiple`.
    fn inc(&mut self) {
        *self = match *self {
            ValueUseState::Unused => ValueUseState::Once,
            ValueUseState::Once | ValueUseState::Multiple => ValueUseState::Multiple,
        };
    }
}
/// How far a relocation target may be from the call site, which decides
/// whether a short (PC-relative) or long (absolute) call form is needed.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RelocDistance {
    /// Target known to be within the short-form reach (colocated symbol).
    Near,
    /// Target may be anywhere; use the conservative long form.
    Far,
}
impl<'func, I: VCodeInst> Lower<'func, I> {
pub fn new(
f: &'func Function,
abi: Callee<I::ABIMachineSpec>,
emit_info: I::Info,
block_order: BlockLoweringOrder,
sigs: SigSet,
flags: Flags,
) -> CodegenResult<Self> {
let constants = VCodeConstants::with_capacity(f.dfg.constants.len());
let vcode = VCodeBuilder::new(
sigs,
abi,
emit_info,
block_order,
constants,
VCodeBuildDirection::Backward,
flags.log2_min_function_alignment(),
);
let mut vregs = VRegAllocator::with_capacity(f.dfg.num_values() * 2);
let mut value_regs = SecondaryMap::with_default(ValueRegs::invalid());
let mut try_call_rets = FxHashMap::default();
let mut try_call_payloads = FxHashMap::default();
for bb in f.layout.blocks() {
for ¶m in f.dfg.block_params(bb) {
let ty = f.dfg.value_type(param);
if value_regs[param].is_invalid() {
let regs = vregs.alloc_with_maybe_fact(ty, f.dfg.facts[param].clone())?;
value_regs[param] = regs;
trace!("bb {} param {}: regs {:?}", bb, param, regs);
}
}
for inst in f.layout.block_insts(bb) {
for &result in f.dfg.inst_results(inst) {
let ty = f.dfg.value_type(result);
if value_regs[result].is_invalid() && !ty.is_invalid() {
let regs = vregs.alloc_with_maybe_fact(ty, f.dfg.facts[result].clone())?;
value_regs[result] = regs;
trace!(
"bb {} inst {} ({:?}): result {} regs {:?}",
bb, inst, f.dfg.insts[inst], result, regs,
);
}
}
if let Some(et) = f.dfg.insts[inst].exception_table() {
let exdata = &f.dfg.exception_tables[et];
let sig = &f.dfg.signatures[exdata.signature()];
let mut rets = smallvec![];
for ty in sig.returns.iter().map(|ret| ret.value_type) {
rets.push(vregs.alloc(ty)?.map(|r| Writable::from_reg(r)));
}
try_call_rets.insert(inst, rets);
let mut payloads = smallvec![];
for &ty in sig
.call_conv
.exception_payload_types(I::ABIMachineSpec::word_type())
{
payloads.push(Writable::from_reg(vregs.alloc(ty)?.only_reg().unwrap()));
}
try_call_payloads.insert(inst, payloads);
}
}
}
let mut sret_param = None;
for ret in vcode.abi().signature().returns.iter() {
if ret.purpose == ArgumentPurpose::StructReturn {
let entry_bb = f.stencil.layout.entry_block().unwrap();
for (¶m, sig_param) in f
.dfg
.block_params(entry_bb)
.iter()
.zip(vcode.abi().signature().params.iter())
{
if sig_param.purpose == ArgumentPurpose::StructReturn {
assert!(sret_param.is_none());
sret_param = Some(param);
}
}
assert!(sret_param.is_some());
}
}
let sret_reg = sret_param.map(|param| {
let regs = value_regs[param];
assert!(regs.len() == 1);
regs
});
let mut cur_color = 0;
let mut block_end_colors = SecondaryMap::with_default(InstColor::new(0));
let mut side_effect_inst_entry_colors = FxHashMap::default();
let mut inst_constants = FxHashMap::default();
for bb in f.layout.blocks() {
cur_color += 1;
for inst in f.layout.block_insts(bb) {
let side_effect = has_lowering_side_effect(f, inst);
trace!("bb {} inst {} has color {}", bb, inst, cur_color);
if side_effect {
side_effect_inst_entry_colors.insert(inst, InstColor::new(cur_color));
trace!(" -> side-effecting; incrementing color for next inst");
cur_color += 1;
}
if let Some(c) = is_constant_64bit(f, inst) {
trace!(" -> constant: {}", c);
inst_constants.insert(inst, c);
}
}
block_end_colors[bb] = InstColor::new(cur_color);
}
let value_ir_uses = compute_use_states(f, sret_param);
Ok(Lower {
f,
vcode,
vregs,
value_regs,
sret_reg,
block_end_colors,
side_effect_inst_entry_colors,
inst_constants,
value_ir_uses,
value_lowered_uses: SecondaryMap::default(),
inst_sunk: FxHashSet::default(),
cur_scan_entry_color: None,
cur_inst: None,
ir_insts: vec![],
try_call_rets,
try_call_payloads,
pinned_reg: None,
flags,
})
}
/// Shared access to the ABI signature set.
pub fn sigs(&self) -> &SigSet {
    self.vcode.sigs()
}

/// Mutable access to the ABI signature set.
pub fn sigs_mut(&mut self) -> &mut SigSet {
    self.vcode.sigs_mut()
}

/// Mutable access to the vreg allocator.
pub fn vregs_mut(&mut self) -> &mut VRegAllocator<I> {
    &mut self.vregs
}
/// Emit the function prologue's argument setup: copy each (used) ABI
/// argument into its value's vregs, set up the return-value area if
/// needed, and emit the args pseudo-inst. Called when lowering reaches
/// the entry block.
fn gen_arg_setup(&mut self) {
    if let Some(entry_bb) = self.f.layout.entry_block() {
        trace!(
            "gen_arg_setup: entry BB {} args are:\n{:?}",
            entry_bb,
            self.f.dfg.block_params(entry_bb)
        );

        for (i, param) in self.f.dfg.block_params(entry_bb).iter().enumerate() {
            // Skip arguments the function body never reads.
            if self.value_ir_uses[*param] == ValueUseState::Unused {
                continue;
            }
            let regs = writable_value_regs(self.value_regs[*param]);
            for insn in self
                .vcode
                .vcode
                .abi
                .gen_copy_arg_to_regs(&self.vcode.vcode.sigs, i, regs, &mut self.vregs)
                .into_iter()
            {
                self.emit(insn);
            }
        }
        // Stash the hidden return-area pointer, if the ABI uses one.
        if let Some(insn) = self
            .vcode
            .vcode
            .abi
            .gen_retval_area_setup(&self.vcode.vcode.sigs, &mut self.vregs)
        {
            self.emit(insn);
        }

        // Flush now so the args pseudo-inst below ends up *before* the
        // copies in the final (reversed) order.
        self.finish_ir_inst(Default::default());
        if let Some(insn) = self.vcode.vcode.abi.take_args() {
            self.emit(insn);
        }
    }
}
/// Emit the epilogue's return-value moves and the return pseudo-inst.
/// `rets` holds the vregs of the CLIF return values in order; the
/// struct-return slot (if any) is filled from `sret_reg` instead.
pub fn gen_return(&mut self, rets: &[ValueRegs<Reg>]) {
    let mut out_rets = vec![];

    let mut rets = rets.into_iter();
    for (i, ret) in self
        .abi()
        .signature()
        .returns
        .clone()
        .into_iter()
        .enumerate()
    {
        let regs = if ret.purpose == ArgumentPurpose::StructReturn {
            // The sret pointer is returned from the saved incoming
            // argument, not from a CLIF return value.
            self.sret_reg.unwrap()
        } else {
            *rets.next().unwrap()
        };

        let (regs, insns) = self.vcode.abi().gen_copy_regs_to_retval(
            self.vcode.sigs(),
            i,
            regs,
            &mut self.vregs,
        );
        out_rets.extend(regs);
        for insn in insns {
            self.emit(insn);
        }
    }

    // Keep the VMContext alive to the end of the function when value
    // labels (debug info) are in use, so its location stays recorded.
    if self.f.dfg.values_labels.is_some() {
        if let Some(vmctx_val) = self.f.special_param(ArgumentPurpose::VMContext) {
            if self.value_ir_uses[vmctx_val] != ValueUseState::Unused {
                let vmctx_reg = self.value_regs[vmctx_val].only_reg().unwrap();
                self.emit(I::gen_dummy_use(vmctx_reg));
            }
        }
    }

    let inst = self.abi().gen_rets(out_rets);
    self.emit(inst);
}
/// Allocate fresh vregs for each return value of a call with signature
/// `sig`. Allocation errors are deferred (reported at end of lowering).
pub fn gen_call_output(&mut self, sig: &Signature) -> InstOutput {
    let mut rets = smallvec![];
    for ty in sig.returns.iter().map(|ret| ret.value_type) {
        rets.push(self.vregs.alloc_with_deferred_error(ty));
    }
    rets
}

/// As `gen_call_output`, but looking the signature up by `SigRef`.
pub fn gen_call_output_from_sig_ref(&mut self, sig_ref: SigRef) -> InstOutput {
    self.gen_call_output(&self.f.dfg.signatures[sig_ref])
}
/// Emit argument-setup instructions for an ordinary (non-tail) call
/// and return the resulting use-operand list.
pub fn gen_call_args(&mut self, sig: Sig, args: &[ValueRegs<Reg>]) -> CallArgList {
    let (uses, insts) = self.vcode.abi().gen_call_args(
        self.vcode.sigs(),
        sig,
        args,
        false, // not a tail call
        &self.flags,
        &mut self.vregs,
    );
    for insn in insts {
        self.emit(insn);
    }
    uses
}

/// Emit argument-setup instructions for a `return_call` (tail call)
/// and return the resulting use-operand list. Differs from
/// `gen_call_args` only in the tail-call flag passed to the ABI.
pub fn gen_return_call_args(&mut self, sig: Sig, args: &[ValueRegs<Reg>]) -> CallArgList {
    let (uses, insts) = self.vcode.abi().gen_call_args(
        self.vcode.sigs(),
        sig,
        args,
        true, // tail call
        &self.flags,
        &mut self.vregs,
    );
    for insn in insts {
        self.emit(insn);
    }
    uses
}
/// Build the def-operand list for an ordinary call's return values.
pub fn gen_call_rets(&mut self, sig: Sig, outputs: &[ValueRegs<Reg>]) -> CallRetList {
    self.vcode
        .abi()
        .gen_call_rets(self.vcode.sigs(), sig, outputs, None, &mut self.vregs)
}

/// Build the def-operand list for the current `try_call`, covering
/// both normal returns and exception payloads (pre-allocated in
/// `new`). Must be called while that `try_call` is `cur_inst`.
pub fn gen_try_call_rets(&mut self, sig: Sig) -> CallRetList {
    let ir_inst = self.cur_inst.unwrap();
    let mut outputs: SmallVec<[ValueRegs<Reg>; 2]> = smallvec![];
    for return_def in self.try_call_rets.get(&ir_inst).unwrap() {
        outputs.push(return_def.map(|r| r.to_reg()));
    }
    let payloads = Some(&self.try_call_payloads.get(&ir_inst).unwrap()[..]);

    self.vcode
        .abi()
        .gen_call_rets(self.vcode.sigs(), sig, &outputs, payloads, &mut self.vregs)
}

/// Assemble the `CallInfo` carried by a call machine instruction.
pub fn gen_call_info<T>(
    &mut self,
    sig: Sig,
    dest: T,
    uses: CallArgList,
    defs: CallRetList,
    try_call_info: Option<TryCallInfo>,
    patchable: bool,
) -> CallInfo<T> {
    self.vcode.abi().gen_call_info(
        self.vcode.sigs(),
        sig,
        dest,
        uses,
        defs,
        try_call_info,
        patchable,
    )
}
/// Was `inst` already sunk into a consumer's lowering? Sunk
/// instructions are skipped when the scan reaches them.
fn is_inst_sunk(&self, inst: Inst) -> bool {
    self.inst_sunk.contains(&inst)
}

/// Does any result of `inst` have at least one lowered (emitted) use?
/// If not, and the inst has no side effect, it can be skipped.
fn is_any_inst_result_needed(&self, inst: Inst) -> bool {
    self.f
        .dfg
        .inst_results(inst)
        .iter()
        .any(|&result| self.value_lowered_uses[result] > 0)
}
/// Lower the body of one CLIF block, scanning instructions in reverse
/// (the VCode is built backward). Branches are handled separately by
/// `lower_clif_branch`; dead, side-effect-free instructions are
/// skipped. Also attaches user stack maps and debug tags to the
/// emitted machine instructions, and (under chaos-mode control) may
/// inject harmless extra instructions.
fn lower_clif_block<B: LowerBackend<MInst = I>>(
    &mut self,
    backend: &B,
    block: Block,
    ctrl_plane: &mut ControlPlane,
) -> CodegenResult<()> {
    // Start the scan with the color at the end of the block.
    self.cur_scan_entry_color = Some(self.block_end_colors[block]);
    for inst in self.f.layout.block_insts(block).rev() {
        let data = &self.f.dfg.insts[inst];
        let has_side_effect = has_lowering_side_effect(self.f, inst);
        // Skip instructions that were sunk into a consumer.
        if self.is_inst_sunk(inst) {
            continue;
        }
        let value_needed = self.is_any_inst_result_needed(inst);
        trace!(
            "lower_clif_block: block {} inst {} ({:?}) is_branch {} side_effect {} value_needed {}",
            block,
            inst,
            data,
            data.opcode().is_branch(),
            has_side_effect,
            value_needed,
        );
        self.cur_inst = Some(inst);
        if has_side_effect {
            // Roll the scan color back to this inst's entry color.
            let entry_color = *self
                .side_effect_inst_entry_colors
                .get(&inst)
                .expect("every side-effecting inst should have a color-map entry");
            self.cur_scan_entry_color = Some(entry_color);
        }
        // Branches were already lowered by `lower_clif_branch`.
        if self.f.dfg.insts[inst].opcode().is_branch() {
            continue;
        }
        self.emit_value_label_live_range_start_for_inst(inst);
        if has_side_effect || value_needed {
            trace!("lowering: inst {}: {}", inst, self.f.dfg.display_inst(inst));
            let temp_regs = match backend.lower(self, inst) {
                Some(regs) => regs,
                None => {
                    let ty = if self.num_outputs(inst) > 0 {
                        Some(self.output_ty(inst, 0))
                    } else {
                        None
                    };
                    return Err(CodegenError::Unsupported(format!(
                        "should be implemented in ISLE: inst = `{}`, type = `{:?}`",
                        self.f.dfg.display_inst(inst),
                        ty
                    )));
                }
            };

            // Alias each result's pre-assigned vregs to the registers
            // the backend actually produced (missing ones become the
            // invalid sentinel).
            let results = self.f.dfg.inst_results(inst);
            debug_assert_eq!(temp_regs.len(), results.len());
            for (regs, &result) in temp_regs.iter().zip(results) {
                let dsts = self.value_regs[result];
                let mut regs = regs.regs().iter();
                for &dst in dsts.regs().iter() {
                    let temp = regs.next().copied().unwrap_or(Reg::invalid_sentinel());
                    trace!("set vreg alias: {result:?} = {dst:?}, lowering = {temp:?}");
                    self.vregs.set_vreg_alias(dst, temp);
                }
            }
        }

        let start = self.vcode.vcode.num_insts();
        let loc = self.srcloc(inst);
        self.finish_ir_inst(loc);

        // Attach a user stack map to the one safepoint emitted for
        // this CLIF instruction, if it has stack-map entries.
        if let Some(entries) = self.f.dfg.user_stack_map_entries(inst) {
            let end = self.vcode.vcode.num_insts();
            debug_assert!(end > start);
            debug_assert_eq!(
                (start..end)
                    .filter(|i| self.vcode.vcode[InsnIndex::new(*i)].is_safepoint())
                    .count(),
                1
            );
            for i in start..end {
                let iix = InsnIndex::new(i);
                if self.vcode.vcode[iix].is_safepoint() {
                    trace!(
                        "Adding user stack map from clif\n\n\
                         {inst:?} `{}`\n\n\
                         to vcode\n\n\
                         {iix:?} `{}`",
                        self.f.dfg.display_inst(inst),
                        &self.vcode.vcode[iix].pretty_print_inst(&mut Default::default()),
                    );
                    self.vcode
                        .add_user_stack_map(BackwardsInsnIndex::new(iix.index()), entries);
                    break;
                }
            }
        }

        // Propagate any debug tags on the CLIF inst to every machine
        // inst it produced.
        let debug_tags = self.f.debug_tags.get(inst);
        if !debug_tags.is_empty() && self.vcode.vcode.num_insts() > 0 {
            let end = self.vcode.vcode.num_insts();
            for i in start..end {
                let backwards_index = BackwardsInsnIndex::new(i);
                log::trace!(
                    "debug tags on {inst}; associating {debug_tags:?} with {backwards_index:?}"
                );
                self.vcode.add_debug_tags(backwards_index, debug_tags);
            }
        }

        // Chaos mode: optionally emit a harmless constant-materializing
        // instruction to perturb the schedule for fuzzing.
        if ctrl_plane.get_decision() {
            if ctrl_plane.get_decision() {
                let imm: u64 = ctrl_plane.get_arbitrary();
                let reg = self.alloc_tmp(crate::ir::types::I64).regs()[0];
                I::gen_imm_u64(imm, reg).map(|inst| self.emit(inst));
            } else {
                let imm: f64 = ctrl_plane.get_arbitrary();
                let tmp = self.alloc_tmp(crate::ir::types::I64).regs()[0];
                let reg = self.alloc_tmp(crate::ir::types::F64).regs()[0];
                for inst in I::gen_imm_f64(imm, tmp, reg) {
                    self.emit(inst);
                }
            }
        }
    }

    self.add_block_params(block)?;
    self.cur_scan_entry_color = None;
    Ok(())
}
fn add_block_params(&mut self, block: Block) -> CodegenResult<()> {
for ¶m in self.f.dfg.block_params(block) {
for ® in self.value_regs[param].regs() {
let vreg = reg.to_virtual_reg().unwrap();
self.vcode.add_block_param(vreg);
}
}
Ok(())
}
/// Look up the value-label assignments for `val`, following alias
/// chains up to a fixed depth (10) to avoid unbounded recursion.
/// Returns `None` when debug value labels are absent or unresolvable.
fn get_value_labels<'a>(&'a self, val: Value, depth: usize) -> Option<&'a [ValueLabelStart]> {
    if let Some(ref values_labels) = self.f.dfg.values_labels {
        debug_assert!(self.f.dfg.value_is_real(val));
        trace!(
            "get_value_labels: val {} -> {:?}",
            val,
            values_labels.get(&val)
        );
        match values_labels.get(&val) {
            Some(&ValueLabelAssignments::Starts(ref list)) => Some(&list[..]),
            // Follow an alias, but give up past depth 10.
            Some(&ValueLabelAssignments::Alias { value, .. }) if depth < 10 => {
                self.get_value_labels(value, depth + 1)
            }
            _ => None,
        }
    } else {
        None
    }
}
/// Record value-label marks for `val` at the current point, if it has
/// labels. Multi-register values are skipped (single-reg only).
fn emit_value_label_marks_for_value(&mut self, val: Value) {
    let regs = self.value_regs[val];
    if regs.len() > 1 {
        return;
    }
    let reg = regs.only_reg().unwrap();

    if let Some(label_starts) = self.get_value_labels(val, 0) {
        // De-duplicate labels before recording.
        let labels = label_starts
            .iter()
            .map(|&ValueLabelStart { label, .. }| label)
            .collect::<FxHashSet<_>>();
        for label in labels {
            trace!(
                "value labeling: defines val {:?} -> reg {:?} -> label {:?}",
                val, reg, label,
            );
            self.vcode.add_value_label(reg, label);
        }
    }
}

/// Record value-label live-range starts for every result of `inst`.
/// No-op when the function carries no debug value labels.
fn emit_value_label_live_range_start_for_inst(&mut self, inst: Inst) {
    if self.f.dfg.values_labels.is_none() {
        return;
    }
    trace!(
        "value labeling: srcloc {}: inst {}",
        self.srcloc(inst),
        inst
    );
    for &val in self.f.dfg.inst_results(inst) {
        self.emit_value_label_marks_for_value(val);
    }
}

/// Record value-label live-range starts for every parameter of
/// `block`, then flush the buffered instructions.
fn emit_value_label_live_range_start_for_block_args(&mut self, block: Block) {
    if self.f.dfg.values_labels.is_none() {
        return;
    }
    trace!("value labeling: block {}", block);
    for &arg in self.f.dfg.block_params(block) {
        self.emit_value_label_marks_for_value(arg);
    }
    self.finish_ir_inst(Default::default());
}
/// Flush the per-IR-inst buffer into the VCode, in reverse order
/// (the VCode is built backward), tagging each with `loc`.
fn finish_ir_inst(&mut self, loc: RelSourceLoc) {
    for inst in self.ir_insts.drain(..).rev() {
        self.vcode.push(inst, loc);
    }
}

/// Mark the end of the current lowered basic block.
fn finish_bb(&mut self) {
    self.vcode.end_bb();
}
/// Lower the terminating branch of a block via the backend, then
/// record the successor edges with their block-param arguments.
fn lower_clif_branch<B: LowerBackend<MInst = I>>(
    &mut self,
    backend: &B,
    bindex: BlockIndex,
    block: Block,
    branch: Inst,
    targets: &[MachLabel],
) -> CodegenResult<()> {
    trace!(
        "lower_clif_branch: block {} branch {:?} targets {:?}",
        block, branch, targets,
    );
    self.cur_inst = Some(branch);
    backend
        .lower_branch(self, branch, targets)
        .unwrap_or_else(|| {
            panic!(
                "should be implemented in ISLE: branch = `{}`",
                self.f.dfg.display_inst(branch),
            )
        });
    let loc = self.srcloc(branch);
    self.finish_ir_inst(loc);
    // Add successor edges *after* the branch instructions so the
    // block-param uses come at the right position.
    self.lower_branch_blockparam_args(bindex);
    Ok(())
}

/// Collect the branch-argument vregs for every successor of `block`
/// and register the successor edges on the VCode.
fn lower_branch_blockparam_args(&mut self, block: BlockIndex) {
    let mut branch_arg_vregs: SmallVec<[Reg; 16]> = smallvec![];

    for succ_idx in 0..self.vcode.block_order().succ_indices(block).1.len() {
        branch_arg_vregs.clear();
        let (succ, args) = self.collect_block_call(block, succ_idx, &mut branch_arg_vregs);
        self.vcode.add_succ(succ, args);
    }
}

/// Fill `targets` with the MachLabels of `bindex`'s successors and
/// return the terminating branch instruction, if any.
fn collect_branch_and_targets(
    &self,
    bindex: BlockIndex,
    _bb: Block,
    targets: &mut SmallVec<[MachLabel; 2]>,
) -> Option<Inst> {
    targets.clear();
    let (opt_inst, succs) = self.vcode.block_order().succ_indices(bindex);
    targets.extend(succs.iter().map(|succ| MachLabel::from_block(*succ)));
    opt_inst
}
/// Resolve the `succ_idx`-th successor edge of lowered block `block`
/// and collect into `buffer` the registers carrying its branch
/// arguments. Critical-edge successor blocks carry no args (the edge
/// block itself forwards them); when *this* block is a critical-edge
/// block, the args come from its predecessor's branch.
fn collect_block_call<'a>(
    &mut self,
    block: BlockIndex,
    succ_idx: usize,
    buffer: &'a mut SmallVec<[Reg; 16]>,
) -> (BlockIndex, &'a [Reg]) {
    let block_order = self.vcode.block_order();
    let (_, succs) = block_order.succ_indices(block);
    let succ = succs[succ_idx];
    let this_lb = block_order.lowered_order()[block.index()];
    let succ_lb = block_order.lowered_order()[succ.index()];

    let (branch_inst, succ_idx) = match (this_lb, succ_lb) {
        (_, LoweredBlock::CriticalEdge { .. }) => {
            // The edge block will pass the args along; none here.
            return (succ, &[]);
        }
        (LoweredBlock::CriticalEdge { pred, succ_idx, .. }, _) => {
            // This is an edge block: take args from the predecessor's
            // branch, at the edge's recorded successor index.
            let branch_inst = self.f.layout.last_inst(pred).unwrap();
            (branch_inst, succ_idx as usize)
        }
        (this, _) => {
            // Ordinary block: use its own terminating branch.
            let block = this.orig_block().unwrap();
            let branch_inst = self.f.layout.last_inst(block).unwrap();
            (branch_inst, succ_idx)
        }
    };

    let block_call = self.f.dfg.insts[branch_inst]
        .branch_destination(&self.f.dfg.jump_tables, &self.f.dfg.exception_tables)[succ_idx];
    for arg in block_call.args(&self.f.dfg.value_lists) {
        match arg {
            BlockArg::Value(arg) => {
                debug_assert!(self.f.dfg.value_is_real(arg));
                let regs = self.put_value_in_regs(arg);
                buffer.extend_from_slice(regs.regs());
            }
            BlockArg::TryCallRet(i) => {
                let regs = self.try_call_rets.get(&branch_inst).unwrap()[i as usize]
                    .map(|r| r.to_reg());
                buffer.extend_from_slice(regs.regs());
            }
            BlockArg::TryCallExn(i) => {
                let reg =
                    self.try_call_payloads.get(&branch_inst).unwrap()[i as usize].to_reg();
                buffer.push(reg);
            }
        }
    }
    (succ, &buffer[..])
}
/// Drive the whole lowering: walk the lowered block order in reverse
/// (VCode is built backward), lowering each block's branch then body,
/// emitting the entry block's argument setup last, and finally build
/// the VCode. Deferred vreg-allocation errors are surfaced here.
pub fn lower<B: LowerBackend<MInst = I>>(
    mut self,
    backend: &B,
    ctrl_plane: &mut ControlPlane,
) -> CodegenResult<VCode<I>> {
    trace!("about to lower function: {:?}", self.f);
    self.vcode.init_retval_area(&mut self.vregs)?;
    self.pinned_reg = backend.maybe_pinned_reg();
    self.vcode.set_entry(BlockIndex::new(0));

    let mut targets: SmallVec<[MachLabel; 2]> = SmallVec::new();
    // Snapshot the order; we mutate `self` while iterating.
    let lowered_order: SmallVec<[LoweredBlock; 64]> = self
        .vcode
        .block_order()
        .lowered_order()
        .iter()
        .cloned()
        .collect();
    for (bindex, lb) in lowered_order.iter().enumerate().rev() {
        let bindex = BlockIndex::new(bindex);

        // Terminator first (we emit backward).
        if let Some(bb) = lb.orig_block() {
            if let Some(branch) = self.collect_branch_and_targets(bindex, bb, &mut targets) {
                self.lower_clif_branch(backend, bindex, bb, branch, &targets)?;
                self.finish_ir_inst(self.srcloc(branch));
            }
        } else {
            // Edge block: just an unconditional jump to its successor.
            let succ = self.vcode.block_order().succ_indices(bindex).1[0];
            self.emit(I::gen_jump(MachLabel::from_block(succ)));
            self.finish_ir_inst(Default::default());
            self.lower_branch_blockparam_args(bindex);
        }

        // Then the block body.
        if let Some(bb) = lb.orig_block() {
            self.lower_clif_block(backend, bb, ctrl_plane)?;
            self.emit_value_label_live_range_start_for_block_args(bb);
        }
        // Entry block gets the ABI argument setup prologue.
        if bindex.index() == 0 {
            self.gen_arg_setup();
            self.finish_ir_inst(Default::default());
        }
        self.finish_bb();

        // Report any deferred vreg-allocation error now.
        if let Some(e) = self.vregs.take_deferred_error() {
            return Err(e);
        }
    }
    trace!(
        "built vcode:\n{:?}Backwards {:?}",
        &self.vregs, &self.vcode.vcode
    );
    let vcode = self.vcode.build(self.vregs);
    Ok(vcode)
}
/// Is `val` never used in the function body (per the static use-state
/// analysis)? Idiomatic `matches!` replaces the manual match-to-bool.
pub fn value_is_unused(&self, val: Value) -> bool {
    matches!(self.value_ir_uses[val], ValueUseState::Unused)
}
/// Return the MachLabel of the `succ`-th successor of CLIF block
/// `block`. Panics if the block is unreachable or the index is out of
/// range.
pub fn block_successor_label(&self, block: Block, succ: usize) -> MachLabel {
    trace!("block_successor_label: block {block} succ {succ}");
    let lowered = self
        .vcode
        .block_order()
        .lowered_index_for_block(block)
        .expect("Unreachable block");
    trace!(" -> lowered block {lowered:?}");
    let (_, succs) = self.vcode.block_order().succ_indices(lowered);
    trace!(" -> succs {succs:?}");
    let succ_block = *succs.get(succ).expect("Successor index out of range");
    MachLabel::from_block(succ_block)
}
}
/// Compute the static use state (Unused/Once/Multiple) of every value.
///
/// A value used by an instruction that will itself be duplicated
/// (merged into multiple consumers) transitively makes *its* operands
/// Multiple too; the DFS below propagates that saturation through
/// pure, non-"root" producers. The sret param (if any) starts at
/// Multiple so it is never sunk.
fn compute_use_states(
    f: &Function,
    sret_param: Option<Value>,
) -> SecondaryMap<Value, ValueUseState> {
    let mut value_ir_uses = SecondaryMap::with_default(ValueUseState::Unused);

    if let Some(sret_param) = sret_param {
        value_ir_uses[sret_param] = ValueUseState::Multiple;
    }

    // Explicit DFS stack of operand iterators (avoids recursion).
    let mut stack: SmallVec<[_; 16]> = smallvec![];

    // Operands of `value`'s producer, if that producer's operands can
    // become Multiple through it (i.e. it is a pure non-root inst).
    let uses = |value| {
        trace!(" -> pushing args for {} onto stack", value);
        if let ValueDef::Result(src_inst, _) = f.dfg.value_def(value) {
            if is_value_use_root(f, src_inst) {
                None
            } else {
                Some(f.dfg.inst_values(src_inst))
            }
        } else {
            None
        }
    };

    for inst in f
        .layout
        .blocks()
        .flat_map(|block| f.layout.block_insts(block))
    {
        for arg in f.dfg.inst_values(inst) {
            debug_assert!(f.dfg.value_is_real(arg));
            let old = value_ir_uses[arg];
            value_ir_uses[arg].inc();
            let new = value_ir_uses[arg];
            trace!("arg {} used, old state {:?}, new {:?}", arg, old, new);

            // Only propagate on the Once -> Multiple transition.
            if old == ValueUseState::Multiple || new != ValueUseState::Multiple {
                continue;
            }
            if let Some(iter) = uses(arg) {
                stack.push(iter);
            }
            while let Some(iter) = stack.last_mut() {
                if let Some(value) = iter.next() {
                    debug_assert!(f.dfg.value_is_real(value));
                    trace!(" -> DFS reaches {}", value);
                    if value_ir_uses[value] == ValueUseState::Multiple {
                        // Already saturated; by induction the whole
                        // subtree below it already is too.
                        debug_assert!(uses(value).into_iter().flatten().all(|arg| {
                            debug_assert!(f.dfg.value_is_real(arg));
                            value_ir_uses[arg] == ValueUseState::Multiple
                        }));
                        continue;
                    }
                    value_ir_uses[value] = ValueUseState::Multiple;
                    trace!(" -> became Multiple");
                    if let Some(iter) = uses(value) {
                        stack.push(iter);
                    }
                } else {
                    stack.pop();
                }
            }
        }
    }

    value_ir_uses
}
/// Is `inst` a "root" for use-state propagation? Multi-result
/// instructions are never merged into consumers, so the transitive
/// Multiple-propagation in `compute_use_states` stops at them.
fn is_value_use_root(f: &Function, inst: Inst) -> bool {
    f.dfg.inst_results(inst).len() > 1
}
// Accessors for the underlying function and ABI callee object.
impl<'func, I: VCodeInst> Lower<'func, I> {
    /// The function's data-flow graph.
    pub fn dfg(&self) -> &DataFlowGraph {
        &self.f.dfg
    }

    /// Shared access to the ABI callee object.
    pub fn abi(&self) -> &Callee<I::ABIMachineSpec> {
        self.vcode.abi()
    }

    /// Mutable access to the ABI callee object.
    pub fn abi_mut(&mut self) -> &mut Callee<I::ABIMachineSpec> {
        self.vcode.abi_mut()
    }
}
impl<'func, I: VCodeInst> Lower<'func, I> {
/// The `InstructionData` of `ir_inst`.
pub fn data(&self, ir_inst: Inst) -> &InstructionData {
    &self.f.dfg.insts[ir_inst]
}

/// If `global_value` is a symbol reference, return its name, how far
/// the symbol may be (near if colocated), and its byte offset.
/// Returns `None` for non-symbol global values.
pub fn symbol_value_data<'b>(
    &'b self,
    global_value: GlobalValue,
) -> Option<(&'b ExternalName, RelocDistance, i64)> {
    let gvdata = &self.f.global_values[global_value];
    match gvdata {
        &GlobalValueData::Symbol {
            ref name,
            ref offset,
            colocated,
            ..
        } => {
            let offset = offset.bits();
            let dist = if colocated {
                RelocDistance::Near
            } else {
                RelocDistance::Far
            };
            Some((name, dist, offset))
        }
        _ => None,
    }
}
/// Return the `MemFlags` of `ir_inst` if it is a memory-accessing
/// instruction (load, store, atomic RMW/CAS); `None` otherwise.
pub fn memflags(&self, ir_inst: Inst) -> Option<MemFlags> {
    // All memory-touching instruction formats carry a `flags` field;
    // a single or-pattern extracts it uniformly.
    match &self.f.dfg.insts[ir_inst] {
        &InstructionData::AtomicCas { flags, .. }
        | &InstructionData::AtomicRmw { flags, .. }
        | &InstructionData::Load { flags, .. }
        | &InstructionData::LoadNoOffset { flags, .. }
        | &InstructionData::Store { flags, .. }
        | &InstructionData::StoreNoOffset { flags, .. } => Some(flags),
        _ => None,
    }
}
/// The relative source location of `ir_inst`.
pub fn srcloc(&self, ir_inst: Inst) -> RelSourceLoc {
    self.f.rel_srclocs()[ir_inst]
}

/// Number of operands of `ir_inst`.
pub fn num_inputs(&self, ir_inst: Inst) -> usize {
    self.f.dfg.inst_args(ir_inst).len()
}

/// Number of results of `ir_inst`.
pub fn num_outputs(&self, ir_inst: Inst) -> usize {
    self.f.dfg.inst_results(ir_inst).len()
}

/// Type of the `idx`-th operand of `ir_inst`.
pub fn input_ty(&self, ir_inst: Inst, idx: usize) -> Type {
    self.value_ty(self.input_as_value(ir_inst, idx))
}

/// Type of `val`.
pub fn value_ty(&self, val: Value) -> Type {
    self.f.dfg.value_type(val)
}

/// Type of the `idx`-th result of `ir_inst`.
pub fn output_ty(&self, ir_inst: Inst, idx: usize) -> Type {
    self.f.dfg.value_type(self.f.dfg.inst_results(ir_inst)[idx])
}

/// If `ir_inst` produces a constant, return its value. The debug
/// assertion checks the cached value is properly sign-truncated to the
/// result type's width.
pub fn get_constant(&self, ir_inst: Inst) -> Option<u64> {
    self.inst_constants.get(&ir_inst).map(|&c| {
        debug_assert_eq!(c, {
            let input_size = self.output_ty(ir_inst, 0).bits() as u64;
            let shift = 64 - input_size;
            (c << shift) >> shift
        });
        c
    })
}

/// The `idx`-th operand of `ir_inst`, as a (real) `Value`.
pub fn input_as_value(&self, ir_inst: Inst, idx: usize) -> Value {
    let val = self.f.dfg.inst_args(ir_inst)[idx];
    debug_assert!(self.f.dfg.value_is_real(val));
    val
}
/// `get_value_as_source_or_const` for the `idx`-th operand of
/// `ir_inst`.
pub fn get_input_as_source_or_const(&self, ir_inst: Inst, idx: usize) -> NonRegInput {
    let val = self.input_as_value(ir_inst, idx);
    self.get_value_as_source_or_const(val)
}

/// Determine whether `val`'s producer may be merged (pattern-matched
/// on) or sunk into the current lowering, and whether it is a known
/// constant. Pure producers may always be reached; side-effecting
/// producers only when they are single-use, single-output, and
/// immediately adjacent by color (no intervening side effect).
pub fn get_value_as_source_or_const(&self, val: Value) -> NonRegInput {
    trace!(
        "get_input_for_val: val {} at cur_inst {:?} cur_scan_entry_color {:?}",
        val, self.cur_inst, self.cur_scan_entry_color,
    );
    let inst = match self.f.dfg.value_def(val) {
        ValueDef::Result(src_inst, result_idx) => {
            let src_side_effect = has_lowering_side_effect(self.f, src_inst);
            trace!(" -> src inst {}", self.f.dfg.display_inst(src_inst));
            trace!(" -> has lowering side effect: {}", src_side_effect);
            if is_value_use_root(self.f, src_inst) {
                // Multi-result producers are never merged.
                trace!(" -> is a root instruction");
                InputSourceInst::None
            } else if !src_side_effect {
                // Pure op: sinkable if single-use, else duplicable.
                if self.value_ir_uses[val] == ValueUseState::Once {
                    InputSourceInst::UniqueUse(src_inst, result_idx)
                } else {
                    InputSourceInst::Use(src_inst, result_idx)
                }
            } else {
                trace!(
                    " -> side-effecting op {} for val {}: use state {:?}",
                    src_inst, val, self.value_ir_uses[val]
                );

                // Side-effecting op: sinkable only when single-use,
                // single-output, and color-adjacent to the current
                // scan point (producer's exit color == our entry
                // color, i.e. nothing side-effecting in between).
                if self.cur_scan_entry_color.is_some()
                    && self.value_ir_uses[val] == ValueUseState::Once
                    && self.num_outputs(src_inst) == 1
                    && self
                        .side_effect_inst_entry_colors
                        .get(&src_inst)
                        .unwrap()
                        .get()
                        + 1
                        == self.cur_scan_entry_color.unwrap().get()
                {
                    InputSourceInst::UniqueUse(src_inst, 0)
                } else {
                    InputSourceInst::None
                }
            }
        }
        _ => InputSourceInst::None,
    };
    let constant = inst.as_inst().and_then(|(inst, _)| self.get_constant(inst));

    NonRegInput { inst, constant }
}
/// Record one additional lowered use of `val` without producing its
/// registers (used when a producer is duplicated rather than read).
pub fn increment_lowered_uses(&mut self, val: Value) {
    self.value_lowered_uses[val] += 1
}

/// `put_value_in_regs` for the `idx`-th operand of `ir_inst`.
pub fn put_input_in_regs(&mut self, ir_inst: Inst, idx: usize) -> ValueRegs<Reg> {
    let val = self.f.dfg.inst_args(ir_inst)[idx];
    self.put_value_in_regs(val)
}

/// Return the registers holding `val`, marking one lowered use.
/// Panics if `val`'s producer was already sunk (a sunk producer's
/// value must not also be read from registers).
pub fn put_value_in_regs(&mut self, val: Value) -> ValueRegs<Reg> {
    debug_assert!(self.f.dfg.value_is_real(val));
    trace!("put_value_in_regs: val {}", val);

    if let Some(inst) = self.f.dfg.value_def(val).inst() {
        assert!(!self.inst_sunk.contains(&inst));
    }

    let regs = self.value_regs[val];
    trace!(" -> regs {:?}", regs);
    assert!(regs.is_valid());

    self.value_lowered_uses[val] += 1;

    regs
}

/// The pre-allocated return-value vregs of the `try_call` `ir_inst`.
pub fn try_call_return_defs(&mut self, ir_inst: Inst) -> &[ValueRegs<Writable<Reg>>] {
    &self.try_call_rets.get(&ir_inst).unwrap()[..]
}

/// The pre-allocated exception-payload vregs of the `try_call`
/// `ir_inst`.
pub fn try_call_exception_defs(&mut self, ir_inst: Inst) -> &[Writable<Reg>] {
    &self.try_call_payloads.get(&ir_inst).unwrap()[..]
}
}
impl<'func, I: VCodeInst> Lower<'func, I> {
/// Allocate a fresh writable temporary for `ty`; allocation errors are
/// deferred to the end of lowering.
pub fn alloc_tmp(&mut self, ty: Type) -> ValueRegs<Writable<Reg>> {
    writable_value_regs(self.vregs.alloc_with_deferred_error(ty))
}

/// The CLIF instruction currently being lowered. Panics outside of
/// lowering.
pub fn cur_inst(&self) -> Inst {
    self.cur_inst.unwrap()
}

/// Buffer a machine instruction for the current IR instruction; it is
/// flushed (reversed) by `finish_ir_inst`.
pub fn emit(&mut self, mach_inst: I) {
    trace!("emit: {:?}", mach_inst);
    self.ir_insts.push(mach_inst);
}
/// Sink the side-effecting instruction `ir_inst` into the current
/// lowering. Asserts no result already has lowered uses and that the
/// producer is color-adjacent (its exit color equals our entry color),
/// then rolls the scan color back and marks the inst to be skipped.
pub fn sink_inst(&mut self, ir_inst: Inst) {
    assert!(has_lowering_side_effect(self.f, ir_inst));
    assert!(self.cur_scan_entry_color.is_some());

    for result in self.dfg().inst_results(ir_inst) {
        assert!(self.value_lowered_uses[*result] == 0);
    }

    let sunk_inst_entry_color = self
        .side_effect_inst_entry_colors
        .get(&ir_inst)
        .cloned()
        .unwrap();
    let sunk_inst_exit_color = InstColor::new(sunk_inst_entry_color.get() + 1);
    assert!(sunk_inst_exit_color == self.cur_scan_entry_color.unwrap());
    self.cur_scan_entry_color = Some(sunk_inst_entry_color);
    self.inst_sunk.insert(ir_inst);
}
/// The data of immediate `imm`. Panics if the handle is invalid.
pub fn get_immediate_data(&self, imm: Immediate) -> &ConstantData {
    self.f.dfg.immediates.get(imm).unwrap()
}

/// The data of constant `constant_handle`.
pub fn get_constant_data(&self, constant_handle: Constant) -> &ConstantData {
    self.f.dfg.constants.get(constant_handle)
}

/// Intern `constant` into the VCode constant pool, returning its
/// handle.
pub fn use_constant(&mut self, constant: VCodeConstantData) -> VCodeConstant {
    self.vcode.constants().insert(constant)
}

/// Return `reg` itself if it is virtual; otherwise copy the physical
/// register into a fresh vreg of type `ty` and return that.
pub fn ensure_in_vreg(&mut self, reg: Reg, ty: Type) -> Reg {
    if reg.to_virtual_reg().is_some() {
        reg
    } else {
        let new_reg = self.alloc_tmp(ty).only_reg().unwrap();
        self.emit(I::gen_move(new_reg, reg, ty));
        new_reg.to_reg()
    }
}
/// Attach a PCC range fact to `reg`'s vreg (if it doesn't already have
/// a fact). No-op unless proof-carrying-code checking is enabled.
pub fn add_range_fact(&mut self, reg: Reg, bit_width: u16, min: u64, max: u64) {
    if self.flags.enable_pcc() {
        self.vregs.set_fact_if_missing(
            reg.to_virtual_reg().unwrap(),
            Fact::Range {
                bit_width,
                min,
                max,
            },
        );
    }
}
}
#[cfg(test)]
mod tests {
    use super::ValueUseState;
    use crate::cursor::{Cursor, FuncCursor};
    use crate::ir::types;
    use crate::ir::{Function, InstBuilder};

    /// Results of a multi-result inst used once each stay `Once`:
    /// multi-result producers are use-state "roots", so no Multiple
    /// state propagates through them.
    #[test]
    fn multi_result_use_once() {
        let mut func = Function::new();
        let block0 = func.dfg.make_block();
        let mut pos = FuncCursor::new(&mut func);
        pos.insert_block(block0);
        let v1 = pos.ins().iconst(types::I64, 0);
        let v2 = pos.ins().iconst(types::I64, 1);
        let v3 = pos.ins().iconcat(v1, v2);
        let (v4, v5) = pos.ins().isplit(v3);
        pos.ins().return_(&[v4, v5]);
        let func = pos.func;

        let uses = super::compute_use_states(&func, None);

        assert_eq!(uses[v1], ValueUseState::Once);
        assert_eq!(uses[v2], ValueUseState::Once);
        assert_eq!(uses[v3], ValueUseState::Once);
        assert_eq!(uses[v4], ValueUseState::Once);
        assert_eq!(uses[v5], ValueUseState::Once);
    }

    /// A result used twice becomes `Multiple`, but that does not leak
    /// through the multi-result producer to its operands (v3/v1/v2),
    /// and its sibling result stays `Unused`.
    #[test]
    fn results_used_twice_but_not_operands() {
        let mut func = Function::new();
        let block0 = func.dfg.make_block();
        let mut pos = FuncCursor::new(&mut func);
        pos.insert_block(block0);
        let v1 = pos.ins().iconst(types::I64, 0);
        let v2 = pos.ins().iconst(types::I64, 1);
        let v3 = pos.ins().iconcat(v1, v2);
        let (v4, v5) = pos.ins().isplit(v3);
        pos.ins().return_(&[v4, v4]);
        let func = pos.func;

        let uses = super::compute_use_states(&func, None);

        assert_eq!(uses[v1], ValueUseState::Once);
        assert_eq!(uses[v2], ValueUseState::Once);
        assert_eq!(uses[v3], ValueUseState::Once);
        assert_eq!(uses[v4], ValueUseState::Multiple);
        assert_eq!(uses[v5], ValueUseState::Unused);
    }
}