use crate::binemit::{Addend, CodeOffset, Reloc};
use crate::ir::function::FunctionParameters;
use crate::ir::{DebugTag, ExceptionTag, ExternalName, RelSourceLoc, SourceLoc, TrapCode};
use crate::isa::unwind::UnwindInst;
use crate::machinst::{
BlockIndex, MachInstLabelUse, TextSectionBuilder, VCodeConstant, VCodeConstants, VCodeInst,
};
use crate::trace;
use crate::{MachInstEmitState, ir};
use crate::{VCodeConstantData, timing};
use alloc::collections::BinaryHeap;
use alloc::string::String;
use alloc::vec::Vec;
use core::cmp::Ordering;
use core::mem;
use core::ops::Range;
use cranelift_control::ControlPlane;
use cranelift_entity::{PrimaryMap, SecondaryMap, entity_impl};
use smallvec::SmallVec;
#[cfg(feature = "enable-serde")]
use serde::{Deserialize, Serialize};
/// Phase-selection trait: chooses the concrete source-location types stored in
/// a `MachBufferFinalized<T>`. With serde support enabled, those types must
/// also be (de)serializable.
#[cfg(feature = "enable-serde")]
pub trait CompilePhase {
    type MachSrcLocType: for<'a> Deserialize<'a> + Serialize + core::fmt::Debug + PartialEq + Clone;
    type SourceLocType: for<'a> Deserialize<'a> + Serialize + core::fmt::Debug + PartialEq + Clone;
}

/// Phase-selection trait (non-serde build): same associated types, without the
/// serialization bounds.
#[cfg(not(feature = "enable-serde"))]
pub trait CompilePhase {
    type MachSrcLocType: core::fmt::Debug + PartialEq + Clone;
    type SourceLocType: core::fmt::Debug + PartialEq + Clone;
}

/// Compilation phase in which source locations are still *relative* to a base
/// location (see `apply_base_srcloc` for the transition to `Final`).
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct Stencil;

/// Compilation phase in which relative source locations have been resolved to
/// absolute `SourceLoc`s.
#[derive(Clone, Debug, PartialEq)]
pub struct Final;

impl CompilePhase for Stencil {
    type MachSrcLocType = MachSrcLoc<Stencil>;
    type SourceLocType = RelSourceLoc;
}

impl CompilePhase for Final {
    type MachSrcLocType = MachSrcLoc<Final>;
    type SourceLocType = SourceLoc;
}

/// Whether veneers should be forced for every label use when an island is
/// emitted (used by `MachTextSectionBuilder` via its `force_veneers` field).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ForceVeneers {
    Yes,
    No,
}
/// A buffer of output to be produced, fixed up, and then emitted in bulk.
///
/// Handles label references (with deferred fixups and veneers), branch
/// tracking and peephole optimization, pooled constants, deferred traps, and
/// per-offset metadata (relocs, traps, call sites, source locations, unwind
/// info, stack maps, debug tags).
pub struct MachBuffer<I: VCodeInst> {
    /// The buffer contents, as raw bytes.
    data: SmallVec<[u8; 1024]>,
    /// Minimum required alignment for the final buffer; starts at the ISA's
    /// function alignment and may be raised by constants or by
    /// `set_log2_min_function_alignment`.
    min_alignment: u32,
    /// Any relocations referring to this code.
    relocs: SmallVec<[MachReloc; 16]>,
    /// Any trap records referring to this code.
    traps: SmallVec<[MachTrap; 16]>,
    /// Any call-site records referring to this code.
    call_sites: SmallVec<[MachCallSite; 16]>,
    /// Any patchable call sites referring to this code.
    patchable_call_sites: SmallVec<[MachPatchableCallSite; 16]>,
    /// Exception handlers; `call_sites` entries refer to subranges of this list.
    exception_handlers: SmallVec<[MachExceptionHandler; 16]>,
    /// Any source-location mappings (still relative) for this code.
    srclocs: SmallVec<[MachSrcLoc<Stencil>; 64]>,
    /// Debug-tag records, each referring to a range of `debug_tag_pool`.
    debug_tags: Vec<MachDebugTags>,
    /// Pool of tags referenced by `debug_tags` entries.
    debug_tag_pool: Vec<DebugTag>,
    /// User stack maps: (return-address offset, frame span, stack map),
    /// kept sorted by offset (asserted in `push_user_stack_map`).
    user_stack_maps: SmallVec<[(CodeOffset, u32, ir::UserStackMap); 8]>,
    /// Any unwind info at a given location.
    unwind_info: SmallVec<[(CodeOffset, UnwindInst); 8]>,
    /// Currently-open source-location range, if any: (start offset, location).
    cur_srcloc: Option<(CodeOffset, RelSourceLoc)>,
    /// Known label offsets; `UNKNOWN_LABEL_OFFSET` until bound.
    label_offsets: SmallVec<[CodeOffset; 16]>,
    /// Label aliases: a label that sat at the start of a removed unconditional
    /// branch is redirected to that branch's target. `UNKNOWN_LABEL` = no alias.
    label_aliases: SmallVec<[MachLabel; 16]>,
    /// Constants that must be emitted at the next island.
    pending_constants: SmallVec<[VCodeConstant; 16]>,
    /// Total size of all constants in `pending_constants`.
    pending_constants_size: CodeOffset,
    /// Traps that must be emitted at the next island.
    pending_traps: SmallVec<[MachLabelTrap; 16]>,
    /// Fixups not yet promoted to `fixup_records`; resolved or promoted at the
    /// next island.
    pending_fixup_records: SmallVec<[MachLabelFixup<I>; 16]>,
    /// Tightest deadline among `pending_fixup_records` (`u32::MAX` if none).
    pending_fixup_deadline: CodeOffset,
    /// Outstanding fixups, ordered as a min-heap on deadline (note the
    /// reversed `Ord` impl on `MachLabelFixup`).
    fixup_records: BinaryHeap<MachLabelFixup<I>>,
    /// Most recent branches, tracked to enable in-place branch optimization.
    latest_branches: SmallVec<[MachBranch; 4]>,
    /// Labels bound exactly at the buffer tail; only meaningful while
    /// `cur_offset() == labels_at_tail_off` (see `lazily_clear_labels_at_tail`).
    labels_at_tail: SmallVec<[MachLabel; 4]>,
    /// The offset at which `labels_at_tail` is valid.
    labels_at_tail_off: CodeOffset,
    /// Metadata about every constant registered with this buffer.
    constants: PrimaryMap<VCodeConstant, MachBufferConstant>,
    /// Constants given placeholder space during island emission; real bytes
    /// are copied in by `finish_constants`.
    used_constants: SmallVec<[(VCodeConstant, CodeOffset); 4]>,
    /// Whether a patchable region is currently open (such regions may not be
    /// nested and may not contain branches — see the asserts in the branch
    /// recording methods).
    open_patchable: bool,
    /// Optional frame-layout metadata for this function.
    frame_layout: Option<MachBufferFrameLayout>,
}
impl MachBufferFinalized<Stencil> {
    /// Get a finalized machine buffer by applying the function's base source
    /// location to every (relative) source-location entry, moving the buffer
    /// from the `Stencil` phase to the `Final` phase. All other metadata is
    /// carried over unchanged.
    pub fn apply_base_srcloc(self, base_srcloc: SourceLoc) -> MachBufferFinalized<Final> {
        MachBufferFinalized {
            data: self.data,
            relocs: self.relocs,
            traps: self.traps,
            call_sites: self.call_sites,
            patchable_call_sites: self.patchable_call_sites,
            exception_handlers: self.exception_handlers,
            srclocs: self
                .srclocs
                .into_iter()
                .map(|srcloc| srcloc.apply_base_srcloc(base_srcloc))
                .collect(),
            debug_tags: self.debug_tags,
            debug_tag_pool: self.debug_tag_pool,
            user_stack_maps: self.user_stack_maps,
            unwind_info: self.unwind_info,
            alignment: self.alignment,
            frame_layout: self.frame_layout,
            nop_units: self.nop_units,
        }
    }
}
/// A `MachBuffer` once emission is completed: holds the final buffer contents
/// and derived metadata, with all labels resolved to offsets.
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachBufferFinalized<T: CompilePhase> {
    /// The buffer contents, as raw bytes.
    pub(crate) data: SmallVec<[u8; 1024]>,
    /// Any relocations referring to this code, with labels resolved.
    pub(crate) relocs: SmallVec<[FinalizedMachReloc; 16]>,
    /// Any trap records referring to this code.
    pub(crate) traps: SmallVec<[MachTrap; 16]>,
    /// Any call-site records referring to this code.
    pub(crate) call_sites: SmallVec<[MachCallSite; 16]>,
    /// Any patchable call sites referring to this code.
    pub(crate) patchable_call_sites: SmallVec<[MachPatchableCallSite; 16]>,
    /// Exception handlers, with labels resolved to offsets.
    pub(crate) exception_handlers: SmallVec<[FinalizedMachExceptionHandler; 16]>,
    /// Any source-location mappings for this code, sorted by start offset.
    pub(crate) srclocs: SmallVec<[T::MachSrcLocType; 64]>,
    /// Debug-tag records, each referring to a range of `debug_tag_pool`.
    pub(crate) debug_tags: Vec<MachDebugTags>,
    /// Pool of tags referenced by `debug_tags` entries.
    pub(crate) debug_tag_pool: Vec<DebugTag>,
    /// User stack maps: (return-address offset, frame span, stack map).
    pub(crate) user_stack_maps: SmallVec<[(CodeOffset, u32, ir::UserStackMap); 8]>,
    /// Optional frame-layout metadata for this function.
    pub(crate) frame_layout: Option<MachBufferFrameLayout>,
    /// Any unwind info at a given location.
    pub unwind_info: SmallVec<[(CodeOffset, UnwindInst); 8]>,
    /// The required alignment of this buffer.
    pub alignment: u32,
    /// NOP instruction encodings, one per supported size, for patching.
    pub nop_units: Vec<Vec<u8>>,
}

/// Sentinel: the offset of a label that has not yet been bound.
const UNKNOWN_LABEL_OFFSET: CodeOffset = 0xffff_ffff;
/// Sentinel: "no alias" entry in the label-alias table.
const UNKNOWN_LABEL: MachLabel = MachLabel(0xffff_ffff);
/// Branch optimization is skipped for branches carrying more labels than
/// this, to bound worst-case work.
const LABEL_LIST_THRESHOLD: usize = 100;
/// A label refers to some offset in a `MachBuffer`. It may not be resolved at
/// the point at which it is used by emitted code; the buffer records a fixup
/// and patches the reference once the label's offset becomes known.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MachLabel(u32);
entity_impl!(MachLabel);

impl MachLabel {
    /// Get a label for a block. (Block indices and label indices share the
    /// same index space; see `reserve_labels_for_blocks`.)
    pub fn from_block(bindex: BlockIndex) -> MachLabel {
        MachLabel(bindex.index() as u32)
    }

    /// Get a human-readable string for this label ("labelN").
    pub fn to_string(&self) -> String {
        format!("label{}", self.0)
    }
}

impl Default for MachLabel {
    // Default to the "unknown" sentinel so uninitialized entries are
    // distinguishable from real labels.
    fn default() -> Self {
        UNKNOWN_LABEL
    }
}

/// Represents the beginning of an editable region in the [`MachBuffer`];
/// holds the region's start byte offset. Closed via `end_patchable`.
pub struct OpenPatchRegion(usize);

/// A region of the [`MachBuffer`] that can be edited prior to finalization,
/// bounded by byte offsets into the buffer.
pub struct PatchRegion {
    range: Range<usize>,
}

impl PatchRegion {
    /// Consume the patch region to yield a mutable slice of the buffer data.
    pub fn patch<I: VCodeInst>(self, buffer: &mut MachBuffer<I>) -> &mut [u8] {
        &mut buffer.data[self.range]
    }
}
impl<I: VCodeInst> MachBuffer<I> {
/// Create a new, empty `MachBuffer`. The minimum alignment starts at the
/// ISA's required function alignment; all metadata containers start empty.
pub fn new() -> MachBuffer<I> {
    MachBuffer {
        data: SmallVec::new(),
        min_alignment: I::function_alignment().minimum,
        relocs: SmallVec::new(),
        traps: SmallVec::new(),
        call_sites: SmallVec::new(),
        patchable_call_sites: SmallVec::new(),
        exception_handlers: SmallVec::new(),
        srclocs: SmallVec::new(),
        debug_tags: vec![],
        debug_tag_pool: vec![],
        user_stack_maps: SmallVec::new(),
        unwind_info: SmallVec::new(),
        cur_srcloc: None,
        label_offsets: SmallVec::new(),
        label_aliases: SmallVec::new(),
        pending_constants: SmallVec::new(),
        pending_constants_size: 0,
        pending_traps: SmallVec::new(),
        pending_fixup_records: SmallVec::new(),
        // No pending fixups yet, so no deadline.
        pending_fixup_deadline: u32::MAX,
        fixup_records: Default::default(),
        latest_branches: SmallVec::new(),
        labels_at_tail: SmallVec::new(),
        labels_at_tail_off: 0,
        constants: Default::default(),
        used_constants: Default::default(),
        open_patchable: false,
        frame_layout: None,
    }
}
/// Current offset from the start of the buffer.
pub fn cur_offset(&self) -> CodeOffset {
    self.data.len() as CodeOffset
}

/// Append a single byte.
pub fn put1(&mut self, value: u8) {
    self.data.push(value);
}

/// Append a 16-bit value, little-endian.
pub fn put2(&mut self, value: u16) {
    self.data.extend_from_slice(&value.to_le_bytes());
}

/// Append a 32-bit value, little-endian.
pub fn put4(&mut self, value: u32) {
    self.data.extend_from_slice(&value.to_le_bytes());
}

/// Append a 64-bit value, little-endian.
pub fn put8(&mut self, value: u64) {
    self.data.extend_from_slice(&value.to_le_bytes());
}

/// Append a slice of raw bytes.
pub fn put_data(&mut self, data: &[u8]) {
    self.data.extend_from_slice(data);
}

/// Reserve `len` zeroed bytes at the tail and return a mutable slice over
/// them so the caller can fill them in.
pub fn get_appended_space(&mut self, len: usize) -> &mut [u8] {
    let start = self.data.len();
    self.data.resize(start + len, 0);
    &mut self.data[start..]
}
pub fn align_to(&mut self, align_to: CodeOffset) {
trace!("MachBuffer: align to {}", align_to);
assert!(
align_to.is_power_of_two(),
"{align_to} is not a power of two"
);
while self.cur_offset() & (align_to - 1) != 0 {
self.put1(0);
}
}
pub fn start_patchable(&mut self) -> OpenPatchRegion {
assert!(!self.open_patchable, "Patchable regions may not be nested");
self.open_patchable = true;
OpenPatchRegion(usize::try_from(self.cur_offset()).unwrap())
}
pub fn end_patchable(&mut self, open: OpenPatchRegion) -> PatchRegion {
self.open_patchable = false;
let end = usize::try_from(self.cur_offset()).unwrap();
PatchRegion { range: open.0..end }
}
/// Allocate a fresh, unbound label.
pub fn get_label(&mut self) -> MachLabel {
    let index = self.label_offsets.len() as u32;
    self.label_offsets.push(UNKNOWN_LABEL_OFFSET);
    self.label_aliases.push(UNKNOWN_LABEL);
    let label = MachLabel(index);
    trace!("MachBuffer: new label -> {:?}", label);
    label
}

/// Reserve the first `blocks` labels for basic blocks, so that
/// `MachLabel::from_block` maps block indices directly onto labels. Must
/// be called before any other labels are allocated.
pub fn reserve_labels_for_blocks(&mut self, blocks: usize) {
    trace!("MachBuffer: first {} labels are for blocks", blocks);
    debug_assert!(self.label_offsets.is_empty());
    self.label_offsets.resize(blocks, UNKNOWN_LABEL_OFFSET);
    self.label_aliases.resize(blocks, UNKNOWN_LABEL);
}

/// Register metadata for every constant in the pool.
pub fn register_constants(&mut self, constants: &VCodeConstants) {
    for (constant, data) in constants.iter() {
        self.register_constant(&constant, data);
    }
}

/// Register metadata for one constant. Constants must be registered in
/// index order so buffer-local and pool indices stay in sync.
pub fn register_constant(&mut self, constant: &VCodeConstant, data: &VCodeConstantData) {
    let entry = MachBufferConstant {
        upcoming_label: None,
        align: data.alignment(),
        size: data.as_slice().len(),
    };
    let allocated = self.constants.push(entry);
    assert_eq!(*constant, allocated);
}
/// Copy each used constant's actual bytes into the placeholder space that was
/// reserved for it during island emission. Returns the alignment required for
/// the whole buffer: the max of `min_alignment` and every emitted constant's
/// alignment.
fn finish_constants(&mut self, constants: &VCodeConstants) -> u32 {
    let mut alignment = self.min_alignment;
    for (constant, offset) in mem::take(&mut self.used_constants) {
        let constant = constants.get(constant);
        let data = constant.as_slice();
        self.data[offset as usize..][..data.len()].copy_from_slice(data);
        alignment = constant.alignment().max(alignment);
    }
    alignment
}

/// Get a label that will resolve to the constant's offset once it is emitted
/// into an island. Reuses the constant's existing pending label if there is
/// one; otherwise allocates a label and schedules the constant for the next
/// island.
pub fn get_label_for_constant(&mut self, constant: VCodeConstant) -> MachLabel {
    let MachBufferConstant {
        align,
        size,
        upcoming_label,
    } = self.constants[constant];
    if let Some(label) = upcoming_label {
        return label;
    }
    let label = self.get_label();
    trace!(
        "defer constant: eventually emit {size} bytes aligned \
         to {align} at label {label:?}",
    );
    self.pending_constants.push(constant);
    // Track the total pending size so island-need estimates account for it.
    self.pending_constants_size += size as u32;
    self.constants[constant].upcoming_label = Some(label);
    label
}
/// Bind a label to the current offset. A label may be bound only once.
pub fn bind_label(&mut self, label: MachLabel, ctrl_plane: &mut ControlPlane) {
    trace!(
        "MachBuffer: bind label {:?} at offset {}",
        label,
        self.cur_offset()
    );
    // The label must not already have an offset or an alias.
    debug_assert_eq!(self.label_offsets[label.0 as usize], UNKNOWN_LABEL_OFFSET);
    debug_assert_eq!(self.label_aliases[label.0 as usize], UNKNOWN_LABEL);
    let offset = self.cur_offset();
    self.label_offsets[label.0 as usize] = offset;
    // Re-validate `labels_at_tail` for the current offset, then record this
    // label as bound at the tail.
    self.lazily_clear_labels_at_tail();
    self.labels_at_tail.push(label);
    // Binding a label can enable branch simplifications (e.g. a branch whose
    // target is now the fallthrough offset).
    self.optimize_branches(ctrl_plane);
}

/// Lazily clear `labels_at_tail` if the buffer tail has moved past the offset
/// that the list currently describes.
fn lazily_clear_labels_at_tail(&mut self) {
    let offset = self.cur_offset();
    if offset > self.labels_at_tail_off {
        self.labels_at_tail_off = offset;
        self.labels_at_tail.clear();
    }
}

/// Resolve a label to an offset, following the alias chain. Returns
/// `UNKNOWN_LABEL_OFFSET` if the label has not been bound yet.
pub(crate) fn resolve_label_offset(&self, mut label: MachLabel) -> CodeOffset {
    let mut iters = 0;
    while self.label_aliases[label.0 as usize] != UNKNOWN_LABEL {
        label = self.label_aliases[label.0 as usize];
        iters += 1;
        // Aliases should never form a cycle; bail loudly if they do.
        assert!(iters < 1_000_000, "Unexpected cycle in label aliases");
    }
    self.label_offsets[label.0 as usize]
}

/// Inform the buffer of a use of `label` by an instruction at `offset`,
/// recording a fixup of the given kind to be patched (or veneered) once the
/// label resolves.
pub fn use_label_at_offset(&mut self, offset: CodeOffset, label: MachLabel, kind: I::LabelUse) {
    trace!(
        "MachBuffer: use_label_at_offset: offset {} label {:?} kind {:?}",
        offset, label, kind
    );
    let fixup = MachLabelFixup {
        label,
        offset,
        kind,
    };
    // Keep the tightest deadline so `island_needed` knows when an island
    // must be emitted before this fixup goes out of range.
    self.pending_fixup_deadline = self.pending_fixup_deadline.min(fixup.deadline());
    self.pending_fixup_records.push(fixup);
}
/// Inform the buffer that an unconditional branch occupying `start..end` and
/// targeting `label` was just emitted. The branch's label use must already
/// have been recorded via `use_label_at_offset` (its fixup index is the most
/// recent pending fixup).
pub fn add_uncond_branch(&mut self, start: CodeOffset, end: CodeOffset, target: MachLabel) {
    debug_assert!(
        !self.open_patchable,
        "Branch instruction inserted within a patchable region"
    );
    // The branch must sit exactly at the buffer tail.
    assert!(self.cur_offset() == start);
    debug_assert!(end > start);
    assert!(!self.pending_fixup_records.is_empty());
    let fixup = self.pending_fixup_records.len() - 1;
    self.lazily_clear_labels_at_tail();
    self.latest_branches.push(MachBranch {
        start,
        end,
        target,
        fixup,
        // No inverted form: this is what marks the branch unconditional.
        inverted: None,
        // Snapshot the labels bound at this branch's start so they can be
        // redirected if the branch is later removed.
        labels_at_this_branch: self.labels_at_tail.clone(),
    });
}

/// Inform the buffer that a conditional branch occupying `start..end` and
/// targeting `label` was just emitted. `inverted` is the encoding of the
/// same-length branch with the inverted condition, used for in-place
/// rewriting by `optimize_branches`.
pub fn add_cond_branch(
    &mut self,
    start: CodeOffset,
    end: CodeOffset,
    target: MachLabel,
    inverted: &[u8],
) {
    debug_assert!(
        !self.open_patchable,
        "Branch instruction inserted within a patchable region"
    );
    assert!(self.cur_offset() == start);
    debug_assert!(end > start);
    assert!(!self.pending_fixup_records.is_empty());
    // Inverted encoding must be the same length so it can replace the
    // original bytes in place.
    debug_assert!(
        inverted.len() == (end - start) as usize,
        "branch length = {}, but inverted length = {}",
        end - start,
        inverted.len()
    );
    let fixup = self.pending_fixup_records.len() - 1;
    let inverted = Some(SmallVec::from(inverted));
    self.lazily_clear_labels_at_tail();
    self.latest_branches.push(MachBranch {
        start,
        end,
        target,
        fixup,
        inverted,
        labels_at_this_branch: self.labels_at_tail.clone(),
    });
}

/// Remove the most recently recorded branch from the buffer tail, undoing its
/// bytes, fixup, and any metadata that covered it.
fn truncate_last_branch(&mut self) {
    debug_assert!(
        !self.open_patchable,
        "Branch instruction truncated within a patchable region"
    );
    self.lazily_clear_labels_at_tail();
    let b = self.latest_branches.pop().unwrap();
    // Only a branch that still ends at the buffer tail can be truncated.
    assert!(b.end == self.cur_offset());
    self.data.truncate(b.start as usize);
    self.pending_fixup_records.truncate(b.fixup);
    // Trim or drop source-location ranges that extended into the removed
    // branch.
    while let Some(last_srcloc) = self.srclocs.last_mut() {
        if last_srcloc.end <= b.start {
            break;
        }
        if last_srcloc.start < b.start {
            last_srcloc.end = b.start;
            break;
        }
        self.srclocs.pop();
    }
    // Drop debug tags recorded past the new tail.
    while let Some(last_debug_tag) = self.debug_tags.last() {
        if last_debug_tag.offset <= b.start {
            break;
        }
        self.debug_tags.pop();
    }
    let cur_off = self.cur_offset();
    self.labels_at_tail_off = cur_off;
    trace!(
        "truncate_last_branch: truncated {:?}; off now {}",
        b, cur_off
    );
    // Labels that were bound at the (now removed) end of the branch move back
    // to the new tail, joined by the labels that sat at the branch's start.
    for &l in &self.labels_at_tail {
        self.label_offsets[l.0 as usize] = cur_off;
    }
    self.labels_at_tail.extend(b.labels_at_this_branch);
}
/// Peephole-optimize the branches at the buffer tail: remove branches to the
/// fallthrough offset, redirect labels through unconditional branches, and
/// fold conditional+unconditional pairs by inverting the condition. Invoked
/// whenever a label is bound.
pub fn optimize_branches(&mut self, ctrl_plane: &mut ControlPlane) {
    // Chaos-mode control plane may skip optimization entirely.
    if ctrl_plane.get_decision() {
        return;
    }
    self.lazily_clear_labels_at_tail();
    trace!(
        "enter optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
        self.latest_branches, self.labels_at_tail, self.pending_fixup_records
    );
    while let Some(b) = self.latest_branches.last() {
        let cur_off = self.cur_offset();
        trace!("optimize_branches: last branch {:?} at off {}", b, cur_off);
        // Only a branch that still ends at the tail can be edited in place.
        if b.end < cur_off {
            break;
        }
        // Bound the work of redirecting labels.
        if b.labels_at_this_branch.len() > LABEL_LIST_THRESHOLD {
            break;
        }
        // A branch whose target is the very next offset is a no-op: remove it.
        if self.resolve_label_offset(b.target) == cur_off {
            trace!("branch with target == cur off; truncating");
            self.truncate_last_branch();
            continue;
        }
        if b.is_uncond() {
            // Redirect any labels bound at this branch's start directly to
            // its target — unless the branch targets itself, in which case
            // we must stop to avoid an alias cycle.
            if self.resolve_label_offset(b.target) != b.start {
                let redirected = b.labels_at_this_branch.len();
                for &l in &b.labels_at_this_branch {
                    trace!(
                        " -> label at start of branch {:?} redirected to target {:?}",
                        l, b.target
                    );
                    self.label_aliases[l.0 as usize] = b.target;
                }
                let mut_b = self.latest_branches.last_mut().unwrap();
                mut_b.labels_at_this_branch.clear();
                if redirected > 0 {
                    trace!(" -> after label redirects, restarting loop");
                    continue;
                }
            } else {
                break;
            }
            let b = self.latest_branches.last().unwrap();
            if self.latest_branches.len() > 1 {
                let prev_b = &self.latest_branches[self.latest_branches.len() - 2];
                trace!(" -> more than one branch; prev_b = {:?}", prev_b);
                // An unconditional branch immediately after another
                // unconditional branch, with no labels in between, is dead.
                if prev_b.is_uncond()
                    && prev_b.end == b.start
                    && b.labels_at_this_branch.is_empty()
                {
                    trace!(" -> uncond follows another uncond; truncating");
                    self.truncate_last_branch();
                    continue;
                }
                // cond-then-uncond where the conditional skips to the current
                // offset: invert the condition, retarget it at the uncond's
                // target, and remove the uncond.
                if prev_b.is_cond()
                    && prev_b.end == b.start
                    && self.resolve_label_offset(prev_b.target) == cur_off
                {
                    trace!(
                        " -> uncond follows a conditional, and conditional's target resolves to current offset"
                    );
                    let target = b.target;
                    let data = prev_b.inverted.clone().unwrap();
                    self.truncate_last_branch();
                    let off_before_edit = self.cur_offset();
                    let prev_b = self.latest_branches.last_mut().unwrap();
                    // Swap the conditional's bytes for the inverted form,
                    // remembering the original so it stays invertible.
                    let not_inverted = SmallVec::from(
                        &self.data[(prev_b.start as usize)..(prev_b.end as usize)],
                    );
                    self.data.truncate(prev_b.start as usize);
                    self.data.extend_from_slice(&data[..]);
                    prev_b.inverted = Some(not_inverted);
                    self.pending_fixup_records[prev_b.fixup].label = target;
                    trace!(" -> reassigning target of condbr to {:?}", target);
                    prev_b.target = target;
                    // Inverted encoding has the same length, so the tail
                    // offset must be unchanged.
                    debug_assert_eq!(off_before_edit, self.cur_offset());
                    continue;
                }
            }
        }
        // No rule applied; stop.
        break;
    }
    self.purge_latest_branches();
    trace!(
        "leave optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
        self.latest_branches, self.labels_at_tail, self.pending_fixup_records
    );
}

/// Drop the tracked branch list once the newest branch no longer abuts the
/// buffer tail — at that point none of them can be edited in place.
fn purge_latest_branches(&mut self) {
    let cur_off = self.cur_offset();
    if let Some(l) = self.latest_branches.last() {
        if l.end < cur_off {
            trace!("purge_latest_branches: removing branch {:?}", l);
            self.latest_branches.clear();
        }
    }
}
/// Defer a trap instruction to be emitted at the next island. Returns a label
/// that will resolve to the trap's offset.
pub fn defer_trap(&mut self, code: TrapCode) -> MachLabel {
    let label = self.get_label();
    self.pending_traps.push(MachLabelTrap {
        label,
        code,
        // Remember the active source location (if any) so the trap can be
        // attributed to it when it is finally emitted.
        loc: self.cur_srcloc.map(|(_start, loc)| loc),
    });
    label
}

/// Is an island needed within the next `distance` bytes? True if continuing
/// emission that far (plus the island's own worst-case size) could push some
/// outstanding fixup past its deadline.
pub fn island_needed(&self, distance: CodeOffset) -> bool {
    let deadline = match self.fixup_records.peek() {
        // The heap is a min-heap on deadline, so `peek` is the tightest
        // promoted fixup; combine with the pending-fixup deadline.
        Some(fixup) => fixup.deadline().min(self.pending_fixup_deadline),
        None => self.pending_fixup_deadline,
    };
    deadline < u32::MAX && self.worst_case_end_of_island(distance) > deadline
}

/// Upper bound on the offset at which an island emitted within `distance`
/// bytes could end: current offset + distance + worst-case veneer space for
/// every fixup + pending constants + pending trap opcodes.
fn worst_case_end_of_island(&self, distance: CodeOffset) -> CodeOffset {
    let island_worst_case_size = ((self.fixup_records.len() + self.pending_fixup_records.len())
        as u32)
        * (I::LabelUse::worst_case_veneer_size())
        + self.pending_constants_size
        + (self.pending_traps.len() * I::TRAP_OPCODE.len()) as u32;
    self.cur_offset()
        .saturating_add(distance)
        .saturating_add(island_worst_case_size)
}
/// Emit an island: deferred traps, constant placeholders, and any fixups that
/// must be resolved (or veneered) now. `distance` is how much more code the
/// caller plans to emit before the next island opportunity.
pub fn emit_island(&mut self, distance: CodeOffset, ctrl_plane: &mut ControlPlane) {
    self.emit_island_maybe_forced(ForceVeneers::No, distance, ctrl_plane);
}

/// Island emission worker; `force_veneers` additionally forces a veneer for
/// every fixup kind that supports one (used when appending functions to a
/// text section).
fn emit_island_maybe_forced(
    &mut self,
    force_veneers: ForceVeneers,
    distance: CodeOffset,
    ctrl_plane: &mut ControlPlane,
) {
    // The branches we've been tracking cannot survive the island's edits.
    self.latest_branches.clear();
    // Suspend any open source-location range so island contents are not
    // attributed to it; restored at the end.
    let cur_loc = self.cur_srcloc.map(|(_, loc)| loc);
    if cur_loc.is_some() {
        self.end_srcloc();
    }
    let forced_threshold = self.worst_case_end_of_island(distance);
    // Emit all deferred traps, binding their labels.
    for MachLabelTrap { label, code, loc } in mem::take(&mut self.pending_traps) {
        if let Some(loc) = loc {
            self.start_srcloc(loc);
        }
        self.align_to(I::LabelUse::ALIGN);
        self.bind_label(label, ctrl_plane);
        self.add_trap(code);
        self.put_data(I::TRAP_OPCODE);
        if loc.is_some() {
            self.end_srcloc();
        }
    }
    // Reserve (zeroed) space for all deferred constants; real bytes are
    // copied in later by `finish_constants`.
    for constant in mem::take(&mut self.pending_constants) {
        let MachBufferConstant { align, size, .. } = self.constants[constant];
        let label = self.constants[constant].upcoming_label.take().unwrap();
        self.align_to(align);
        self.bind_label(label, ctrl_plane);
        self.used_constants.push((constant, self.cur_offset()));
        self.get_appended_space(size);
    }
    // `bind_label` above must not have re-populated the branch list.
    assert!(self.latest_branches.is_empty());
    // Resolve pending fixups now if possible; otherwise promote them to the
    // deadline-ordered heap.
    for fixup in mem::take(&mut self.pending_fixup_records) {
        if self.should_apply_fixup(&fixup, forced_threshold) {
            self.handle_fixup(fixup, force_veneers, forced_threshold);
        } else {
            self.fixup_records.push(fixup);
        }
    }
    self.pending_fixup_deadline = u32::MAX;
    // Drain promoted fixups in deadline order while they are resolvable or
    // due for a veneer.
    while let Some(fixup) = self.fixup_records.peek() {
        trace!("emit_island: fixup {:?}", fixup);
        if !self.should_apply_fixup(fixup, forced_threshold) {
            break;
        }
        let fixup = self.fixup_records.pop().unwrap();
        self.handle_fixup(fixup, force_veneers, forced_threshold);
    }
    // Resume the suspended source-location range, if any.
    if let Some(loc) = cur_loc {
        self.start_srcloc(loc);
    }
}

/// A fixup should be applied now if its label is already resolved, or if
/// waiting past `forced_threshold` could put it out of range.
fn should_apply_fixup(&self, fixup: &MachLabelFixup<I>, forced_threshold: CodeOffset) -> bool {
    let label_offset = self.resolve_label_offset(fixup.label);
    label_offset != UNKNOWN_LABEL_OFFSET || fixup.deadline() < forced_threshold
}
/// Resolve a single fixup: patch the instruction in place if the label is
/// known and in range, or emit a veneer otherwise.
fn handle_fixup(
    &mut self,
    fixup: MachLabelFixup<I>,
    force_veneers: ForceVeneers,
    forced_threshold: CodeOffset,
) {
    let MachLabelFixup {
        label,
        offset,
        kind,
    } = fixup;
    let start = offset as usize;
    let end = (offset + kind.patch_size()) as usize;
    let label_offset = self.resolve_label_offset(label);
    if label_offset != UNKNOWN_LABEL_OFFSET {
        // Known label: check range. Forward references must be in range
        // (the deadline mechanism is supposed to guarantee this); backward
        // references out of range get a veneer.
        let veneer_required = if label_offset >= offset {
            assert!((label_offset - offset) <= kind.max_pos_range());
            false
        } else {
            (offset - label_offset) > kind.max_neg_range()
        };
        trace!(
            " -> label_offset = {}, known, required = {} (pos {} neg {})",
            label_offset,
            veneer_required,
            kind.max_pos_range(),
            kind.max_neg_range()
        );
        if (force_veneers == ForceVeneers::Yes && kind.supports_veneer()) || veneer_required {
            self.emit_veneer(label, offset, kind);
        } else {
            let slice = &mut self.data[start..end];
            trace!(
                "patching in-range! slice = {slice:?}; offset = {offset:#x}; label_offset = {label_offset:#x}"
            );
            kind.patch(slice, offset, label_offset);
        }
    } else {
        // Unknown label but deadline is near: a veneer (with longer range)
        // must be interposed. Sanity-check that a veneer is actually needed.
        assert!(forced_threshold - offset > kind.max_pos_range());
        self.emit_veneer(label, offset, kind);
    }
}

/// Emit a veneer at the current offset: patch the original instruction to
/// branch to the veneer, then record a new (longer-range) fixup from the
/// veneer to the real label.
fn emit_veneer(&mut self, label: MachLabel, offset: CodeOffset, kind: I::LabelUse) {
    assert!(
        kind.supports_veneer(),
        "jump beyond the range of {kind:?} but a veneer isn't supported",
    );
    self.align_to(I::LabelUse::ALIGN);
    let veneer_offset = self.cur_offset();
    trace!("making a veneer at {}", veneer_offset);
    let start = offset as usize;
    let end = (offset + kind.patch_size()) as usize;
    let slice = &mut self.data[start..end];
    trace!(
        "patching original at offset {} to veneer offset {}",
        offset, veneer_offset
    );
    kind.patch(slice, offset, veneer_offset);
    let veneer_slice = self.get_appended_space(kind.veneer_size() as usize);
    let (veneer_fixup_off, veneer_label_use) =
        kind.generate_veneer(veneer_slice, veneer_offset);
    trace!(
        "generated veneer; fixup offset {}, label_use {:?}",
        veneer_fixup_off, veneer_label_use
    );
    // Chain the veneer's own label use; it will be resolved like any other.
    self.use_label_at_offset(veneer_fixup_off, label, veneer_label_use);
}
/// Emit islands until no constants, traps, or fixups remain outstanding.
/// Each island can itself create new fixups (veneers), hence the loop.
fn finish_emission_maybe_forcing_veneers(
    &mut self,
    force_veneers: ForceVeneers,
    ctrl_plane: &mut ControlPlane,
) {
    while !self.pending_constants.is_empty()
        || !self.pending_traps.is_empty()
        || !self.fixup_records.is_empty()
        || !self.pending_fixup_records.is_empty()
    {
        // `u32::MAX` distance means every deadline is considered due, so all
        // fixups are resolved (or veneered) now.
        self.emit_island_maybe_forced(force_veneers, u32::MAX, ctrl_plane);
    }
    assert!(self.fixup_records.is_empty());
    assert!(self.pending_fixup_records.is_empty());
}

/// Finish emission, resolving all outstanding fixups and filling in constant
/// data, and return the finalized (Stencil-phase) buffer.
pub fn finish(
    mut self,
    constants: &VCodeConstants,
    ctrl_plane: &mut ControlPlane,
) -> MachBufferFinalized<Stencil> {
    let _tt = timing::vcode_emit_finish();
    self.finish_emission_maybe_forcing_veneers(ForceVeneers::No, ctrl_plane);
    let alignment = self.finish_constants(constants);
    // Resolve label-based reloc targets to concrete function offsets.
    let finalized_relocs = self
        .relocs
        .iter()
        .map(|reloc| FinalizedMachReloc {
            offset: reloc.offset,
            kind: reloc.kind,
            addend: reloc.addend,
            target: match &reloc.target {
                RelocTarget::ExternalName(name) => {
                    FinalizedRelocTarget::ExternalName(name.clone())
                }
                RelocTarget::Label(label) => {
                    FinalizedRelocTarget::Func(self.resolve_label_offset(*label))
                }
            },
        })
        .collect();
    // Resolve exception-handler labels to offsets.
    let finalized_exception_handlers = self
        .exception_handlers
        .iter()
        .map(|handler| handler.finalize(|label| self.resolve_label_offset(label)))
        .collect();
    // Island emission can record srclocs out of order; consumers expect
    // them sorted by start offset.
    let mut srclocs = self.srclocs;
    srclocs.sort_by_key(|entry| entry.start);
    MachBufferFinalized {
        data: self.data,
        relocs: finalized_relocs,
        traps: self.traps,
        call_sites: self.call_sites,
        patchable_call_sites: self.patchable_call_sites,
        exception_handlers: finalized_exception_handlers,
        srclocs,
        debug_tags: self.debug_tags,
        debug_tag_pool: self.debug_tag_pool,
        user_stack_maps: self.user_stack_maps,
        unwind_info: self.unwind_info,
        alignment,
        frame_layout: self.frame_layout,
        nop_units: I::gen_nop_units(),
    }
}
/// Add an external relocation at the given code offset.
pub fn add_reloc_at_offset<T: Into<RelocTarget> + Clone>(
    &mut self,
    offset: CodeOffset,
    kind: Reloc,
    target: &T,
    addend: Addend,
) {
    let target: RelocTarget = target.clone().into();
    self.relocs.push(MachReloc {
        offset,
        kind,
        target,
        addend,
    });
}

/// Add an external relocation at the current offset.
pub fn add_reloc<T: Into<RelocTarget> + Clone>(
    &mut self,
    kind: Reloc,
    target: &T,
    addend: Addend,
) {
    self.add_reloc_at_offset(self.data.len() as CodeOffset, kind, target, addend);
}
/// Add a trap record at the current offset.
pub fn add_trap(&mut self, code: TrapCode) {
    let offset = self.cur_offset();
    self.traps.push(MachTrap { offset, code });
}

/// Add a call-site record at the current offset, with no frame offset and
/// no exception handlers.
pub fn add_call_site(&mut self) {
    self.add_try_call_site(None, core::iter::empty());
}

/// Add a call-site record at the current offset, together with its
/// exception handlers (stored in the shared handler list and referenced
/// by an index range).
pub fn add_try_call_site(
    &mut self,
    frame_offset: Option<u32>,
    exception_handlers: impl Iterator<Item = MachExceptionHandler>,
) {
    let start = u32::try_from(self.exception_handlers.len()).unwrap();
    self.exception_handlers.extend(exception_handlers);
    let end = u32::try_from(self.exception_handlers.len()).unwrap();
    self.call_sites.push(MachCallSite {
        ret_addr: self.data.len() as CodeOffset,
        frame_offset,
        exception_handler_range: start..end,
    });
}

/// Add a patchable call-site record of `len` bytes ending at the current
/// offset.
pub fn add_patchable_call_site(&mut self, len: u32) {
    let ret_addr = self.cur_offset();
    self.patchable_call_sites
        .push(MachPatchableCallSite { ret_addr, len });
}

/// Add an unwind record at the current offset.
pub fn add_unwind(&mut self, unwind: UnwindInst) {
    let at = self.cur_offset();
    self.unwind_info.push((at, unwind));
}
/// Begin a source-location range at the current offset; returns the
/// (start offset, location) pair now in effect.
pub fn start_srcloc(&mut self, loc: RelSourceLoc) -> (CodeOffset, RelSourceLoc) {
    let entry = (self.cur_offset(), loc);
    self.cur_srcloc = Some(entry);
    entry
}

/// Close the currently-open source-location range at the current offset.
/// Panics if no range is open.
pub fn end_srcloc(&mut self) {
    let (start, loc) = self
        .cur_srcloc
        .take()
        .expect("end_srcloc() called without start_srcloc()");
    let end = self.cur_offset();
    debug_assert!(end >= start);
    // Only record non-empty ranges.
    if end > start {
        self.srclocs.push(MachSrcLoc { start, end, loc });
    }
}
/// Push a user stack map for the given return address, covering the active
/// portion of the frame. Stack maps must be pushed in increasing offset
/// order.
pub fn push_user_stack_map(
    &mut self,
    emit_state: &I::State,
    return_addr: CodeOffset,
    mut stack_map: ir::UserStackMap,
) {
    let span = emit_state.frame_layout().active_size();
    trace!("Adding user stack map @ {return_addr:#x} spanning {span} bytes: {stack_map:?}");
    // Enforce the sorted-by-offset invariant (the format arguments only
    // evaluate on failure, when `last()` is necessarily `Some`).
    debug_assert!(
        self.user_stack_maps
            .last()
            .map_or(true, |(prev_addr, _, _)| *prev_addr < return_addr),
        "pushed stack maps out of order: {} is not less than {}",
        self.user_stack_maps.last().unwrap().0,
        return_addr,
    );
    stack_map.finalize(emit_state.frame_layout().sp_to_sized_stack_slots());
    self.user_stack_maps.push((return_addr, span, stack_map));
}
/// Record a group of debug tags at the current offset. The tags are
/// appended to the shared pool and referenced by an index range.
pub fn push_debug_tags(&mut self, pos: MachDebugTagPos, tags: &[DebugTag]) {
    trace!("debug tags at offset {}: {tags:?}", self.cur_offset());
    let start = u32::try_from(self.debug_tag_pool.len()).unwrap();
    self.debug_tag_pool.extend_from_slice(tags);
    let end = u32::try_from(self.debug_tag_pool.len()).unwrap();
    let record = MachDebugTags {
        offset: self.cur_offset(),
        pos,
        range: start..end,
    };
    self.debug_tags.push(record);
}

/// Raise the buffer's minimum alignment to at least `2^align_to` bytes.
pub fn set_log2_min_function_alignment(&mut self, align_to: u8) {
    let requested = 1u32
        .checked_shl(u32::from(align_to))
        .expect("log2_min_function_alignment too large");
    self.min_alignment = self.min_alignment.max(requested);
}

/// Record the frame layout for this function. May be set at most once.
pub fn set_frame_layout(&mut self, frame_layout: MachBufferFrameLayout) {
    debug_assert!(self.frame_layout.is_none());
    self.frame_layout = Some(frame_layout);
}
}
impl<I: VCodeInst> Extend<u8> for MachBuffer<I> {
    fn extend<T: IntoIterator<Item = u8>>(&mut self, iter: T) {
        // Appending raw bytes requires no fixup or label bookkeeping, so
        // the bytes can go straight into the data buffer (equivalent to
        // calling `put1` per byte).
        self.data.extend(iter);
    }
}
impl<T: CompilePhase> MachBufferFinalized<T> {
    /// Get the list of source-location mapping tuples, sorted by start offset.
    pub fn get_srclocs_sorted(&self) -> &[T::MachSrcLocType] {
        &self.srclocs[..]
    }

    /// Iterate over the recorded debug-tag groups, resolving each pool range
    /// to a tag slice.
    pub fn debug_tags(&self) -> impl Iterator<Item = MachBufferDebugTagList<'_>> {
        self.debug_tags.iter().map(|tags| {
            let start = usize::try_from(tags.range.start).unwrap();
            let end = usize::try_from(tags.range.end).unwrap();
            MachBufferDebugTagList {
                offset: tags.offset,
                pos: tags.pos,
                tags: &self.debug_tag_pool[start..end],
            }
        })
    }

    /// Get the total required size for the code.
    pub fn total_size(&self) -> CodeOffset {
        self.data.len() as CodeOffset
    }

    /// Return the code in the buffer as an uppercase-hex string.
    pub fn stringify_code_bytes(&self) -> String {
        use core::fmt::Write;
        // Preallocate: two hex digits per byte.
        let mut s = String::with_capacity(self.data.len() * 2);
        for b in &self.data {
            write!(&mut s, "{b:02X}").unwrap();
        }
        s
    }

    /// Get the code bytes.
    pub fn data(&self) -> &[u8] {
        &self.data[..]
    }

    /// Get the code bytes, mutably (e.g. for post-hoc patching).
    pub fn data_mut(&mut self) -> &mut [u8] {
        &mut self.data[..]
    }

    /// Get the list of external relocations for this code.
    pub fn relocs(&self) -> &[FinalizedMachReloc] {
        &self.relocs[..]
    }

    /// Get the list of trap records for this code.
    pub fn traps(&self) -> &[MachTrap] {
        &self.traps[..]
    }

    /// Get the user stack map metadata for this code.
    pub fn user_stack_maps(&self) -> &[(CodeOffset, u32, ir::UserStackMap)] {
        &self.user_stack_maps
    }

    /// Take this buffer's user stack map metadata, leaving it empty.
    pub fn take_user_stack_maps(&mut self) -> SmallVec<[(CodeOffset, u32, ir::UserStackMap); 8]> {
        mem::take(&mut self.user_stack_maps)
    }

    /// Iterate over the call-site records, resolving each handler index range
    /// to a slice of finalized handlers.
    pub fn call_sites(&self) -> impl Iterator<Item = FinalizedMachCallSite<'_>> + '_ {
        self.call_sites.iter().map(|call_site| {
            let handler_range = call_site.exception_handler_range.clone();
            let handler_range = usize::try_from(handler_range.start).unwrap()
                ..usize::try_from(handler_range.end).unwrap();
            FinalizedMachCallSite {
                ret_addr: call_site.ret_addr,
                frame_offset: call_site.frame_offset,
                exception_handlers: &self.exception_handlers[handler_range],
            }
        })
    }

    /// Get this function's frame-layout metadata, if any was recorded.
    pub fn frame_layout(&self) -> Option<&MachBufferFrameLayout> {
        self.frame_layout.as_ref()
    }

    /// Iterate over the patchable call-site records.
    pub fn patchable_call_sites(&self) -> impl Iterator<Item = &MachPatchableCallSite> + '_ {
        self.patchable_call_sites.iter()
    }
}
/// An exception handler associated with a call site, with its target still
/// expressed as a label.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MachExceptionHandler {
    /// Handler for a specific exception tag.
    Tag(ExceptionTag, MachLabel),
    /// Catch-all handler.
    Default(MachLabel),
    /// Location of the exception context (not a jump target).
    Context(ExceptionContextLoc),
}

impl MachExceptionHandler {
    /// Resolve this handler's label (if any) to a code offset via `f`.
    fn finalize<F: Fn(MachLabel) -> CodeOffset>(self, f: F) -> FinalizedMachExceptionHandler {
        match self {
            Self::Tag(tag, label) => FinalizedMachExceptionHandler::Tag(tag, f(label)),
            Self::Default(label) => FinalizedMachExceptionHandler::Default(f(label)),
            Self::Context(loc) => FinalizedMachExceptionHandler::Context(loc),
        }
    }
}

/// An exception handler with its target resolved to a code offset.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub enum FinalizedMachExceptionHandler {
    /// Handler for a specific exception tag.
    Tag(ExceptionTag, CodeOffset),
    /// Catch-all handler.
    Default(CodeOffset),
    /// Location of the exception context.
    Context(ExceptionContextLoc),
}

/// Where an exception context value lives at a call site.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub enum ExceptionContextLoc {
    /// An offset from the stack pointer.
    SPOffset(u32),
    /// A general-purpose register, by hardware encoding number.
    GPR(u8),
}
/// Metadata for one constant registered with the buffer.
struct MachBufferConstant {
    /// The label that refers to this constant while it is deferred (pending
    /// emission at the next island), if any.
    upcoming_label: Option<MachLabel>,
    /// Required alignment of the constant's data.
    align: CodeOffset,
    /// Size of the constant's data, in bytes.
    size: usize,
}

/// A trap deferred to the next island.
struct MachLabelTrap {
    /// Label that will resolve to the trap instruction's offset.
    label: MachLabel,
    /// The trap code to record at that offset.
    code: TrapCode,
    /// Source location active when the trap was deferred, if any.
    loc: Option<RelSourceLoc>,
}

/// A fixup to perform on the buffer once a label's offset is known: patch the
/// use of `label` at `offset`, according to `kind`.
#[derive(Debug)]
struct MachLabelFixup<I: VCodeInst> {
    label: MachLabel,
    offset: CodeOffset,
    kind: I::LabelUse,
}

impl<I: VCodeInst> MachLabelFixup<I> {
    /// The last offset at which the target can be placed and still be
    /// reachable from this use (saturating on overflow).
    fn deadline(&self) -> CodeOffset {
        self.offset.saturating_add(self.kind.max_pos_range())
    }
}

impl<I: VCodeInst> PartialEq for MachLabelFixup<I> {
    // Fixups compare by deadline only, consistent with `Ord` below.
    fn eq(&self, other: &Self) -> bool {
        self.deadline() == other.deadline()
    }
}

impl<I: VCodeInst> Eq for MachLabelFixup<I> {}

impl<I: VCodeInst> PartialOrd for MachLabelFixup<I> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<I: VCodeInst> Ord for MachLabelFixup<I> {
    // Deliberately reversed (other vs. self): `BinaryHeap` is a max-heap, so
    // reversing makes it behave as a min-heap on deadline — the fixup with
    // the nearest deadline is at the top.
    fn cmp(&self, other: &Self) -> Ordering {
        other.deadline().cmp(&self.deadline())
    }
}
/// A relocation resulting from emission, generic over the target
/// representation (label-based while emitting, offset-based once finalized).
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachRelocBase<T> {
    /// The offset at which the relocation applies, *relative to the
    /// containing section*.
    pub offset: CodeOffset,
    /// The kind of relocation.
    pub kind: Reloc,
    /// The relocation target.
    pub target: T,
    /// The addend to add to the symbol value.
    pub addend: i64,
}

/// Relocation during emission: target may still be a label.
type MachReloc = MachRelocBase<RelocTarget>;

/// Relocation after finalization: labels resolved to function offsets.
pub type FinalizedMachReloc = MachRelocBase<FinalizedRelocTarget>;

/// The target of a relocation while the buffer is still being emitted.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RelocTarget {
    /// Points to an external symbol.
    ExternalName(ExternalName),
    /// Points to a `MachLabel` inside this buffer.
    Label(MachLabel),
}

impl From<ExternalName> for RelocTarget {
    fn from(name: ExternalName) -> Self {
        Self::ExternalName(name)
    }
}

impl From<MachLabel> for RelocTarget {
    fn from(label: MachLabel) -> Self {
        Self::Label(label)
    }
}

/// The target of a relocation in a finalized buffer.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub enum FinalizedRelocTarget {
    /// Points to an external symbol.
    ExternalName(ExternalName),
    /// Points to an offset from the start of the function.
    Func(CodeOffset),
}

impl FinalizedRelocTarget {
    /// Returns a display for the current [FinalizedRelocTarget], with extra
    /// context to prettify the output.
    pub fn display<'a>(&'a self, params: Option<&'a FunctionParameters>) -> String {
        match self {
            FinalizedRelocTarget::ExternalName(name) => format!("{}", name.display(params)),
            FinalizedRelocTarget::Func(offset) => format!("func+{offset}"),
        }
    }
}
/// A trap record resulting from emission.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachTrap {
    /// The offset at which the trap instruction occurs.
    pub offset: CodeOffset,
    /// The trap code.
    pub code: TrapCode,
}

/// A call-site record resulting from emission.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachCallSite {
    /// The return-address offset (i.e. just past the call instruction).
    pub ret_addr: CodeOffset,
    /// Frame offset associated with this call site, if any.
    pub frame_offset: Option<u32>,
    /// Range into the buffer's shared exception-handler list.
    exception_handler_range: Range<u32>,
}

/// A call-site record with its exception handlers resolved to a slice.
#[derive(Clone, Debug, PartialEq)]
pub struct FinalizedMachCallSite<'a> {
    /// The return-address offset.
    pub ret_addr: CodeOffset,
    /// Frame offset associated with this call site, if any.
    pub frame_offset: Option<u32>,
    /// Exception handlers for this call site.
    pub exception_handlers: &'a [FinalizedMachExceptionHandler],
}

/// A patchable call-site record: `len` bytes ending at `ret_addr`.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachPatchableCallSite {
    /// The return-address offset (end of the patchable region).
    pub ret_addr: CodeOffset,
    /// Length of the patchable region, in bytes.
    pub len: u32,
}

/// A source-location mapping resulting from emission: the code range
/// `start..end` maps to `loc` (relative in `Stencil`, absolute in `Final`).
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachSrcLoc<T: CompilePhase> {
    /// The start of the region of code corresponding to the source location.
    pub start: CodeOffset,
    /// The end of the region of code (exclusive).
    pub end: CodeOffset,
    /// The source location.
    pub loc: T::SourceLocType,
}

impl MachSrcLoc<Stencil> {
    /// Expand the relative location against the function's base source
    /// location, producing the `Final`-phase record.
    fn apply_base_srcloc(self, base_srcloc: SourceLoc) -> MachSrcLoc<Final> {
        MachSrcLoc {
            start: self.start,
            end: self.end,
            loc: self.loc.expand(base_srcloc),
        }
    }
}
/// Record of a branch at the buffer tail, kept to support in-place branch
/// editing by `optimize_branches`.
#[derive(Clone, Debug)]
struct MachBranch {
    /// Offset of the branch's first byte.
    start: CodeOffset,
    /// Offset just past the branch's last byte.
    end: CodeOffset,
    /// The branch target label.
    target: MachLabel,
    /// Index of the branch's record in `pending_fixup_records`.
    fixup: usize,
    /// The encoding of the same-length inverted-condition branch; `Some` iff
    /// this is a conditional branch.
    inverted: Option<SmallVec<[u8; 8]>>,
    /// Labels that were bound exactly at this branch's start offset.
    labels_at_this_branch: SmallVec<[MachLabel; 4]>,
}

impl MachBranch {
    /// Is this a conditional branch (has an inverted form)?
    fn is_cond(&self) -> bool {
        self.inverted.is_some()
    }
    /// Is this an unconditional branch (no inverted form)?
    fn is_uncond(&self) -> bool {
        self.inverted.is_none()
    }
}
/// Frame-layout information recorded alongside the buffer.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachBufferFrameLayout {
    /// Offset from the frame to the frame pointer.
    /// NOTE(review): exact reference points (SP-at-entry vs. FP) are not
    /// visible here — confirm against the frame-lowering code.
    pub frame_to_fp_offset: u32,
    /// Per-stack-slot layout information, keyed by IR stack slot.
    pub stackslots: SecondaryMap<ir::StackSlot, MachBufferStackSlot>,
}
/// Layout information for a single stack slot in `MachBufferFrameLayout`.
#[derive(Clone, Debug, PartialEq, Default)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub struct MachBufferStackSlot {
    /// Offset of this slot within the frame.
    pub offset: u32,
    /// Optional user-provided key identifying this slot, if one was set.
    pub key: Option<ir::StackSlotKey>,
}
/// A record associating a run of debug tags with a code offset.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub(crate) struct MachDebugTags {
    /// Code offset these tags are attached to.
    pub offset: CodeOffset,
    /// Whether the tags apply before or after `offset` (see
    /// `MachDebugTagPos`).
    pub pos: MachDebugTagPos,
    /// Index range into the buffer's tag storage; resolved to a `&[DebugTag]`
    /// slice in `MachBufferDebugTagList`.
    pub range: Range<u32>,
}
/// Position of a debug-tag list relative to its code offset.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(
    feature = "enable-serde",
    derive(serde_derive::Serialize, serde_derive::Deserialize)
)]
pub enum MachDebugTagPos {
    /// Tags positioned after the offset.
    Post,
    /// Tags positioned before the offset.
    Pre,
}
/// A resolved view of one debug-tag record: the index range of
/// `MachDebugTags` has been turned into a borrowed slice of tags.
pub struct MachBufferDebugTagList<'a> {
    /// Code offset these tags are attached to.
    pub offset: CodeOffset,
    /// Whether the tags apply before or after `offset`.
    pub pos: MachDebugTagPos,
    /// The tags themselves.
    pub tags: &'a [DebugTag],
}
/// A `TextSectionBuilder` implementation backed by a `MachBuffer`:
/// concatenates whole functions into one text section, using the buffer's
/// label/veneer machinery to resolve inter-function branches.
pub struct MachTextSectionBuilder<I: VCodeInst> {
    // Underlying buffer accumulating the section's bytes and fixups.
    buf: MachBuffer<I>,
    // Index of the next function to be appended; doubles as the next
    // per-function label to bind.
    next_func: usize,
    // When set to `Yes`, islands/veneers are forced on subsequent appends
    // and at finish (see `force_veneers`).
    force_veneers: ForceVeneers,
}
impl<I: VCodeInst> MachTextSectionBuilder<I> {
    /// Create a text-section builder, pre-reserving one label per function
    /// so that cross-function references can be recorded before the
    /// functions themselves are appended.
    pub fn new(num_funcs: usize) -> MachTextSectionBuilder<I> {
        let mut buffer = MachBuffer::new();
        buffer.reserve_labels_for_blocks(num_funcs);
        MachTextSectionBuilder {
            buf: buffer,
            next_func: 0,
            force_veneers: ForceVeneers::No,
        }
    }
}
impl<I: VCodeInst> TextSectionBuilder for MachTextSectionBuilder<I> {
    /// Append one function body, optionally binding the next per-function
    /// label to its start. Returns the offset at which it was placed.
    fn append(
        &mut self,
        labeled: bool,
        func: &[u8],
        align: u32,
        ctrl_plane: &mut ControlPlane,
    ) -> u64 {
        let func_len = func.len() as u32;
        // Flush an island first if one is due (or if veneers are forced),
        // so that pending fixups stay within range.
        let forcing = self.force_veneers == ForceVeneers::Yes;
        if forcing || self.buf.island_needed(func_len) {
            self.buf
                .emit_island_maybe_forced(self.force_veneers, func_len, ctrl_plane);
        }
        self.buf.align_to(align);
        let start = self.buf.cur_offset();
        if labeled {
            let lbl = MachLabel::from_block(BlockIndex::new(self.next_func));
            self.buf.bind_label(lbl, ctrl_plane);
            self.next_func += 1;
        }
        self.buf.put_data(func);
        u64::from(start)
    }

    /// Try to resolve a relocation in-buffer as a label use; returns `false`
    /// when the relocation kind cannot be expressed as a label use and must
    /// be handled externally.
    fn resolve_reloc(&mut self, offset: u64, reloc: Reloc, addend: Addend, target: usize) -> bool {
        crate::trace!(
            "Resolving relocation @ {offset:#x} + {addend:#x} to target {target} of kind {reloc:?}"
        );
        let label = MachLabel::from_block(BlockIndex::new(target));
        let offset = u32::try_from(offset).unwrap();
        if let Some(label_use) = I::LabelUse::from_reloc(reloc, addend) {
            self.buf.use_label_at_offset(offset, label, label_use);
            true
        } else {
            false
        }
    }

    /// Force veneer emission on all subsequent islands.
    fn force_veneers(&mut self) {
        self.force_veneers = ForceVeneers::Yes;
    }

    /// Overwrite already-emitted bytes at `offset` with `data`.
    fn write(&mut self, offset: u64, data: &[u8]) {
        let start: usize = offset.try_into().unwrap();
        self.buf.data[start..][..data.len()].copy_from_slice(data);
    }

    /// Finish emission and return the section's bytes.
    fn finish(&mut self, ctrl_plane: &mut ControlPlane) -> Vec<u8> {
        // Every reserved per-function label must have been bound by `append`.
        assert_eq!(self.next_func, self.buf.label_offsets.len());
        self.buf
            .finish_emission_maybe_forcing_veneers(self.force_veneers, ctrl_plane);
        mem::take(&mut self.buf.data).into_vec()
    }
}
#[cfg(all(test, feature = "arm64"))]
mod test {
use cranelift_entity::EntityRef as _;
use super::*;
use crate::ir::UserExternalNameRef;
use crate::isa::aarch64::inst::{BranchTarget, CondBrKind, EmitInfo, Inst};
use crate::isa::aarch64::inst::{OperandSize, xreg};
use crate::machinst::{MachInstEmit, MachInstEmitState};
use crate::settings;
// Test helper: the `MachLabel` for block `n`.
fn label(n: u32) -> MachLabel {
    let block = BlockIndex::new(n as usize);
    MachLabel::from_block(block)
}
// Test helper: a `BranchTarget` referring to block `n`'s label.
fn target(n: u32) -> BranchTarget {
    let lbl = label(n);
    BranchTarget::Label(lbl)
}
// An unconditional jump whose target label is bound immediately after it
// should be elided entirely, leaving an empty buffer.
#[test]
fn test_elide_jump_to_next() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(2);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(1) };
    inst.emit(&mut buf, &info, &mut state);
    // Bind the jump's target right after it: the jump becomes redundant.
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // No bytes should remain.
    assert_eq!(0, buf.total_size());
}
// Blocks that consist solely of a jump should be redirected through, and —
// since both arms of the conditional then fall through to the same place —
// everything should be elided, leaving an empty buffer.
#[test]
fn test_elide_trivial_jump_blocks() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(4);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    // Conditional branch whose two successors are both jump-only blocks.
    let inst = Inst::CondBr {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        taken: target(1),
        not_taken: target(2),
    };
    inst.emit(&mut buf, &info, &mut state);
    // Block 1: just a jump to block 3.
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(3) };
    inst.emit(&mut buf, &info, &mut state);
    // Block 2: also just a jump to block 3.
    buf.bind_label(label(2), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(3) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // All branches collapse away.
    assert_eq!(0, buf.total_size());
}
// When inverting a conditional branch lets its fallthrough arm be elided,
// the buffer should do so: the emitted bytes must match a directly-emitted
// `TrapIf` with the opposite condition followed by the fallthrough body.
#[test]
fn test_flip_cond() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(4);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    // Zero-test branch: taken arm is a Nop4, not-taken arm is a trap.
    let inst = Inst::CondBr {
        kind: CondBrKind::Zero(xreg(0), OperandSize::Size64),
        taken: target(1),
        not_taken: target(2),
    };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(2), state.ctrl_plane_mut());
    let inst = Inst::Udf {
        trap_code: TrapCode::STACK_OVERFLOW,
    };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // Reference emission: the equivalent inverted-condition TrapIf + Nop4,
    // with no branch-simplification involved.
    let mut buf2 = MachBuffer::new();
    let mut state = Default::default();
    let inst = Inst::TrapIf {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        trap_code: TrapCode::STACK_OVERFLOW,
    };
    inst.emit(&mut buf2, &info, &mut state);
    let inst = Inst::Nop4;
    inst.emit(&mut buf2, &info, &mut state);
    let buf2 = buf2.finish(&constants, state.ctrl_plane_mut());
    // Byte-for-byte identical output.
    assert_eq!(buf.data, buf2.data);
}
// Emitting ~2 MB of instructions after a conditional branch forces the
// branch's target out of direct range, so an island with a veneer must be
// emitted; the rewritten branch bytes are compared against a reference
// emission using resolved offsets.
#[test]
fn test_island() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(4);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    let inst = Inst::CondBr {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        taken: target(2),
        not_taken: target(3),
    };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(1), state.ctrl_plane_mut());
    // Pad with nops out to 2,000,000 bytes, emitting islands whenever the
    // buffer reports one is needed.
    while buf.cur_offset() < 2000000 {
        if buf.island_needed(0) {
            buf.emit_island(0, state.ctrl_plane_mut());
        }
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &info, &mut state);
    }
    buf.bind_label(label(2), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // 2,000,000 bytes of padding plus the two trailing nops.
    assert_eq!(2000000 + 8, buf.total_size());
    // Reference: the branch with offsets resolved to where the veneer and
    // fallthrough ended up.
    let mut buf2 = MachBuffer::new();
    let mut state = Default::default();
    let inst = Inst::CondBr {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        taken: BranchTarget::ResolvedOffset((1 << 20) - 20 - 20),
        not_taken: BranchTarget::ResolvedOffset(2000000 + 4 - 4),
    };
    inst.emit(&mut buf2, &info, &mut state);
    let buf2 = buf2.finish(&constants, state.ctrl_plane_mut());
    // Only the branch encoding at the start is compared.
    assert_eq!(&buf.data[0..8], &buf2.data[..]);
}
// A conditional branch targeting labels ~2 MB *behind* it is out of direct
// conditional-branch range; the buffer must rewrite it (conditional branch
// plus an unconditional jump), matching a reference emission with resolved
// negative offsets.
#[test]
fn test_island_backward() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(4);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(2), state.ctrl_plane_mut());
    // ~2 MB of nop padding between the labels and the branch that targets
    // them.
    while buf.cur_offset() < 2000000 {
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &info, &mut state);
    }
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let inst = Inst::CondBr {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        taken: target(0),
        not_taken: target(1),
    };
    inst.emit(&mut buf, &info, &mut state);
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // Padding plus the rewritten branch sequence (12 bytes).
    assert_eq!(2000000 + 12, buf.total_size());
    // Reference: conditional branch over a jump, with resolved backward
    // offsets.
    let mut buf2 = MachBuffer::new();
    let mut state = Default::default();
    let inst = Inst::CondBr {
        kind: CondBrKind::NotZero(xreg(0), OperandSize::Size64),
        taken: BranchTarget::ResolvedOffset(8),
        not_taken: BranchTarget::ResolvedOffset(4 - (2000000 + 4)),
    };
    inst.emit(&mut buf2, &info, &mut state);
    let inst = Inst::Jump {
        dest: BranchTarget::ResolvedOffset(-(2000000 + 8)),
    };
    inst.emit(&mut buf2, &info, &mut state);
    let buf2 = buf2.finish(&constants, state.ctrl_plane_mut());
    // Only the trailing rewritten branch bytes are compared.
    assert_eq!(&buf.data[2000000..], &buf2.data[..]);
}
// A chain of jump-only blocks (3 -> 4 -> 5 -> 7) should be redirected all
// the way through to the final target; the result is checked against
// golden machine-code bytes.
#[test]
fn test_multiple_redirect() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(8);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    let inst = Inst::CondBr {
        kind: CondBrKind::Zero(xreg(0), OperandSize::Size64),
        taken: target(1),
        not_taken: target(2),
    };
    inst.emit(&mut buf, &info, &mut state);
    // Block 1: jump-only, start of the redirect chain.
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(3) };
    inst.emit(&mut buf, &info, &mut state);
    // Block 2: real work (two nops) then a back-edge to block 0.
    buf.bind_label(label(2), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    inst.emit(&mut buf, &info, &mut state);
    let inst = Inst::Jump { dest: target(0) };
    inst.emit(&mut buf, &info, &mut state);
    // Blocks 3-5: jump-only links in the chain.
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(4) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(4), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(5) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(5), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(7) };
    inst.emit(&mut buf, &info, &mut state);
    // Block 6: unreferenced.
    buf.bind_label(label(6), state.ctrl_plane_mut());
    let inst = Inst::Nop4;
    inst.emit(&mut buf, &info, &mut state);
    // Block 7: final target of the chain.
    buf.bind_label(label(7), state.ctrl_plane_mut());
    let inst = Inst::Ret {};
    inst.emit(&mut buf, &info, &mut state);
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // Golden bytes for the simplified function.
    let golden_data = vec![
        0xa0, 0x00, 0x00, 0xb4, 0x1f, 0x20, 0x03, 0xd5, 0x1f, 0x20, 0x03, 0xd5, 0xfd, 0xff,
        0xff, 0x17, 0x1f, 0x20, 0x03, 0xd5, 0xc0, 0x03, 0x5f, 0xd6,
    ];
    assert_eq!(&golden_data[..], &buf.data[..]);
}
// A cycle of jump-only blocks (1 -> 2 -> 3 -> 4 -> 1) must not send the
// redirect-following logic into an infinite loop; the whole function
// collapses to a single self-branch (golden bytes below).
#[test]
fn test_handle_branch_cycle() {
    let info = EmitInfo::new(settings::Flags::new(settings::builder()));
    let mut buf = MachBuffer::new();
    let mut state = <Inst as MachInstEmit>::State::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(5);
    buf.bind_label(label(0), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(1) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(1), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(2) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(2), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(3) };
    inst.emit(&mut buf, &info, &mut state);
    buf.bind_label(label(3), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(4) };
    inst.emit(&mut buf, &info, &mut state);
    // Close the cycle: block 4 jumps back to block 1.
    buf.bind_label(label(4), state.ctrl_plane_mut());
    let inst = Inst::Jump { dest: target(1) };
    inst.emit(&mut buf, &info, &mut state);
    let buf = buf.finish(&constants, state.ctrl_plane_mut());
    // Golden bytes: a single branch-to-self.
    let golden_data = vec![
        0x00, 0x00, 0x00, 0x14,
    ];
    assert_eq!(&golden_data[..], &buf.data[..]);
}
// Exercises the buffer's metadata side-channels: traps, try-call sites with
// exception handlers, and relocations — checking that offsets and payloads
// survive `finish` with handler labels resolved to final offsets.
#[test]
fn metadata_records() {
    let mut buf = MachBuffer::<Inst>::new();
    let ctrl_plane = &mut Default::default();
    let constants = Default::default();
    buf.reserve_labels_for_blocks(3);
    buf.bind_label(label(0), ctrl_plane);
    buf.put1(1);
    // Trap recorded at offset 1 (after the first byte).
    buf.add_trap(TrapCode::HEAP_OUT_OF_BOUNDS);
    buf.put1(2);
    // Two traps recorded at the same offset (2).
    buf.add_trap(TrapCode::INTEGER_OVERFLOW);
    buf.add_trap(TrapCode::INTEGER_DIVISION_BY_ZERO);
    // A try-call site at the current offset, with a tagged handler
    // (label 2) and a default handler (label 1).
    buf.add_try_call_site(
        Some(0x10),
        [
            MachExceptionHandler::Tag(ExceptionTag::new(42), label(2)),
            MachExceptionHandler::Default(label(1)),
        ]
        .into_iter(),
    );
    // Two relocations at offsets 2 and 3.
    buf.add_reloc(
        Reloc::Abs4,
        &ExternalName::User(UserExternalNameRef::new(0)),
        0,
    );
    buf.put1(3);
    buf.add_reloc(
        Reloc::Abs8,
        &ExternalName::User(UserExternalNameRef::new(1)),
        1,
    );
    buf.put1(4);
    // Handler targets: label 1 at offset 4, label 2 at offset 5.
    buf.bind_label(label(1), ctrl_plane);
    buf.put1(0xff);
    buf.bind_label(label(2), ctrl_plane);
    buf.put1(0xff);
    let buf = buf.finish(&constants, ctrl_plane);
    assert_eq!(buf.data(), &[1, 2, 3, 4, 0xff, 0xff]);
    // Traps come back in insertion order with recorded offsets.
    assert_eq!(
        buf.traps()
            .iter()
            .map(|trap| (trap.offset, trap.code))
            .collect::<Vec<_>>(),
        vec![
            (1, TrapCode::HEAP_OUT_OF_BOUNDS),
            (2, TrapCode::INTEGER_OVERFLOW),
            (2, TrapCode::INTEGER_DIVISION_BY_ZERO)
        ]
    );
    // The call site's handler labels have been resolved to offsets 5 and 4.
    let call_sites: Vec<_> = buf.call_sites().collect();
    assert_eq!(call_sites[0].ret_addr, 2);
    assert_eq!(call_sites[0].frame_offset, Some(0x10));
    assert_eq!(
        call_sites[0].exception_handlers,
        &[
            FinalizedMachExceptionHandler::Tag(ExceptionTag::new(42), 5),
            FinalizedMachExceptionHandler::Default(4)
        ],
    );
    // Relocations keep their offsets and kinds.
    assert_eq!(
        buf.relocs()
            .iter()
            .map(|reloc| (reloc.offset, reloc.kind))
            .collect::<Vec<_>>(),
        vec![(2, Reloc::Abs4), (3, Reloc::Abs8)]
    );
}
}