use crate::binemit::{Addend, CodeOffset, CodeSink, Reloc, Stackmap};
use crate::ir::{ExternalName, Opcode, SourceLoc, TrapCode};
use crate::machinst::{BlockIndex, MachInstLabelUse, VCodeInst};
use log::trace;
use smallvec::SmallVec;
use std::mem;
/// A buffer of output to be produced, fixed up, and then emitted to a `CodeSink`
/// in bulk. Accumulates machine-code bytes plus the metadata (relocations,
/// traps, call sites, source locations, stackmaps) needed to emit them, and
/// tracks label fixups, deferred constants ("islands"), and the most recent
/// branches so that branch-to-fallthrough and cond/uncond branch pairs can be
/// simplified (see `optimize_branches`).
pub struct MachBuffer<I: VCodeInst> {
    /// The buffer contents, as raw bytes.
    data: SmallVec<[u8; 1024]>,
    /// Any relocations referring to this code.
    relocs: SmallVec<[MachReloc; 16]>,
    /// Any trap records referring to this code.
    traps: SmallVec<[MachTrap; 16]>,
    /// Any call-site records referring to this code.
    call_sites: SmallVec<[MachCallSite; 16]>,
    /// Any source-location ranges referring to this code.
    srclocs: SmallVec<[MachSrcLoc; 64]>,
    /// Any stackmaps referring to this code.
    stackmaps: SmallVec<[MachStackMap; 8]>,
    /// The in-progress source-location range: start offset and location,
    /// set by `start_srcloc()` and consumed by `end_srcloc()`.
    cur_srcloc: Option<(CodeOffset, SourceLoc)>,
    /// Known offset of each label; `UNKNOWN_LABEL_OFFSET` until bound.
    label_offsets: SmallVec<[CodeOffset; 16]>,
    /// Per-label alias: when set (not `UNKNOWN_LABEL`), the label resolves to
    /// the *alias'* offset instead of its own (see `resolve_label_offset`).
    label_aliases: SmallVec<[MachLabel; 16]>,
    /// Constants that must be emitted at some island before the buffer ends.
    pending_constants: SmallVec<[MachLabelConstant; 16]>,
    /// Label uses that still need patching (processed in `emit_island`).
    fixup_records: SmallVec<[MachLabelFixup<I>; 16]>,
    /// Offset by which an island must be emitted so no fixup goes out of range.
    island_deadline: CodeOffset,
    /// Upper bound on the space an island would currently occupy
    /// (veneers plus pending constants).
    island_worst_case_size: CodeOffset,
    /// Most recent branches, tracked only while contiguous with the buffer
    /// tail, enabling the peephole simplifications in `optimize_branches`.
    latest_branches: SmallVec<[MachBranch; 4]>,
    /// Labels that resolve to the buffer tail — valid only while
    /// `labels_at_tail_off` equals the current offset (lazily invalidated).
    labels_at_tail: SmallVec<[MachLabel; 4]>,
    /// Offset at which `labels_at_tail` was last valid.
    labels_at_tail_off: CodeOffset,
}
/// A `MachBuffer` once emission is completed: holds the final bytes and the
/// metadata lists, with no further fixups or label tracking. Produced by
/// `MachBuffer::finish()` and consumed by `emit()`.
pub struct MachBufferFinalized {
    /// The buffer contents, as raw bytes.
    pub data: SmallVec<[u8; 1024]>,
    /// Any relocations referring to this code.
    relocs: SmallVec<[MachReloc; 16]>,
    /// Any trap records referring to this code.
    traps: SmallVec<[MachTrap; 16]>,
    /// Any call-site records referring to this code.
    call_sites: SmallVec<[MachCallSite; 16]>,
    /// Any source-location ranges referring to this code.
    srclocs: SmallVec<[MachSrcLoc; 64]>,
    /// Any stackmaps referring to this code.
    stackmaps: SmallVec<[MachStackMap; 8]>,
}
/// Sentinel label offset meaning "label not yet bound". `const` rather than
/// `static`: these are pure compile-time values with no identity, so `const`
/// is the idiomatic choice and lets the compiler inline them freely.
const UNKNOWN_LABEL_OFFSET: CodeOffset = 0xffff_ffff;
/// Sentinel label value meaning "no alias set" in `label_aliases`.
const UNKNOWN_LABEL: MachLabel = MachLabel(0xffff_ffff);
/// A label: a name for a given offset in the buffer, usable as a branch
/// target. Internally it is simply an index into `label_offsets` /
/// `label_aliases`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MachLabel(u32);
impl MachLabel {
    /// Get the label for a block: block indices map directly onto the first
    /// `n` label indices (see `reserve_labels_for_blocks`).
    pub fn from_block(bindex: BlockIndex) -> MachLabel {
        Self(bindex)
    }
    /// Get the numeric index of this label.
    pub fn get(self) -> u32 {
        let Self(index) = self;
        index
    }
}
impl<I: VCodeInst> MachBuffer<I> {
    /// Create a new, empty `MachBuffer`.
    pub fn new() -> MachBuffer<I> {
        MachBuffer {
            data: SmallVec::new(),
            relocs: SmallVec::new(),
            traps: SmallVec::new(),
            call_sites: SmallVec::new(),
            srclocs: SmallVec::new(),
            stackmaps: SmallVec::new(),
            cur_srcloc: None,
            label_offsets: SmallVec::new(),
            label_aliases: SmallVec::new(),
            pending_constants: SmallVec::new(),
            fixup_records: SmallVec::new(),
            // Max sentinel: no island is needed until a label use or deferred
            // constant establishes a real deadline.
            island_deadline: UNKNOWN_LABEL_OFFSET,
            island_worst_case_size: 0,
            latest_branches: SmallVec::new(),
            labels_at_tail: SmallVec::new(),
            // Tail-label list is valid (and empty) at offset 0.
            labels_at_tail_off: 0,
        }
    }
#[cfg(debug)]
fn check_label_branch_invariants(&self) {
let cur_off = self.cur_offset();
let mut last_end = 0;
for b in &self.latest_branches {
debug_assert!(b.start < b.end);
debug_assert!(b.end <= cur_off);
debug_assert!(b.start >= last_end);
last_end = b.end;
for &l in &b.labels_at_this_branch {
debug_assert_eq!(self.resolve_label_offset(l), b.start);
debug_assert_eq!(self.label_aliases[l.0 as usize], UNKNOWN_LABEL);
}
}
for (i, &off) in self.label_offsets.iter().enumerate() {
let label = MachLabel(i as u32);
debug_assert!(off == UNKNOWN_LABEL_OFFSET || off <= cur_off);
if off == cur_off {
debug_assert!(
self.labels_at_tail_off == cur_off && self.labels_at_tail.contains(&label)
);
}
}
debug_assert!(self.labels_at_tail_off <= cur_off);
if self.labels_at_tail_off == cur_off {
for &l in &self.labels_at_tail {
debug_assert_eq!(self.resolve_label_offset(l), cur_off);
debug_assert_eq!(self.label_aliases[l.0 as usize], UNKNOWN_LABEL);
}
}
}
#[cfg(not(debug))]
fn check_label_branch_invariants(&self) {
}
    /// Current offset from the start of the buffer (i.e., the number of bytes
    /// emitted so far).
    pub fn cur_offset(&self) -> CodeOffset {
        self.data.len() as CodeOffset
    }
    /// Add a byte to the end of the buffer.
    pub fn put1(&mut self, value: u8) {
        trace!("MachBuffer: put byte @ {}: {:x}", self.cur_offset(), value);
        self.data.push(value);
    }
pub fn put2(&mut self, value: u16) {
trace!(
"MachBuffer: put 16-bit word @ {}: {:x}",
self.cur_offset(),
value
);
let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]);
}
pub fn put4(&mut self, value: u32) {
trace!(
"MachBuffer: put 32-bit word @ {}: {:x}",
self.cur_offset(),
value
);
let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]);
}
pub fn put8(&mut self, value: u64) {
trace!(
"MachBuffer: put 64-bit word @ {}: {:x}",
self.cur_offset(),
value
);
let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]);
}
    /// Add a slice of bytes to the end of the buffer.
    pub fn put_data(&mut self, data: &[u8]) {
        trace!(
            "MachBuffer: put data @ {}: len {}",
            self.cur_offset(),
            data.len()
        );
        self.data.extend_from_slice(data);
    }
pub fn get_appended_space(&mut self, len: usize) -> &mut [u8] {
trace!("MachBuffer: put data @ {}: len {}", self.cur_offset(), len);
let off = self.data.len();
let new_len = self.data.len() + len;
self.data.resize(new_len, 0);
&mut self.data[off..]
}
pub fn align_to(&mut self, align_to: CodeOffset) {
trace!("MachBuffer: align to {}", align_to);
assert!(align_to.is_power_of_two());
while self.cur_offset() & (align_to - 1) != 0 {
self.put1(0);
}
}
pub fn get_label(&mut self) -> MachLabel {
let l = self.label_offsets.len() as u32;
self.label_offsets.push(UNKNOWN_LABEL_OFFSET);
self.label_aliases.push(UNKNOWN_LABEL);
trace!("MachBuffer: new label -> {:?}", MachLabel(l));
MachLabel(l)
}
    /// Reserve the first `blocks` label indices for blocks, so that
    /// `MachLabel::from_block(i)` maps block `i` directly to label `i`.
    /// Must be called before any label is allocated (asserted below).
    pub fn reserve_labels_for_blocks(&mut self, blocks: BlockIndex) {
        trace!("MachBuffer: first {} labels are for blocks", blocks);
        debug_assert!(self.label_offsets.is_empty());
        self.label_offsets
            .resize(blocks as usize, UNKNOWN_LABEL_OFFSET);
        self.label_aliases.resize(blocks as usize, UNKNOWN_LABEL);
    }
    /// Bind a label to the current offset. A label may be bound only once
    /// (asserted below), and must not already be aliased.
    pub fn bind_label(&mut self, label: MachLabel) {
        trace!(
            "MachBuffer: bind label {:?} at offset {}",
            label,
            self.cur_offset()
        );
        debug_assert_eq!(self.label_offsets[label.0 as usize], UNKNOWN_LABEL_OFFSET);
        debug_assert_eq!(self.label_aliases[label.0 as usize], UNKNOWN_LABEL);
        let offset = self.cur_offset();
        self.label_offsets[label.0 as usize] = offset;
        // Refresh the tail-label list if stale, then record this label as
        // being at the current tail.
        self.lazily_clear_labels_at_tail();
        self.labels_at_tail.push(label);
        self.check_label_branch_invariants();
        // Binding a label may make a just-emitted branch redundant (e.g. a
        // jump whose target is now the fall-through); simplify eagerly.
        self.optimize_branches();
        self.check_label_branch_invariants();
    }
fn lazily_clear_labels_at_tail(&mut self) {
let offset = self.cur_offset();
if offset > self.labels_at_tail_off {
self.labels_at_tail_off = offset;
self.labels_at_tail.clear();
}
}
fn resolve_label_offset(&self, label: MachLabel) -> CodeOffset {
let alias = self.label_aliases[label.0 as usize];
if alias != UNKNOWN_LABEL {
self.label_offsets[alias.0 as usize]
} else {
self.label_offsets[label.0 as usize]
}
}
    /// Record a use of the given label at the given offset: a fixup of the
    /// given kind will be patched there once the label's offset is known.
    /// Also updates the island bookkeeping so that, if the label stays
    /// unresolved, an island (with a veneer) can be emitted before this use
    /// goes out of range.
    pub fn use_label_at_offset(&mut self, offset: CodeOffset, label: MachLabel, kind: I::LabelUse) {
        trace!(
            "MachBuffer: use_label_at_offset: offset {} label {:?} kind {:?}",
            offset,
            label,
            kind
        );
        self.fixup_records.push(MachLabelFixup {
            label,
            offset,
            kind,
        });
        if kind.supports_veneer() {
            // Account for the space a veneer for this use would need.
            self.island_worst_case_size += kind.veneer_size();
            // NOTE(review): this masks the running total *down* to an ALIGN
            // multiple — presumably an alignment adjustment, but rounding up
            // would be the conservative direction; confirm intent.
            self.island_worst_case_size &= !(I::LabelUse::ALIGN - 1);
        }
        // An island must appear no later than the farthest point this use
        // can reach forward.
        let deadline = offset + kind.max_pos_range();
        if deadline < self.island_deadline {
            self.island_deadline = deadline;
        }
        self.check_label_branch_invariants();
    }
    /// Inform the buffer of an unconditional branch about to be emitted at
    /// the current tail (`start` must equal the current offset; the branch's
    /// bytes are expected to follow, ending at `end`). The caller must have
    /// just recorded the branch's label use via `use_label_at_offset()` — the
    /// last entry in `fixup_records` is taken as this branch's fixup.
    pub fn add_uncond_branch(&mut self, start: CodeOffset, end: CodeOffset, target: MachLabel) {
        assert!(self.cur_offset() == start);
        debug_assert!(end > start);
        assert!(!self.fixup_records.is_empty());
        let fixup = self.fixup_records.len() - 1;
        self.lazily_clear_labels_at_tail();
        self.latest_branches.push(MachBranch {
            start,
            end,
            target,
            // `inverted: None` marks this branch as unconditional
            // (see `MachBranch::is_uncond`).
            fixup,
            inverted: None,
            // Snapshot the labels currently bound at the branch's start so
            // they can be redirected if the branch is later removed.
            labels_at_this_branch: self.labels_at_tail.clone(),
        });
        self.check_label_branch_invariants();
    }
    /// Inform the buffer of a conditional branch about to be emitted at the
    /// current tail, with `inverted` holding the machine-code bytes of the
    /// same branch with its condition inverted (must be the same length as
    /// the branch itself). Same fixup precondition as `add_uncond_branch()`.
    pub fn add_cond_branch(
        &mut self,
        start: CodeOffset,
        end: CodeOffset,
        target: MachLabel,
        inverted: &[u8],
    ) {
        assert!(self.cur_offset() == start);
        debug_assert!(end > start);
        assert!(!self.fixup_records.is_empty());
        // The inverted form must occupy exactly the same number of bytes so
        // it can be swapped in place (see `optimize_branches`).
        debug_assert!(inverted.len() == (end - start) as usize);
        let fixup = self.fixup_records.len() - 1;
        let inverted = Some(SmallVec::from(inverted));
        self.lazily_clear_labels_at_tail();
        self.latest_branches.push(MachBranch {
            start,
            end,
            target,
            fixup,
            inverted,
            labels_at_this_branch: self.labels_at_tail.clone(),
        });
        self.check_label_branch_invariants();
    }
    /// Remove the most recent branch (which must end exactly at the current
    /// tail) by truncating the buffer back to its start, dropping its fixup,
    /// and moving any labels bound at the old tail back to the new tail.
    fn truncate_last_branch(&mut self) {
        self.lazily_clear_labels_at_tail();
        let b = self.latest_branches.pop().unwrap();
        assert!(b.end == self.cur_offset());
        // Chop off the branch bytes and its (last) fixup record.
        self.data.truncate(b.start as usize);
        self.fixup_records.truncate(b.fixup);
        let cur_off = self.cur_offset();
        self.labels_at_tail_off = cur_off;
        trace!(
            "truncate_last_branch: truncated {:?}; off now {}",
            b,
            cur_off
        );
        // Labels that pointed at the old tail now point at the new tail.
        for &l in &self.labels_at_tail {
            self.label_offsets[l.0 as usize] = cur_off;
        }
        // Labels that were bound at the branch's start are at the tail too.
        self.labels_at_tail
            .extend(b.labels_at_this_branch.into_iter());
        self.check_label_branch_invariants();
    }
    /// Peephole-simplify the branches at the buffer tail. Repeatedly examines
    /// the most recent branch and applies, until a fixpoint:
    ///
    /// 1. A branch whose target resolves to the current offset is a branch to
    ///    its own fall-through: remove it.
    /// 2. An unconditional branch with labels bound at its start: redirect
    ///    those labels to the branch's target (via aliasing), so the branch
    ///    itself may become unreachable and removable.
    /// 3. An unconditional branch immediately preceded by another
    ///    unconditional branch, with no labels at its start, is unreachable:
    ///    remove it.
    /// 4. An unconditional branch immediately preceded by a conditional
    ///    branch whose target is the current offset: invert the conditional
    ///    in place, retarget it at the unconditional's target, and remove the
    ///    unconditional.
    fn optimize_branches(&mut self) {
        self.lazily_clear_labels_at_tail();
        trace!(
            "enter optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
            self.latest_branches,
            self.labels_at_tail,
            self.fixup_records
        );
        while let Some(b) = self.latest_branches.last() {
            let cur_off = self.cur_offset();
            trace!("optimize_branches: last branch {:?} at off {}", b, cur_off);
            // If the last branch no longer reaches the tail, nothing here can
            // be simplified safely.
            if b.end < cur_off {
                break;
            }
            // Case 1: branch to fall-through.
            if self.resolve_label_offset(b.target) == cur_off {
                trace!("branch with target == cur off; truncating");
                self.truncate_last_branch();
                continue;
            }
            if b.is_uncond() {
                // Case 2: redirect labels at the branch's start to its
                // target — but only if the branch does not target its own
                // start (which would create an alias cycle).
                if self.resolve_label_offset(b.target) != b.start {
                    let redirected = b.labels_at_this_branch.len();
                    for &l in &b.labels_at_this_branch {
                        trace!(
                            " -> label at start of branch {:?} redirected to target {:?}",
                            l,
                            b.target
                        );
                        self.label_aliases[l.0 as usize] = b.target;
                    }
                    let mut_b = self.latest_branches.last_mut().unwrap();
                    mut_b.labels_at_this_branch.clear();
                    // Redirecting may have unlocked case 3; restart.
                    if redirected > 0 {
                        trace!(" -> after label redirects, restarting loop");
                        continue;
                    }
                }
                // Re-borrow immutably after the edit above.
                let b = self.latest_branches.last().unwrap();
                if self.latest_branches.len() > 1 {
                    let prev_b = &self.latest_branches[self.latest_branches.len() - 2];
                    trace!(" -> more than one branch; prev_b = {:?}", prev_b);
                    // Case 3: uncond directly after uncond, with no labels at
                    // its start — unreachable, so remove it.
                    if prev_b.is_uncond()
                        && prev_b.end == b.start
                        && b.labels_at_this_branch.is_empty()
                    {
                        trace!(" -> uncond follows another uncond; truncating");
                        self.truncate_last_branch();
                        continue;
                    }
                    // Case 4: cond-then-uncond where the cond's target is the
                    // fall-through past the uncond. Remove the uncond and
                    // flip the cond to jump where the uncond went.
                    if prev_b.is_cond()
                        && prev_b.end == b.start
                        && self.resolve_label_offset(prev_b.target) == cur_off
                    {
                        trace!(" -> uncond follows a conditional, and conditional's target resolves to current offset");
                        let target = b.target;
                        let data = prev_b.inverted.clone().unwrap();
                        self.truncate_last_branch();
                        let off_before_edit = self.cur_offset();
                        let prev_b = self.latest_branches.last_mut().unwrap();
                        // Save the current bytes as the new "inverted" form,
                        // then overwrite them in place with the inverted copy.
                        let not_inverted = SmallVec::from(
                            &self.data[(prev_b.start as usize)..(prev_b.end as usize)],
                        );
                        self.data.truncate(prev_b.start as usize);
                        self.data.extend_from_slice(&data[..]);
                        prev_b.inverted = Some(not_inverted);
                        // Retarget the fixup so patching later uses the
                        // uncond's original target.
                        self.fixup_records[prev_b.fixup].label = target;
                        trace!(" -> reassigning target of condbr to {:?}", target);
                        prev_b.target = target;
                        // Same-length swap: the buffer length must not move.
                        debug_assert_eq!(off_before_edit, self.cur_offset());
                        continue;
                    }
                }
            }
            // No rule applied; fixpoint reached.
            break;
        }
        self.purge_latest_branches();
        trace!(
            "leave optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
            self.latest_branches,
            self.labels_at_tail,
            self.fixup_records
        );
    }
fn purge_latest_branches(&mut self) {
let cur_off = self.cur_offset();
if let Some(l) = self.latest_branches.last() {
if l.end < cur_off {
trace!("purge_latest_branches: removing branch {:?}", l);
self.latest_branches.clear();
}
}
}
pub fn defer_constant(
&mut self,
label: MachLabel,
align: CodeOffset,
data: &[u8],
max_distance: CodeOffset,
) {
let deadline = self.cur_offset() + max_distance;
self.island_worst_case_size += data.len() as CodeOffset;
self.island_worst_case_size &= !(I::LabelUse::ALIGN - 1);
self.pending_constants.push(MachLabelConstant {
label,
align,
data: SmallVec::from(data),
});
if deadline < self.island_deadline {
self.island_deadline = deadline;
}
}
pub fn island_needed(&self, distance: CodeOffset) -> bool {
let worst_case_end_of_island = self.cur_offset() + distance + self.island_worst_case_size;
worst_case_end_of_island > self.island_deadline
}
    /// Emit an island: flush all pending constants, then walk every pending
    /// fixup, patching those whose label is known and in range, emitting
    /// veneers for out-of-range uses that support them, and re-queueing uses
    /// whose label is still unknown. The caller must ensure control flow does
    /// not fall through into the island.
    pub fn emit_island(&mut self) {
        // The island's bytes break tail-contiguity, so tracked branches are
        // no longer simplifiable.
        self.latest_branches.clear();
        // Emit each deferred constant: align, bind its label, append data.
        let pending_constants = mem::replace(&mut self.pending_constants, SmallVec::new());
        for MachLabelConstant { label, align, data } in pending_constants.into_iter() {
            self.align_to(align);
            self.bind_label(label);
            self.put_data(&data[..]);
        }
        // Process every fixup; those that cannot be resolved yet are
        // collected into `new_fixups` for a later island or `finish()`.
        let fixup_records = mem::replace(&mut self.fixup_records, SmallVec::new());
        let mut new_fixups = SmallVec::new();
        for MachLabelFixup {
            label,
            offset,
            kind,
        } in fixup_records.into_iter()
        {
            trace!(
                "emit_island: fixup for label {:?} at offset {} kind {:?}",
                label,
                offset,
                kind
            );
            let label_offset = self.resolve_label_offset(label);
            let known = label_offset != UNKNOWN_LABEL_OFFSET;
            // In range iff the signed distance fits the use kind's reach.
            let in_range = if known {
                if label_offset >= offset {
                    (label_offset - offset) <= kind.max_pos_range()
                } else {
                    (offset - label_offset) <= kind.max_neg_range()
                }
            } else {
                false
            };
            trace!(
                " -> label_offset = {}, known = {}, in_range = {} (pos {} neg {})",
                label_offset,
                known,
                in_range,
                kind.max_pos_range(),
                kind.max_neg_range()
            );
            // The byte span of the instruction field to patch.
            let start = offset as usize;
            let end = (offset + kind.patch_size()) as usize;
            if in_range {
                // Known and reachable: patch directly.
                debug_assert!(known);
                let slice = &mut self.data[start..end];
                trace!("patching in-range!");
                kind.patch(slice, offset, label_offset);
            } else if !known && !kind.supports_veneer() {
                // Unknown target, no veneer possible: must wait for a later
                // pass when the label is bound.
                new_fixups.push(MachLabelFixup {
                    label,
                    offset,
                    kind,
                });
            } else if !in_range && kind.supports_veneer() {
                // Out of range (or unknown) but veneer-capable: emit a veneer
                // here and point the original use at it.
                self.align_to(I::LabelUse::ALIGN);
                let veneer_offset = self.cur_offset();
                trace!("making a veneer at {}", veneer_offset);
                let slice = &mut self.data[start..end];
                trace!(
                    "patching original at offset {} to veneer offset {}",
                    offset,
                    veneer_offset
                );
                kind.patch(slice, offset, veneer_offset);
                let veneer_slice = self.get_appended_space(kind.veneer_size() as usize);
                // The veneer itself contains a (longer-range) label use.
                let (veneer_fixup_off, veneer_label_use) =
                    kind.generate_veneer(veneer_slice, veneer_offset);
                trace!(
                    "generated veneer; fixup offset {}, label_use {:?}",
                    veneer_fixup_off,
                    veneer_label_use
                );
                if known {
                    // Target already known: resolve the veneer's use now.
                    let start = veneer_fixup_off as usize;
                    let end = (veneer_fixup_off + veneer_label_use.patch_size()) as usize;
                    let veneer_slice = &mut self.data[start..end];
                    trace!("doing veneer fixup right away too");
                    veneer_label_use.patch(veneer_slice, veneer_fixup_off, label_offset);
                } else {
                    // Otherwise queue the veneer's use for a later pass.
                    new_fixups.push(MachLabelFixup {
                        label,
                        offset: veneer_fixup_off,
                        kind: veneer_label_use,
                    });
                }
            } else {
                // Known but out of range, and no veneer available: the
                // use kind cannot be honored at all.
                panic!(
                    "Cannot support label-use {:?} (known = {}, in-range = {})",
                    kind, known, in_range
                );
            }
        }
        self.fixup_records = new_fixups;
        // Reset the deadline; remaining fixups will re-establish one on the
        // next pass if needed.
        self.island_deadline = UNKNOWN_LABEL_OFFSET;
    }
    /// Finish the buffer: resolve all remaining constants and fixups and
    /// produce the finalized form. Every label must have been bound.
    pub fn finish(mut self) -> MachBufferFinalized {
        // All labels must be bound by now; fixups below rely on it.
        assert!(self
            .label_offsets
            .iter()
            .all(|&off| off != UNKNOWN_LABEL_OFFSET));
        // Emit islands until both the constant and fixup queues drain.
        while !self.pending_constants.is_empty() || !self.fixup_records.is_empty() {
            self.emit_island();
        }
        MachBufferFinalized {
            data: self.data,
            relocs: self.relocs,
            traps: self.traps,
            call_sites: self.call_sites,
            srclocs: self.srclocs,
            stackmaps: self.stackmaps,
        }
    }
pub fn add_reloc(
&mut self,
srcloc: SourceLoc,
kind: Reloc,
name: &ExternalName,
addend: Addend,
) {
let name = name.clone();
self.relocs.push(MachReloc {
offset: self.data.len() as CodeOffset,
srcloc,
kind,
name,
addend,
});
}
pub fn add_trap(&mut self, srcloc: SourceLoc, code: TrapCode) {
self.traps.push(MachTrap {
offset: self.data.len() as CodeOffset,
srcloc,
code,
});
}
pub fn add_call_site(&mut self, srcloc: SourceLoc, opcode: Opcode) {
self.call_sites.push(MachCallSite {
ret_addr: self.data.len() as CodeOffset,
srcloc,
opcode,
});
}
    /// Begin a source-location range at the current offset; pair with
    /// `end_srcloc()`.
    pub fn start_srcloc(&mut self, loc: SourceLoc) {
        self.cur_srcloc = Some((self.cur_offset(), loc));
    }
pub fn end_srcloc(&mut self) {
let (start, loc) = self
.cur_srcloc
.take()
.expect("end_srcloc() called without start_srcloc()");
let end = self.cur_offset();
debug_assert!(end >= start);
if end > start {
self.srclocs.push(MachSrcLoc { start, end, loc });
}
}
    /// Add a stackmap covering the instruction of length `insn_len` that
    /// starts at the current offset.
    pub fn add_stackmap(&mut self, insn_len: CodeOffset, stackmap: Stackmap) {
        let offset = self.cur_offset();
        self.stackmaps.push(MachStackMap {
            offset,
            offset_end: offset + insn_len,
            stackmap,
        });
    }
}
impl MachBufferFinalized {
    /// Get the recorded source-location mappings, in emission order.
    pub fn get_srclocs_sorted(&self) -> &[MachSrcLoc] {
        &self.srclocs[..]
    }
    /// Total size of the finalized code, in bytes.
    pub fn total_size(&self) -> CodeOffset {
        self.data.len() as CodeOffset
    }
    /// Emit this buffer to the given `CodeSink`, interleaving relocation,
    /// trap, and call-site callbacks at their recorded offsets while the
    /// bytes are streamed out one at a time.
    ///
    /// This single pass assumes each metadata list is ordered by offset
    /// (they are appended as code is emitted) with at most one entry per
    /// offset per list — TODO confirm multiple relocs never share an offset.
    pub fn emit<CS: CodeSink>(&self, sink: &mut CS) {
        // Cursors into the three metadata lists; each only moves forward.
        let mut next_reloc = 0;
        let mut next_trap = 0;
        let mut next_call_site = 0;
        for (idx, byte) in self.data.iter().enumerate() {
            if next_reloc < self.relocs.len() {
                let reloc = &self.relocs[next_reloc];
                if reloc.offset == idx as CodeOffset {
                    sink.reloc_external(reloc.srcloc, reloc.kind, &reloc.name, reloc.addend);
                    next_reloc += 1;
                }
            }
            if next_trap < self.traps.len() {
                let trap = &self.traps[next_trap];
                if trap.offset == idx as CodeOffset {
                    sink.trap(trap.code, trap.srcloc);
                    next_trap += 1;
                }
            }
            if next_call_site < self.call_sites.len() {
                let call_site = &self.call_sites[next_call_site];
                if call_site.ret_addr == idx as CodeOffset {
                    sink.add_call_site(call_site.opcode, call_site.srcloc);
                    next_call_site += 1;
                }
            }
            sink.put1(*byte);
        }
        // NOTE(review): an entry whose offset equals `data.len()` (one past
        // the last byte — e.g. a call site whose return address is the very
        // end of the buffer) would never fire in this loop; confirm that
        // cannot occur.
        sink.begin_jumptables();
        sink.begin_rodata();
        sink.end_codegen();
    }
    /// Get the stackmaps recorded for this code.
    pub fn stackmaps(&self) -> &[MachStackMap] {
        &self.stackmaps[..]
    }
}
/// A constant deferred for emission at a later island (see `defer_constant`).
struct MachLabelConstant {
    /// This label will be bound to the constant's offset when it is emitted.
    label: MachLabel,
    /// Required alignment of the constant's placement.
    align: CodeOffset,
    /// The constant's raw bytes.
    data: SmallVec<[u8; 16]>,
}
/// A use of a label at a particular offset, awaiting patching once the
/// label's offset is known (see `use_label_at_offset` / `emit_island`).
#[derive(Debug)]
struct MachLabelFixup<I: VCodeInst> {
    /// The label being referenced.
    label: MachLabel,
    /// The offset of the use's patchable field within the buffer.
    offset: CodeOffset,
    /// The ISA-specific kind of use (determines range, patch size, veneers).
    kind: I::LabelUse,
}
struct MachReloc {
offset: CodeOffset,
srcloc: SourceLoc,
kind: Reloc,
name: ExternalName,
addend: i64,
}
/// A trap record recorded during emission.
struct MachTrap {
    /// Offset within the buffer of the trapping instruction.
    offset: CodeOffset,
    /// Source location active when the trap was recorded.
    srcloc: SourceLoc,
    /// The trap code.
    code: TrapCode,
}
/// A call-site record recorded during emission.
struct MachCallSite {
    /// Offset of the return address (the byte just past the call).
    ret_addr: CodeOffset,
    /// Source location active when the call site was recorded.
    srcloc: SourceLoc,
    /// The IR opcode of the call instruction.
    opcode: Opcode,
}
/// A source-location mapping: the half-open byte range `[start, end)` of
/// machine code produced from source location `loc`.
#[derive(Clone, Debug)]
pub struct MachSrcLoc {
    /// Start offset of the range (inclusive).
    pub start: CodeOffset,
    /// End offset of the range (exclusive).
    pub end: CodeOffset,
    /// The source location.
    pub loc: SourceLoc,
}
/// A stackmap associated with the instruction spanning `[offset, offset_end)`.
#[derive(Clone, Debug)]
pub struct MachStackMap {
    /// Start offset of the covered instruction (inclusive).
    pub offset: CodeOffset,
    /// End offset of the covered instruction (exclusive).
    pub offset_end: CodeOffset,
    /// The stackmap itself.
    pub stackmap: Stackmap,
}
/// Metadata about a branch instruction at (or near) the buffer tail, kept
/// so `optimize_branches` can remove or rewrite it.
#[derive(Clone, Debug)]
struct MachBranch {
    /// Offset of the branch's first byte.
    start: CodeOffset,
    /// Offset one past the branch's last byte.
    end: CodeOffset,
    /// The branch's target label.
    target: MachLabel,
    /// Index of this branch's entry in `fixup_records`.
    fixup: usize,
    /// For a conditional branch, the bytes of the condition-inverted form
    /// (same length); `None` marks an unconditional branch.
    inverted: Option<SmallVec<[u8; 8]>>,
    /// Labels that were bound exactly at `start` when the branch was added.
    labels_at_this_branch: SmallVec<[MachLabel; 4]>,
}
impl MachBranch {
    /// Is this a conditional branch? (Encoded by the presence of the
    /// inverted-form bytes.)
    fn is_cond(&self) -> bool {
        self.inverted.is_some()
    }
    /// Is this an unconditional branch? Exactly the complement of `is_cond`.
    fn is_uncond(&self) -> bool {
        !self.is_cond()
    }
}
// Tests exercise the buffer through the aarch64 backend, so they are gated on
// the "arm64" feature in addition to `test`.
#[cfg(all(test, feature = "arm64"))]
mod test {
    use super::*;
    use crate::isa::aarch64::inst::xreg;
    use crate::isa::aarch64::inst::{BranchTarget, CondBrKind, Inst};
    use crate::machinst::MachInstEmit;
    use crate::settings;
    use std::default::Default;
    /// Shorthand: the label with index `n`.
    fn label(n: u32) -> MachLabel {
        MachLabel::from_block(n)
    }
    /// Shorthand: a branch target referring to label `n`.
    fn target(n: u32) -> BranchTarget {
        BranchTarget::Label(label(n))
    }
    /// A jump whose target is the immediately-following label must be elided
    /// entirely, leaving an empty buffer.
    #[test]
    fn test_elide_jump_to_next() {
        let flags = settings::Flags::new(settings::builder());
        let mut buf = MachBuffer::new();
        let mut state = Default::default();
        buf.reserve_labels_for_blocks(2);
        buf.bind_label(label(0));
        let inst = Inst::Jump { dest: target(1) };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(1));
        let buf = buf.finish();
        assert_eq!(0, buf.total_size());
    }
    /// A conditional branch over two trivial jump blocks that all funnel into
    /// the same final label must collapse to nothing.
    #[test]
    fn test_elide_trivial_jump_blocks() {
        let flags = settings::Flags::new(settings::builder());
        let mut buf = MachBuffer::new();
        let mut state = Default::default();
        buf.reserve_labels_for_blocks(4);
        buf.bind_label(label(0));
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: target(1),
            not_taken: target(2),
        };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(1));
        let inst = Inst::Jump { dest: target(3) };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(2));
        let inst = Inst::Jump { dest: target(3) };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(3));
        let buf = buf.finish();
        assert_eq!(0, buf.total_size());
    }
    /// A conditional branch over a trap must be rewritten to the inverted
    /// condition: the result should equal a `TrapIf` + `Nop4` sequence.
    #[test]
    fn test_flip_cond() {
        let flags = settings::Flags::new(settings::builder());
        let mut buf = MachBuffer::new();
        let mut state = Default::default();
        buf.reserve_labels_for_blocks(4);
        buf.bind_label(label(0));
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: target(1),
            not_taken: target(2),
        };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(1));
        let inst = Inst::Udf {
            trap_info: (SourceLoc::default(), TrapCode::Interrupt),
        };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(2));
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(3));
        let buf = buf.finish();
        // Expected form, emitted directly for comparison.
        let mut buf2 = MachBuffer::new();
        let mut state = Default::default();
        let inst = Inst::TrapIf {
            kind: CondBrKind::NotZero(xreg(0)),
            trap_info: (SourceLoc::default(), TrapCode::Interrupt),
        };
        inst.emit(&mut buf2, &flags, &mut state);
        let inst = Inst::Nop4;
        inst.emit(&mut buf2, &flags, &mut state);
        let buf2 = buf2.finish();
        assert_eq!(buf.data, buf2.data);
    }
    /// A forward conditional branch whose target lies ~2 MB ahead (beyond the
    /// conditional's range) must trigger an island with a veneer mid-stream.
    #[test]
    fn test_island() {
        let flags = settings::Flags::new(settings::builder());
        let mut buf = MachBuffer::new();
        let mut state = Default::default();
        buf.reserve_labels_for_blocks(4);
        buf.bind_label(label(0));
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: target(2),
            not_taken: target(3),
        };
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(1));
        // Fill ~2 MB with nops, emitting an island whenever required.
        while buf.cur_offset() < 2000000 {
            if buf.island_needed(0) {
                buf.emit_island();
            }
            let inst = Inst::Nop4;
            inst.emit(&mut buf, &flags, &mut state);
        }
        buf.bind_label(label(2));
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(3));
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &flags, &mut state);
        let buf = buf.finish();
        assert_eq!(2000000 + 8, buf.total_size());
        // The first 8 bytes should match a condbr with resolved offsets: the
        // taken edge routed through the veneer near the 1 MiB point.
        let mut buf2 = MachBuffer::new();
        let mut state = Default::default();
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: BranchTarget::ResolvedOffset(1048576 - 4),
            not_taken: BranchTarget::ResolvedOffset(2000000 + 4 - 4),
        };
        inst.emit(&mut buf2, &flags, &mut state);
        let buf2 = buf2.finish();
        assert_eq!(&buf.data[0..8], &buf2.data[..]);
    }
    /// Backward branches stay in range across ~2 MB of nops and need no
    /// island; the trailing condbr+jump are compared against directly-emitted
    /// resolved-offset equivalents.
    #[test]
    fn test_island_backward() {
        let flags = settings::Flags::new(settings::builder());
        let mut buf = MachBuffer::new();
        let mut state = Default::default();
        buf.reserve_labels_for_blocks(4);
        buf.bind_label(label(0));
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(1));
        let inst = Inst::Nop4;
        inst.emit(&mut buf, &flags, &mut state);
        buf.bind_label(label(2));
        while buf.cur_offset() < 2000000 {
            let inst = Inst::Nop4;
            inst.emit(&mut buf, &flags, &mut state);
        }
        buf.bind_label(label(3));
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: target(0),
            not_taken: target(1),
        };
        inst.emit(&mut buf, &flags, &mut state);
        let buf = buf.finish();
        assert_eq!(2000000 + 12, buf.total_size());
        let mut buf2 = MachBuffer::new();
        let mut state = Default::default();
        let inst = Inst::CondBr {
            kind: CondBrKind::NotZero(xreg(0)),
            taken: BranchTarget::ResolvedOffset(8),
            not_taken: BranchTarget::ResolvedOffset(4 - (2000000 + 4)),
        };
        inst.emit(&mut buf2, &flags, &mut state);
        let inst = Inst::Jump {
            dest: BranchTarget::ResolvedOffset(-(2000000 + 8)),
        };
        inst.emit(&mut buf2, &flags, &mut state);
        let buf2 = buf2.finish();
        assert_eq!(&buf.data[2000000..], &buf2.data[..]);
    }
}