/// A stream of x86-64 instructions being encoded into executable anonymous memory.
///
/// Labels and jump displacements are recorded as instructions are emitted and
/// resolved (back-patched) when `finish()` is called.
#[derive(Debug)]
pub struct InstructionStream<'a>
{
/// Destination the encoded instruction bytes are written into.
byte_emitter: ByteEmitter,
/// Backing memory map; made writable in `new()` and executable again in `finish()`.
executable_anonymous_memory_map: &'a mut ExecutableAnonymousMemoryMap,
/// Records the instruction pointer each `Label` was attached at (if attached yet).
labelled_locations: LabelledLocations,
/// Sites of 8-bit label displacements still to be back-patched in `finish()`.
instruction_pointers_to_replace_labels_with_8_bit_displacements: Vec<(Label, InstructionPointer)>,
/// Sites of 32-bit label displacements still to be back-patched in `finish()`.
instruction_pointers_to_replace_labels_with_32_bit_displacements: Vec<(Label, InstructionPointer)>,
/// Sites where a label's absolute 64-bit address still needs writing in `finish()`.
emitted_labels: Vec<(Label, InstructionPointer)>,
}
impl<'a> InstructionStream<'a>
{
/// REX prefix with the W bit set: promotes the operation to 64-bit operand size.
const REX_W: u8 = 0x48;
/// REX prefix with the R bit set: extends the ModRM `reg` field to R8-R15.
pub(crate) const REX_R: u8 = 0x44;
/// REX prefix with the X bit set: extends the SIB `index` field to R8-R15.
pub(crate) const REX_X: u8 = 0x42;
/// REX prefix with the B bit set: extends the ModRM `rm` / SIB `base` / opcode register field.
pub(crate) const REX_B: u8 = 0x41;
/// Base REX prefix with no extension bits set.
pub(crate) const REX: u8 = 0x40;
/// Creates a new instruction stream over `executable_anonymous_memory_map`.
///
/// Makes the map writable for the duration of encoding (`finish()` makes it
/// executable again). `instruction_stream_hints` pre-sizes the internal
/// bookkeeping collections to avoid reallocation while emitting.
#[inline(always)]
pub(crate) fn new(executable_anonymous_memory_map: &'a mut ExecutableAnonymousMemoryMap, instruction_stream_hints: &InstructionStreamHints) -> Self
{
executable_anonymous_memory_map.make_writable();
Self
{
byte_emitter: ByteEmitter::new(executable_anonymous_memory_map),
executable_anonymous_memory_map,
labelled_locations: LabelledLocations::new(instruction_stream_hints.number_of_labels),
instruction_pointers_to_replace_labels_with_8_bit_displacements: Vec::with_capacity(instruction_stream_hints.number_of_8_bit_jumps),
instruction_pointers_to_replace_labels_with_32_bit_displacements: Vec::with_capacity(instruction_stream_hints.number_of_32_bit_jumps),
emitted_labels: Vec::with_capacity(instruction_stream_hints.number_of_emitted_labels),
}
}
/// Linux/Android only: attempts to grow the underlying anonymous mapping in
/// place so emission can continue without moving already-emitted code.
///
/// # Errors
///
/// Propagates any I/O error from the underlying resize.
#[cfg(any(target_os = "android", target_os = "linux"))]
#[inline(always)]
fn attempt_to_resize_in_place(&mut self) -> io::Result<()>
{
let new_length = self.executable_anonymous_memory_map.attempt_to_resize_in_place_whilst_writing()?;
// NOTE(review): despite being named `new_length`, the value is *added* to the
// end pointer, ie treated as a byte delta — confirm against
// ExecutableAnonymousMemoryMap's contract.
self.byte_emitter.end_instruction_pointer += new_length;
Ok(())
}
/// Fallback for platforms without in-place remapping support.
///
/// # Errors
///
/// Always fails with an `io::Error` of kind `Other`.
#[cfg(not(any(target_os = "android", target_os = "linux")))]
#[inline(always)]
fn attempt_to_resize_in_place(&mut self) -> io::Result<()>
{
let unsupported = io::Error::new(io::ErrorKind::Other, "Could not resize in place");
Err(unsupported)
}
/// Produces sizing hints from this stream's actual usage, so a subsequent
/// `InstructionStream` can pre-allocate its bookkeeping collections exactly.
#[inline(always)]
fn hints_for_next_instance(&self) -> InstructionStreamHints
{
InstructionStreamHints
{
number_of_labels: self.labelled_locations.next_label_index,
number_of_8_bit_jumps: self.instruction_pointers_to_replace_labels_with_8_bit_displacements.len(),
number_of_32_bit_jumps: self.instruction_pointers_to_replace_labels_with_32_bit_displacements.len(),
number_of_emitted_labels: self.emitted_labels.len(),
}
}
/// Finalises the stream: back-patches every recorded label displacement and
/// absolute label address, makes the underlying memory executable again, and
/// returns the emitted machine code plus sizing hints for a next stream.
///
/// In debug builds, panics on unresolved labels or displacements that are out
/// of range for their encoding.
#[inline(always)]
pub fn finish(mut self) -> (&'a [u8], InstructionStreamHints)
{
let hints = self.hints_for_next_instance();
// Patch 8-bit displacements recorded by `displacement_label_8bit()`.
for (label, insert_at_instruction_pointer) in self.instruction_pointers_to_replace_labels_with_8_bit_displacements.iter()
{
let target_instruction_pointer = self.valid_target_instruction_pointer(*label);
// NOTE(review): in release builds `result` is ignored, so an out-of-range
// displacement would silently produce bad code.
let result = self.byte_emitter.insert_8_bit_effective_address_displacement(*insert_at_instruction_pointer, target_instruction_pointer);
debug_assert!(result.is_ok(), "8-bit JMP for {:?} was too far", label)
}
// Patch 32-bit displacements recorded by `displacement_label_32bit()`.
for (label, insert_at_instruction_pointer) in self.instruction_pointers_to_replace_labels_with_32_bit_displacements.iter()
{
let target_instruction_pointer = self.valid_target_instruction_pointer(*label);
let result = self.byte_emitter.insert_32_bit_effective_address_displacement(*insert_at_instruction_pointer, target_instruction_pointer);
debug_assert!(result.is_ok(), "32-bit JMP for {:?} was too far", label)
}
// Write the absolute addresses of labels recorded by `emit_label()`.
for (label, insert_at_instruction_pointer) in self.emitted_labels.iter()
{
let target_instruction_pointer = self.valid_target_instruction_pointer(*label);
self.byte_emitter.emit_u64_at(target_instruction_pointer as u64, *insert_at_instruction_pointer)
}
self.executable_anonymous_memory_map.make_executable();
let length = self.instruction_pointer() - self.start_instruction_pointer();
// SAFETY: presumably [start, start + length) was allocated by the memory map
// and fully written by the emitter — relies on ByteEmitter's invariants.
let slice = unsafe { from_raw_parts(self.start_instruction_pointer() as *const u8, length) };
(slice, hints)
}
/// Looks up the instruction pointer recorded for `label`; the result may be an
/// invalid sentinel if the label has not yet been attached (callers check
/// `is_valid()`).
#[inline(always)]
fn target_instruction_pointer(&self, label: Label) -> InstructionPointer
{
self.labelled_locations.potential_target_instruction_pointer(label)
}
/// Looks up `label`'s instruction pointer, asserting (in debug builds) that
/// the label has actually been attached somewhere in the stream.
#[inline(always)]
fn valid_target_instruction_pointer(&self, label: Label) -> InstructionPointer
{
let target_instruction_pointer = self.target_instruction_pointer(label);
// Fixed: the assertion message previously contained a stray trailing apostrophe.
debug_assert!(target_instruction_pointer.is_valid(), "unresolved {:?}", label);
target_instruction_pointer
}
/// Convenience combining `create_label()` and `attach_label()`: the returned
/// label is already bound to the current instruction pointer.
#[inline(always)]
pub fn create_and_attach_label(&mut self) -> Label
{
let fresh_label = self.create_label();
self.attach_label(fresh_label);
fresh_label
}
/// Creates a fresh, as-yet-unattached label; attach it later with
/// `attach_label()`.
#[inline(always)]
pub fn create_label(&mut self) -> Label
{
self.labelled_locations.create_label()
}
/// Binds `label` to the current instruction pointer, making it a valid target
/// for jumps and `emit_label()`.
#[inline(always)]
pub fn attach_label(&mut self, label: Label)
{
let instruction_pointer = self.instruction_pointer();
self.labelled_locations.set(label, instruction_pointer)
}
/// Emits the absolute 64-bit address of `label` at the current location.
///
/// If the label is already attached its address is written immediately;
/// otherwise eight bytes are reserved and the site is recorded so `finish()`
/// can write the address once the label is resolved.
#[inline(always)]
pub fn emit_label(&mut self, label: Label)
{
let target_instruction_pointer = self.target_instruction_pointer(label);
if target_instruction_pointer.is_valid()
{
self.emit_quad_word(target_instruction_pointer as u64)
}
else
{
let instruction_pointer = self.instruction_pointer();
self.emitted_labels.push((label, instruction_pointer));
self.skip_quad_word();
}
}
/// Emits the standard function prologue: `PUSH RBP; MOV RBP, RSP`.
#[inline(always)]
pub fn push_stack_frame(&mut self)
{
use self::Register64Bit::RBP;
use self::Register64Bit::RSP;
self.push_Register64Bit_r64(RBP);
self.mov_Register64Bit_Register64Bit_rm64_r64(RBP, RSP);
}
/// Emits the standard function epilogue: `MOV RSP, RBP; POP RBP; RET`.
///
/// Pairs with `push_stack_frame()`.
#[inline(always)]
pub fn pop_stack_frame_and_return(&mut self)
{
use self::Register64Bit::RBP;
use self::Register64Bit::RSP;
self.mov_Register64Bit_Register64Bit_rm64_r64(RSP, RBP);
self.pop_Register64Bit_r64(RBP);
self.ret();
}
/// Emits `XOR EAX, EAX`; writing a 32-bit register zero-extends into the full
/// 64-bit register, so this zeroes all of `RAX` with a compact encoding.
#[inline(always)]
pub fn zero_RAX(&mut self)
{
use self::Register32Bit::EAX;
self.xor_Register32Bit_Register32Bit(EAX, EAX);
}
/// Emits `MOV AL, 0` — sets the return register's low byte to a C `false`.
#[inline(always)]
pub fn set_RAX_to_c_bool_false(&mut self)
{
use self::Register8Bit::AL;
self.mov_Register8Bit_Immediate8Bit(AL, Immediate8Bit::Zero);
}
/// Emits `MOV AL, 1` — sets the return register's low byte to a C `true`.
#[inline(always)]
pub fn set_RAX_to_c_bool_true(&mut self)
{
use self::Register8Bit::AL;
self.mov_Register8Bit_Immediate8Bit(AL, Immediate8Bit::One);
}
/// Reinterprets the current instruction pointer as a C-ABI function of no
/// arguments; only meaningful once code has been emitted there and `finish()`
/// has made the memory executable.
#[inline(always)]
pub fn nullary_function_pointer<R>(&self) -> unsafe extern "C" fn() -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of one argument.
#[inline(always)]
pub fn unary_function_pointer<R, A>(&self) -> unsafe extern "C" fn(A) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of two arguments.
#[inline(always)]
pub fn binary_function_pointer<R, A, B>(&self) -> unsafe extern "C" fn(A, B) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of three arguments.
#[inline(always)]
pub fn ternary_function_pointer<R, A, B, C>(&self) -> unsafe extern "C" fn(A, B, C) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of four arguments.
#[inline(always)]
pub fn quaternary_function_pointer<R, A, B, C, D>(&self) -> unsafe extern "C" fn(A, B, C, D) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of five arguments.
#[inline(always)]
pub fn quinary_function_pointer<R, A, B, C, D, E>(&self) -> unsafe extern "C" fn(A, B, C, D, E) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// As `nullary_function_pointer()`, but for a C-ABI function of six arguments.
#[inline(always)]
pub fn senary_function_pointer<R, A, B, C, D, E, F>(&self) -> unsafe extern "C" fn(A, B, C, D, E, F) -> R
{
unsafe { transmute(self.instruction_pointer()) }
}
/// Emits one raw byte, growing the buffer first if necessary.
#[inline(always)]
pub fn emit_byte(&mut self, byte: u8)
{
self.reserve_space(1);
self.byte_emitter.emit_u8(byte)
}
/// Emits two raw bytes (a `u16`), growing the buffer first if necessary.
#[inline(always)]
pub fn emit_word(&mut self, word: u16)
{
self.reserve_space(2);
self.byte_emitter.emit_u16(word)
}
/// Emits four raw bytes (a `u32`), growing the buffer first if necessary.
#[inline(always)]
pub fn emit_double_word(&mut self, double_word: u32)
{
self.reserve_space(4);
self.byte_emitter.emit_u32(double_word)
}
/// Emits eight raw bytes (a `u64`), growing the buffer first if necessary.
#[inline(always)]
pub fn emit_quad_word(&mut self, quad_word: u64)
{
self.reserve_space(8);
self.byte_emitter.emit_u64(quad_word)
}
/// Emits sixteen raw bytes (a `u128`), growing the buffer first if necessary.
#[inline(always)]
pub fn emit_double_quad_word(&mut self, double_quad_word: u128)
{
self.reserve_space(16);
self.byte_emitter.emit_u128(double_quad_word)
}
/// Emits an arbitrary byte slice verbatim, growing the buffer first if
/// necessary.
#[inline(always)]
pub fn emit_bytes(&mut self, bytes: &[u8])
{
self.reserve_space(bytes.len());
self.byte_emitter.emit_bytes(bytes)
}
/// Overwrites the byte immediately before the current instruction pointer
/// without moving it.
#[inline(always)]
pub fn rewind_to_emit_byte(&mut self, byte: u8)
{
let instruction_pointer = self.instruction_pointer();
self.byte_emitter.emit_u8_at(byte, instruction_pointer - 1)
}
/// Overwrites the four bytes immediately before the current instruction
/// pointer (typically the last-emitted 32-bit displacement) without moving it.
#[inline(always)]
pub fn rewind_to_emit_double_word(&mut self, double_word: u32)
{
let instruction_pointer = self.instruction_pointer();
self.byte_emitter.emit_u32_at(double_word, instruction_pointer - 4)
}
/// Advances past one byte without writing it, leaving it to be filled later.
#[inline(always)]
pub(crate) fn skip_byte(&mut self)
{
self.reserve_space(1);
self.byte_emitter.skip_u8()
}
/// Advances past four bytes without writing them, leaving them to be filled later.
#[inline(always)]
pub(crate) fn skip_double_word(&mut self)
{
self.reserve_space(4);
self.byte_emitter.skip_u32()
}
/// Advances past eight bytes without writing them, leaving them to be filled later.
#[inline(always)]
pub(crate) fn skip_quad_word(&mut self)
{
self.reserve_space(8);
self.byte_emitter.skip_u64()
}
/// Advances past `count` bytes without writing them.
#[inline(always)]
pub fn skip_bytes(&mut self, count: usize)
{
self.reserve_space(count);
self.byte_emitter.skip_bytes(count)
}
/// Emits exactly `count` bytes of padding using multi-byte `NOP` encodings,
/// of which the longest available form here is 9 bytes (`nop_9`).
///
/// Behaviourally identical to the previous 200-line exhaustive `match` over
/// `count`: that table always emitted `count / 9` nine-byte `NOP`s followed by
/// one shorter `NOP` covering `count % 9` bytes, which is what this computes
/// directly.
#[inline(always)]
pub fn emit_nops(&mut self, count: usize)
{
// Greedily use the longest (9-byte) NOP form.
for _ in 0 .. count / 9
{
self.nop_9()
}
// Cover the remaining 0 to 8 bytes with a single shorter NOP.
match count % 9
{
0 => (),
1 => self.nop_1(),
2 => self.nop_2(),
3 => self.nop_3(),
4 => self.nop_4(),
5 => self.nop_5(),
6 => self.nop_6(),
7 => self.nop_7(),
8 => self.nop_8(),
_ => unreachable!(),
}
}
/// Pads with `NOP`s so the next instruction starts on a multiple of
/// `alignment` bytes; does nothing when already aligned.
#[inline(always)]
pub fn emit_alignment(&mut self, alignment: usize)
{
let misalignment = self.instruction_pointer() % alignment;
if misalignment != 0
{
self.emit_nops(alignment - misalignment)
}
}
/// Emits the single-byte `NOP` (`90`).
#[inline(always)]
fn nop_1(&mut self)
{
const NOP: u8 = 0x90;
self.emit_byte(NOP)
}
/// Emits a 2-byte `NOP` (operand-size prefix `66` + `90`).
#[inline(always)]
fn nop_2(&mut self)
{
// NOTE(review): assumes `emit_u16` writes the most-significant byte first so
// the bytes land as 66 90 — confirm ByteEmitter's byte order.
self.emit_word(0x6690)
}
/// Emits the recommended 3-byte `NOP` (`0F 1F 00`).
#[inline(always)]
fn nop_3(&mut self)
{
self.emit_bytes(&[0x0F, 0x1F, 0x00])
}
/// Emits the recommended 4-byte `NOP` (`0F 1F 40 00`).
#[inline(always)]
fn nop_4(&mut self)
{
// NOTE(review): assumes `emit_u32` writes the most-significant byte first so
// the bytes land as 0F 1F 40 00 — confirm ByteEmitter's byte order.
self.emit_double_word(0x0F1F4000)
}
/// Emits the recommended 5-byte `NOP` (`0F 1F 44 00 00`).
#[inline(always)]
fn nop_5(&mut self)
{
self.emit_bytes(&[0x0F, 0x1F, 0x44, 0x00, 0x00])
}
/// Emits the recommended 6-byte `NOP` (`66 0F 1F 44 00 00`).
#[inline(always)]
fn nop_6(&mut self)
{
self.emit_bytes(&[0x66, 0x0F, 0x1F, 0x44, 0x00, 0x00])
}
/// Emits the recommended 7-byte `NOP` (`0F 1F 80 00 00 00 00`).
#[inline(always)]
fn nop_7(&mut self)
{
self.emit_bytes(&[0x0F, 0x1F, 0x80, 0x00, 0x00, 0x00, 0x00])
}
/// Emits the recommended 8-byte `NOP` (`0F 1F 84 00 00 00 00 00`).
#[inline(always)]
fn nop_8(&mut self)
{
// NOTE(review): assumes `emit_u64` writes the most-significant byte first so
// the bytes land in the order shown above — confirm ByteEmitter's byte order.
self.emit_quad_word(0x0F1F840000000000)
}
/// Emits the recommended 9-byte `NOP` (`66 0F 1F 84 00 00 00 00 00`), the
/// longest form used by `emit_nops()`.
#[inline(always)]
fn nop_9(&mut self)
{
self.emit_bytes(&[0x66, 0x0F, 0x1F, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00])
}
/// Ensures at least `length` bytes remain in the output buffer, attempting to
/// grow the underlying memory map in place when they do not.
///
/// # Panics
///
/// Panics if the buffer is full and the mapping could not be resized.
#[inline(always)]
fn reserve_space(&mut self, length: usize)
{
let remaining_space = self.byte_emitter.remaining_space();
if unlikely!(remaining_space < length)
{
// NOTE(review): a successful resize is not re-checked against `length`; if
// the in-place growth was smaller than needed, emission could still overrun —
// confirm the growth policy of attempt_to_resize_in_place.
if self.attempt_to_resize_in_place().is_err()
{
panic!("There is no more space in the buffer and remap failed")
}
}
}
/// Reserves 15 bytes — the architectural maximum length of a single x86-64
/// instruction — so an instruction can be emitted without further checks.
#[inline(always)]
fn reserve_space_for_instruction(&mut self)
{
const MaximumOpcodeLength: usize = 15;
self.reserve_space(MaximumOpcodeLength)
}
/// Remembers the current emission position so a partially-emitted instruction
/// can be undone with `reset_to_bookmark()`.
#[inline(always)]
fn bookmark(&mut self)
{
self.byte_emitter.store_bookmark()
}
/// Rewinds emission to the position stored by the last `bookmark()`.
#[inline(always)]
fn reset_to_bookmark(&mut self)
{
self.byte_emitter.reset_to_bookmark()
}
/// The address the next byte will be emitted at.
#[inline(always)]
pub fn instruction_pointer(&self) -> InstructionPointer
{
self.byte_emitter.instruction_pointer
}
/// The address of the first emitted byte (the base of the instruction stream).
#[inline(always)]
pub fn start_instruction_pointer(&self) -> InstructionPointer
{
self.byte_emitter.start_instruction_pointer
}
/// Emits a VEX prefix for an instruction with a ModRM-encoded operand,
/// delegating to `rm` which knows whether the 2- or 3-byte form is needed.
#[inline(always)]
fn vex_7(&mut self, mmmmm: u8, L: u8, pp: u8, W: u8, vvvv: impl Register, rm: impl MemoryOrRegister, r: impl Register)
{
rm.emit_vex_prefix(&mut self.byte_emitter, mmmmm, L, pp, W, vvvv, r)
}
/// Emits a VEX prefix with no ModRM-encoded memory/register operand.
///
/// The compact 2-byte (`C5`) form is only valid for the `0F` opcode map
/// (`mmmmm == 0x01`) with `W == 0`; everything else needs the 3-byte (`C4`)
/// form.
#[inline(always)]
fn vex_5(&mut self, mmmmm: u8, L: u8, pp: u8, W: u8, vvvv: impl Register)
{
if mmmmm == 0x01 && W == 0
{
// 0x80 sets the inverted R bit to 1, ie "no REX.R extension".
self.byte_emitter.emit_2_byte_vex_prefix(0x80, vvvv, L, pp)
}
else
{
// 0x80, 0x40, 0x20 set the inverted R, X and B bits to 1, ie no extensions.
self.byte_emitter.emit_3_byte_vex_prefix(0x80, 0x40, 0x20, mmmmm, W, vvvv, L, pp)
}
}
/// Emits the given `FWAIT`/`WAIT` prefix byte verbatim (used by some x87
/// instruction encodings).
#[inline(always)]
fn prefix_fwait(&mut self, byte: u8)
{
self.byte_emitter.emit_u8(byte)
}
/// Emits a group 1 legacy prefix byte (e.g. `LOCK`, `REP`/`REPNE`).
#[inline(always)]
fn prefix_group1(&mut self, byte: u8)
{
self.byte_emitter.emit_u8(byte)
}
/// Emits a group 2 legacy prefix (segment override or branch hint), delegated
/// to the operand which knows whether one is needed.
#[inline(always)]
fn prefix_group2(&mut self, memory_operand_or_branch_hint: impl PrefixGroup2)
{
memory_operand_or_branch_hint.emit_prefix_group2(&mut self.byte_emitter)
}
/// Emits the group 3 operand-size override prefix (`66`).
#[inline(always)]
fn prefix_group3(&mut self)
{
self.byte_emitter.emit_u8(0x66)
}
/// Emits the group 4 address-size override prefix (`67`) when the memory
/// operand uses 32-bit addressing.
#[inline(always)]
fn prefix_group4(&mut self, memory: impl HasMemoryOperand)
{
self.prefix_group4_if_address_override(memory.memory_operand().address_override_for_32_bit)
}
/// Emits the address-size override prefix (`67`) iff
/// `address_override_for_32_bit` is set.
#[inline(always)]
fn prefix_group4_if_address_override(&mut self, address_override_for_32_bit: bool)
{
if address_override_for_32_bit
{
const AddressOverridePrefix: u8 = 0x67;
self.byte_emitter.emit_u8(AddressOverridePrefix)
}
}
/// Emits a REX prefix for an `rm`/`r` operand pair, delegated so the operands
/// can merge in their own extension bits.
#[inline(always)]
fn rex_3(&mut self, rm: impl MemoryOrRegister, r: impl Register, byte: u8)
{
rm.emit_rex_3(&mut self.byte_emitter, r, byte)
}
/// Emits a REX prefix for a single `rm` operand, delegated so the operand can
/// merge in its own extension bits.
#[inline(always)]
fn rex_2(&mut self, rm: impl MemoryOrRegister, byte: u8)
{
rm.emit_rex_2(&mut self.byte_emitter, byte)
}
/// Emits the given REX byte unless it is zero (zero meaning "no REX prefix
/// required").
#[inline(always)]
fn rex_1(&mut self, byte: u8)
{
self.byte_emitter.emit_u8_if_not_zero(byte)
}
/// Emits a single opcode byte.
#[inline(always)]
pub(crate) fn opcode_1(&mut self, opcode: u8)
{
self.byte_emitter.emit_u8(opcode)
}
/// Emits a two-byte opcode, letting the encoding strategy `rcode` combine any
/// register bits into the opcode bytes.
#[inline(always)]
fn opcode_2(&mut self, opcode1: u8, rcode: impl OpcodeEncoding)
{
rcode.emit_2(self, opcode1)
}
/// Emits a three-byte opcode, letting the encoding strategy `rcode` combine
/// any register bits into the opcode bytes.
#[inline(always)]
fn opcode_3(&mut self, opcode1: u8, opcode2: u8, rcode: impl OpcodeEncoding)
{
rcode.emit_3(self, opcode1, opcode2)
}
/// Emits the ModRM byte (and SIB byte / displacement where required) for the
/// `rm`/`reg` operand pair, delegated to the operand.
#[inline(always)]
fn mod_rm_sib(&mut self, rm: impl MemoryOrRegister, reg: impl Register)
{
rm.emit_mod_rm_sib(&mut self.byte_emitter, reg)
}
/// Emits a displacement or immediate operand of whatever width `displacement`
/// carries.
#[inline(always)]
fn displacement_immediate_1(&mut self, displacement: impl AsDisplacement)
{
displacement.displacement().emit(&mut self.byte_emitter)
}
/// Emits a 16-bit immediate followed by an 8-bit immediate — the operand order
/// used by instructions such as `ENTER imm16, imm8`.
#[inline(always)]
fn displacement_immediate_2(&mut self, displacement1: Immediate8Bit, displacement2: Immediate16Bit)
{
// The 16-bit immediate is encoded first, then the 8-bit one.
self.displacement_immediate_1(displacement2);
self.displacement_immediate_1(displacement1);
}
/// Emits an 8-bit displacement to `label`, or records the site for later
/// back-patching in `finish()` if the label is not yet attached.
///
/// # Errors
///
/// If the label is attached but further away than an 8-bit displacement can
/// reach, the stream is rewound to the bookmark (callers are expected to have
/// called `bookmark()` before emitting the instruction — see
/// `reset_to_bookmark()` below) and `Err(())` is returned so the caller can
/// re-encode with a 32-bit displacement.
#[inline(always)]
fn displacement_label_8bit(&mut self, label: Label) -> ShortJmpResult
{
let insert_at_instruction_pointer = self.instruction_pointer();
self.skip_byte();
let target_instruction_pointer = self.target_instruction_pointer(label);
if target_instruction_pointer.is_valid()
{
match self.byte_emitter.insert_8_bit_effective_address_displacement(insert_at_instruction_pointer, target_instruction_pointer)
{
Ok(()) => Ok(()),
Err(()) =>
{
// Undo the whole partially-emitted instruction, not just the displacement byte.
self.reset_to_bookmark();
Err(())
}
}
}
else
{
self.instruction_pointers_to_replace_labels_with_8_bit_displacements.push((label, insert_at_instruction_pointer));
Ok(())
}
}
/// Emits a 32-bit displacement to `label`, or records the site for later
/// back-patching in `finish()` if the label is not yet attached.
///
/// # Panics
///
/// Panics if the label is attached but out of 32-bit range.
#[inline(always)]
fn displacement_label_32bit(&mut self, label: Label)
{
let insert_at_instruction_pointer = self.instruction_pointer();
self.skip_double_word();
let target_instruction_pointer = self.target_instruction_pointer(label);
if target_instruction_pointer.is_valid()
{
self.byte_emitter.insert_32_bit_effective_address_displacement(insert_at_instruction_pointer, target_instruction_pointer).expect("32-bit JMP was too far")
}
else
{
self.instruction_pointers_to_replace_labels_with_32_bit_displacements.push((label, insert_at_instruction_pointer));
}
}
/// Computes the signed 32-bit offset from the end of the instruction being
/// encoded (`instruction_pointer + offset_to_end_of_opcode_encoding`) to
/// `absolute_address` — the form used by RIP-relative call/jump encodings.
#[inline(always)]
fn relative_address_32bit(&self, absolute_address: impl FunctionPointer, offset_to_end_of_opcode_encoding: usize) -> RelativeAddress32Bit
{
// NOTE(review): the `as i32` cast truncates silently if the distance exceeds
// ±2GiB; callers must ensure the target is within range.
RelativeAddress32Bit(((absolute_address.absolute_virtual_address() as isize) - ((self.instruction_pointer() + offset_to_end_of_opcode_encoding) as isize)) as i32)
}
/// Emits an indirect `JMP` through a 64-bit memory slot addressed relative to
/// the current position in the instruction stream (presumably a jump table
/// emitted immediately after this instruction — confirm with callers).
///
/// Two encodings are tried: `[index * scale + disp32]` using the absolute
/// instruction pointer as the displacement, which only works while the code
/// lives in the low 2GiB of address space; otherwise the instruction is
/// rewound and re-emitted as `[base + index * scale + disp32]` where `base`
/// holds the start of the instruction stream and the displacement is relative
/// to it.
///
/// # Panics
///
/// In debug builds, panics if the stream-relative displacement exceeds 2GiB.
#[inline(always)]
pub fn jmp_Any64BitMemory_statically_relative_address(&mut self, index_register: Register64Bit, scale: IndexScale, base_register_holding_start_of_instructions_pointer: Register64Bit)
{
// A maximum placeholder forces the widest (32-bit) displacement encoding so
// it can be overwritten in place afterwards.
const ArtificallyLargeDisplacementPlaceholder: Immediate32Bit = Immediate32Bit::Maximum;
self.bookmark();
let memory_destination = Any64BitMemory::index_64_scale_displacement(index_register, scale, ArtificallyLargeDisplacementPlaceholder);
self.jmp_Any64BitMemory(memory_destination);
let instruction_pointer = self.instruction_pointer();
if instruction_pointer <= ::std::i32::MAX as usize
{
self.rewind_to_emit_double_word(instruction_pointer as u32);
return
}
// Too high for an absolute 32-bit displacement: undo and use base-relative form.
self.reset_to_bookmark();
let memory_destination = Any64BitMemory::base_64_index_64_scale_displacement(base_register_holding_start_of_instructions_pointer, index_register, scale, ArtificallyLargeDisplacementPlaceholder);
self.jmp_Any64BitMemory(memory_destination);
let image_base = self.start_instruction_pointer();
let displacement = self.instruction_pointer() - image_base;
debug_assert!(displacement <= ::std::i32::MAX as usize, "Jumps of more than 2Gb are not supported");
self.rewind_to_emit_double_word(displacement as u32)
}
/// Delegates to the `BitMemory` implementation to build a memory operand
/// addressing `array_location_in_memory` relative to this instruction stream.
#[inline(always)]
pub fn statically_relative_address<BM: BitMemory>(&self, array_location_in_memory: InstructionPointer, index_register: Register64Bit, scale: IndexScale, base_register_holding_start_of_instructions_pointer: Register64Bit) -> BM
{
BM::statically_relative_address(self, array_location_in_memory, index_register, scale, base_register_holding_start_of_instructions_pointer)
}
/// Rewrites the four bytes immediately before the current instruction pointer
/// (the last-emitted 32-bit displacement) so they encode the signed offset
/// from the current instruction pointer to `location_in_memory`.
///
/// In debug builds, panics if either address or the offset overflows what a
/// signed 32-bit displacement can hold.
#[inline(always)]
pub fn overwrite_last_32bit_displacement_with_relative_address_to(&mut self, location_in_memory: InstructionPointer)
{
debug_assert!(location_in_memory <= ::std::isize::MAX as usize, "location_in_memory is larger than ::std::isize::MAX");
let instruction_pointer = self.instruction_pointer();
debug_assert!(instruction_pointer <= ::std::isize::MAX as usize, "instruction_pointer is larger than ::std::isize::MAX");
let offset = (location_in_memory as isize) - (instruction_pointer as isize);
debug_assert!(offset <= (::std::i32::MAX as isize) && offset >= (::std::i32::MIN as isize), "offset to location_in_memory is bigger than a 32-bit displacement can hold");
self.rewind_to_emit_double_word(offset as i32 as u32);
}
/// Runs `emit_instructions` and then skips bytes so the whole block occupies
/// exactly `2^scale` bytes — useful for fixed-stride constructs such as jump
/// tables.
///
/// # Panics
///
/// In debug builds, panics if the emitted instructions exceed the desired
/// block size (in release builds the subtraction below would wrap; callers
/// must respect the size limit).
#[inline(always)]
pub fn emit_fixed_size_block<R>(&mut self, scale: impl Into<u8>, mut emit_instructions: impl FnMut(&mut Self) -> R) -> R
{
let scale = scale.into();
let desired_block_size = 1 << (scale as usize);
let start = self.instruction_pointer();
let result = emit_instructions(self);
let block_size = self.instruction_pointer() - start;
// Fixed: the assertion message previously had an unterminated quote around the scale value.
debug_assert!(block_size <= desired_block_size, "block_size '{}' exceeds desired_block_size '{}' (scale '{}')", block_size, desired_block_size, scale);
self.skip_bytes(desired_block_size - block_size);
result
}
}
include!("InstructionStream.instructions.rs");