use crate::cpu_modes::CpuModes;
use crate::cpuid::Cpuid;
use crate::decode_error::{status_to_error, DecodeError};
use crate::fpu_flags::FpuFlags;
use crate::instruction_category::Category;
use crate::isa_set::IsaSet;
use crate::mnemonic::Mnemonic;
use crate::operand;
use crate::operand::{OpAccess, Operands, OperandsLookup};
use crate::rflags::flags_raw;
use crate::tuple::Tuple;
use core::convert::TryFrom;
use core::fmt;
use core::mem;
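/// The outcome of a decode attempt: a [`DecodedInstruction`] on success, or a
/// [`DecodeError`] describing why the bytes could not be decoded.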
pub type DecodeResult = Result<DecodedInstruction, DecodeError>;
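/// The CPU mode in which the code is decoded.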
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum DecodeMode {
Bits16,
Bits32,
Bits64,
}
#[doc(hidden)]
impl From<DecodeMode> for (u8, u8) {
fn from(mode: DecodeMode) -> Self {
match mode {
DecodeMode::Bits16 => (ffi::ND_CODE_16 as u8, ffi::ND_DATA_16 as u8),
DecodeMode::Bits32 => (ffi::ND_CODE_32 as u8, ffi::ND_DATA_32 as u8),
DecodeMode::Bits64 => (ffi::ND_CODE_64 as u8, ffi::ND_DATA_64 as u8),
}
}
}
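/// The encoding used by an instruction: legacy, XOP, VEX, or EVEX.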
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum EncodingMode {
Legacy,
Xop,
Vex,
Evex,
}
#[doc(hidden)]
impl EncodingMode {
pub(crate) fn from_raw(value: u32) -> Result<Self, DecodeError> {
match value {
ffi::ND_ENCM_LEGACY => Ok(EncodingMode::Legacy),
ffi::ND_ENCM_XOP => Ok(EncodingMode::Xop),
ffi::ND_ENCM_VEX => Ok(EncodingMode::Vex),
ffi::ND_ENCM_EVEX => Ok(EncodingMode::Evex),
_ => Err(DecodeError::InternalError(value.into())),
}
}
}
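/// The form of the VEX prefix: two-byte (`0xC5`) or three-byte (`0xC4`).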
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum VexMode {
Vex2b,
Vex3b,
}
#[doc(hidden)]
impl VexMode {
pub(crate) fn from_raw(value: u32) -> Result<Self, DecodeError> {
match value {
ffi::ND_VEXM_2B => Ok(VexMode::Vex2b),
ffi::ND_VEXM_3B => Ok(VexMode::Vex3b),
_ => Err(DecodeError::InternalError(value.into())),
}
}
}
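/// The addressing mode used by an instruction.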
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum AddressingMode {
Addr16,
Addr32,
Addr64,
}
#[doc(hidden)]
impl AddressingMode {
pub(crate) fn from_raw(value: u32) -> Result<Self, DecodeError> {
match value {
ffi::ND_ADDR_16 => Ok(AddressingMode::Addr16),
ffi::ND_ADDR_32 => Ok(AddressingMode::Addr32),
ffi::ND_ADDR_64 => Ok(AddressingMode::Addr64),
_ => Err(DecodeError::InternalError(value.into())),
}
}
}
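/// The operand size mode used by an instruction.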
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum OperandSize {
OpSize16,
OpSize32,
OpSize64,
}
#[doc(hidden)]
impl OperandSize {
pub(crate) fn from_raw(value: u32) -> Result<Self, DecodeError> {
match value {
ffi::ND_OPSZ_16 => Ok(OperandSize::OpSize16),
ffi::ND_OPSZ_32 => Ok(OperandSize::OpSize32),
ffi::ND_OPSZ_64 => Ok(OperandSize::OpSize64),
_ => Err(DecodeError::InternalError(value.into())),
}
}
}
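/// The vector length used by a SIMD instruction.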
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum VectorSize {
VecSize128,
VecSize256,
VecSize512,
}
#[doc(hidden)]
impl VectorSize {
pub(crate) fn from_raw(value: u32) -> Result<Self, DecodeError> {
match value {
ffi::ND_VECM_128 => Ok(VectorSize::VecSize128),
ffi::ND_VECM_256 => Ok(VectorSize::VecSize256),
ffi::ND_VECM_512 => Ok(VectorSize::VecSize512),
_ => Err(DecodeError::InternalError(value.into())),
}
}
}
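/// Describes how an instruction interacts with the flags register.
///
/// Each mask field is a combination of individual RFLAGS bits.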
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct FlagsAccess {
/// How the flags register as a whole is accessed.
pub mode: OpAccess,
/// Flags tested by the instruction.
pub tested: u32,
/// Flags modified (according to the result) by the instruction.
pub modified: u32,
/// Flags set to 1 by the instruction.
pub set: u32,
/// Flags cleared to 0 by the instruction.
pub cleared: u32,
/// Flags left with an undefined value by the instruction.
pub undefined: u32,
}
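/// The rounding mode selected by the EVEX embedded rounding control.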
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum EvexRounding {
/// Round to nearest, ties to even.
NearestEqual,
/// Round toward negative infinity.
Down,
/// Round toward positive infinity.
Up,
/// Round toward zero.
Zero,
}
#[doc(hidden)]
impl EvexRounding {
pub(crate) fn from_raw(value: u8) -> Result<Self, DecodeError> {
if value == ffi::_ND_ROUNDING::ND_RND_RNE as u8 {
Ok(EvexRounding::NearestEqual)
} else if value == ffi::_ND_ROUNDING::ND_RND_RD as u8 {
Ok(EvexRounding::Down)
} else if value == ffi::_ND_ROUNDING::ND_RND_RU as u8 {
Ok(EvexRounding::Up)
} else if value == ffi::_ND_ROUNDING::ND_RND_RZ as u8 {
Ok(EvexRounding::Zero)
} else {
Err(DecodeError::InternalError(value.into()))
}
}
}
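/// Indicates which prefixes are valid for an instruction.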
#[allow(clippy::struct_excessive_bools)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ValidPrefixes {
pub rep: bool,
pub rep_cond: bool,
pub lock: bool,
pub hle: bool,
pub xacquire: bool,
pub xrelease: bool,
pub bnd: bool,
pub bhint: bool,
pub hle_no_lock: bool,
pub dnt: bool,
}
#[doc(hidden)]
impl ValidPrefixes {
pub(crate) fn from_raw(raw: ffi::ND_VALID_PREFIXES) -> Self {
let raw = unsafe { raw.__bindgen_anon_1 };
Self {
rep: raw.Rep() != 0,
rep_cond: raw.RepCond() != 0,
lock: raw.Lock() != 0,
hle: raw.Hle() != 0,
xacquire: raw.Xacquire() != 0,
xrelease: raw.Xrelease() != 0,
bnd: raw.Bnd() != 0,
bhint: raw.Bhint() != 0,
hle_no_lock: raw.HleNoLock() != 0,
dnt: raw.Dnt() != 0,
}
}
}
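/// Indicates which EVEX decorators are valid for an instruction.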
#[allow(clippy::struct_excessive_bools)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ValidDecorators {
pub er: bool,
pub sae: bool,
pub zero: bool,
pub mask: bool,
pub broadcast: bool,
}
#[doc(hidden)]
impl ValidDecorators {
pub(crate) fn from_raw(raw: ffi::ND_VALID_DECORATORS) -> Self {
let raw = unsafe { raw.__bindgen_anon_1 };
Self {
er: raw.Er() != 0,
sae: raw.Sae() != 0,
zero: raw.Zero() != 0,
mask: raw.Mask() != 0,
broadcast: raw.Broadcast() != 0,
}
}
}
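/// A decoded x86 instruction.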
#[derive(Copy, Clone, Debug)]
pub struct DecodedInstruction {
inner: ffi::INSTRUX,
ip: u64,
instruction: Mnemonic,
length: usize,
}
impl DecodedInstruction {
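/// Decodes the first instruction found in `code`, assuming that the
/// instruction is located at address `ip`.
///
/// `ip` only influences how RIP-relative operands are rendered by the
/// [`fmt::Display`] implementation; it has no effect on decoding itself.
///
/// # Errors
///
/// Returns a [`DecodeError`] if `code` does not hold a valid instruction for
/// the given mode.
///
/// # Examples
///
/// A minimal sketch; the `bddisasm` crate paths used below are assumed:
///
/// ```no_run
/// use bddisasm::decoded_instruction::{DecodeMode, DecodedInstruction};
///
/// // `48 8B 05 F9 FF FF FF` is a RIP-relative `MOV` in 64-bit mode.
/// let code = b"\x48\x8b\x05\xf9\xff\xff\xff";
/// let ins = DecodedInstruction::decode_with_ip(code, DecodeMode::Bits64, 0x100)?;
/// println!("{}", ins);
/// # Ok::<(), bddisasm::decode_error::DecodeError>(())
/// ```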
pub fn decode_with_ip(code: &[u8], mode: DecodeMode, ip: u64) -> DecodeResult {
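// `INSTRUX` is a large structure, so skip zero-initialization and let the
// decoder fill it in.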
let mut instrux: mem::MaybeUninit<ffi::INSTRUX> = mem::MaybeUninit::uninit();
let instrux = instrux.as_mut_ptr();
let (code_def, data_def) = mode.into();
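// SAFETY: `instrux` points to enough writable memory for one `INSTRUX`, and
// `code.as_ptr()`/`code.len()` describe a valid, readable buffer.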
let status = unsafe {
ffi::NdDecodeEx(
instrux,
code.as_ptr(),
code.len() as u64,
code_def,
data_def,
)
};
status_to_error(status)?;
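// The decoder reported success, so the structure is fully initialized.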
let instrux = unsafe { *instrux };
Ok(DecodedInstruction {
inner: instrux,
ip,
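// `status_to_error` already rejected invalid encodings, so the decoder is
// guaranteed to report a known mnemonic here.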
instruction: Mnemonic::try_from(instrux.Instruction).unwrap(),
length: instrux.Length as usize,
})
}
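/// Decodes the first instruction found in `code`, with the instruction
/// pointer defaulting to 0.
///
/// # Errors
///
/// Returns a [`DecodeError`] if `code` does not hold a valid instruction for
/// the given mode.
///
/// # Examples
///
/// A minimal sketch; the `bddisasm` crate paths used below are assumed:
///
/// ```no_run
/// use bddisasm::decoded_instruction::{DecodeMode, DecodedInstruction};
/// use bddisasm::mnemonic::Mnemonic;
///
/// // `B8 00 00 00 00` is `MOV eax, 0` in 32-bit mode.
/// let ins = DecodedInstruction::decode(&[0xb8, 0x00, 0x00, 0x00, 0x00], DecodeMode::Bits32)?;
/// assert_eq!(ins.mnemonic(), Mnemonic::MOV);
/// # Ok::<(), bddisasm::decode_error::DecodeError>(())
/// ```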
pub fn decode(code: &[u8], mode: DecodeMode) -> DecodeResult {
Self::decode_with_ip(code, mode, 0)
}
#[inline]
#[must_use]
pub fn mnemonic(&self) -> Mnemonic {
self.instruction
}
#[inline]
#[must_use]
pub fn length(&self) -> usize {
self.length
}
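/// Returns the operands of this instruction, including implicit ones.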
#[must_use]
pub fn operands(&self) -> Operands {
let mut operands = Operands::default();
let op_count = self.inner.OperandsCount();
for op_index in 0..op_count {
operands.operands[op_index as usize] =
operand::Operand::from_raw(self.inner.Operands[op_index as usize]).unwrap();
}
operands.actual_count = op_count as usize;
operands
}
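/// Returns the CPUID leaf, sub-leaf, register, and bit that indicate support
/// for this instruction, or `None` if no CPUID flag is defined for it.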
#[must_use]
pub fn cpuid(&self) -> Option<Cpuid> {
let cpuid = unsafe { self.inner.CpuidFlag.__bindgen_anon_1 };
let leaf = cpuid.Leaf;
if leaf == ffi::ND_CFF_NO_LEAF {
None
} else {
let sub_leaf = cpuid.SubLeaf();
let sub_leaf = if sub_leaf == ffi::ND_CFF_NO_SUBLEAF {
None
} else {
Some(sub_leaf)
};
let register = cpuid.Reg() as u8;
let bit = u64::from(cpuid.Bit());
Some(Cpuid {
leaf,
sub_leaf,
register,
bit,
})
}
}
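/// The encoding mode used by this instruction.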
#[inline]
#[must_use]
pub fn encoding_mode(&self) -> EncodingMode {
EncodingMode::from_raw(u32::from(self.inner.EncMode())).unwrap()
}
#[inline]
#[must_use]
pub fn vex_mode(&self) -> Option<VexMode> {
if self.has_vex() {
Some(VexMode::from_raw(u32::from(self.inner.VexMode())).unwrap())
} else {
None
}
}
#[inline]
#[must_use]
pub fn addr_mode(&self) -> AddressingMode {
AddressingMode::from_raw(u32::from(self.inner.AddrMode())).unwrap()
}
#[inline]
#[must_use]
pub fn op_mode(&self) -> OperandSize {
OperandSize::from_raw(u32::from(self.inner.OpMode())).unwrap()
}
#[inline]
#[must_use]
pub fn effective_op_mode(&self) -> OperandSize {
OperandSize::from_raw(u32::from(self.inner.EfOpMode())).unwrap()
}
#[inline]
#[must_use]
pub fn vec_mode(&self) -> Option<VectorSize> {
if self.has_vector() {
Some(VectorSize::from_raw(u32::from(self.inner.VecMode())).unwrap())
} else {
None
}
}
#[inline]
#[must_use]
pub fn effective_vec_mode(&self) -> Option<VectorSize> {
if self.has_vector() {
Some(VectorSize::from_raw(u32::from(self.inner.EfVecMode())).unwrap())
} else {
None
}
}
#[inline]
#[must_use]
pub fn has_rex(&self) -> bool {
self.inner.HasRex() != 0
}
#[inline]
#[must_use]
pub fn has_vex(&self) -> bool {
self.inner.HasVex() != 0
}
#[inline]
#[must_use]
pub fn has_xop(&self) -> bool {
self.inner.HasXop() != 0
}
#[inline]
#[must_use]
pub fn has_evex(&self) -> bool {
self.inner.HasEvex() != 0
}
#[inline]
#[must_use]
pub fn has_op_size(&self) -> bool {
self.inner.HasOpSize() != 0
}
#[inline]
#[must_use]
pub fn has_addr_size(&self) -> bool {
self.inner.HasAddrSize() != 0
}
#[inline]
#[must_use]
pub fn has_lock(&self) -> bool {
self.inner.HasLock() != 0
}
#[inline]
#[must_use]
pub fn has_repnz_xacquire_bnd(&self) -> bool {
self.inner.HasRepnzXacquireBnd() != 0
}
#[inline]
#[must_use]
pub fn has_rep_repz_xrelease(&self) -> bool {
self.inner.HasRepRepzXrelease() != 0
}
#[inline]
#[must_use]
pub fn has_seg(&self) -> bool {
self.inner.HasSeg() != 0
}
#[inline]
#[must_use]
pub fn is_repeated(&self) -> bool {
self.inner.IsRepeated() != 0
}
#[inline]
#[must_use]
pub fn is_xacquire_enabled(&self) -> bool {
self.inner.IsXacquireEnabled() != 0
}
#[inline]
#[must_use]
pub fn is_xrelease_enabled(&self) -> bool {
self.inner.IsXreleaseEnabled() != 0
}
#[inline]
#[must_use]
pub fn is_rip_relative(&self) -> bool {
self.inner.IsRipRelative() != 0
}
#[inline]
#[must_use]
pub fn is_cet_tracked(&self) -> bool {
self.inner.IsCetTracked() != 0
}
#[inline]
#[must_use]
pub fn has_mod_rm(&self) -> bool {
self.inner.HasModRm() != 0
}
#[inline]
#[must_use]
pub fn has_sib(&self) -> bool {
self.inner.HasSib() != 0
}
#[inline]
#[must_use]
pub fn has_disp(&self) -> bool {
self.inner.HasDisp() != 0
}
#[inline]
#[must_use]
pub fn has_addr(&self) -> bool {
self.inner.HasAddr() != 0
}
#[inline]
#[must_use]
pub fn has_moffset(&self) -> bool {
self.inner.HasMoffset() != 0
}
#[inline]
#[must_use]
pub fn has_imm1(&self) -> bool {
self.inner.HasImm1() != 0
}
#[inline]
#[must_use]
pub fn has_imm2(&self) -> bool {
self.inner.HasImm2() != 0
}
#[inline]
#[must_use]
pub fn has_rel_offs(&self) -> bool {
self.inner.HasRelOffs() != 0
}
#[inline]
#[must_use]
pub fn has_sse_imm(&self) -> bool {
self.inner.HasSseImm() != 0
}
#[inline]
#[must_use]
pub fn has_comp_disp(&self) -> bool {
self.inner.HasCompDisp() != 0
}
#[inline]
#[must_use]
pub fn has_broadcast(&self) -> bool {
self.inner.HasBroadcast() != 0
}
#[inline]
#[must_use]
pub fn has_mask(&self) -> bool {
self.inner.HasMask() != 0
}
#[inline]
#[must_use]
pub fn has_zero(&self) -> bool {
self.inner.HasZero() != 0
}
#[inline]
#[must_use]
pub fn has_er(&self) -> bool {
self.inner.HasEr() != 0
}
#[inline]
#[must_use]
pub fn has_sae(&self) -> bool {
self.inner.HasSae() != 0
}
#[inline]
#[must_use]
pub fn has_ign_er(&self) -> bool {
self.inner.HasIgnEr() != 0
}
#[inline]
#[must_use]
pub fn has_mandatory_66(&self) -> bool {
self.inner.HasMandatory66() != 0
}
#[inline]
#[must_use]
pub fn has_mandatory_f2(&self) -> bool {
self.inner.HasMandatoryF2() != 0
}
#[inline]
#[must_use]
pub fn has_mandatory_f3(&self) -> bool {
self.inner.HasMandatoryF3() != 0
}
#[inline]
#[must_use]
pub fn word_length(&self) -> usize {
self.inner.WordLength() as usize
}
#[inline]
#[must_use]
pub fn pref_length(&self) -> usize {
self.inner.PrefLength() as usize
}
#[inline]
#[must_use]
pub fn op_length(&self) -> usize {
self.inner.OpLength() as usize
}
#[inline]
#[must_use]
pub fn disp_length(&self) -> usize {
self.inner.DispLength() as usize
}
#[inline]
#[must_use]
pub fn addr_length(&self) -> usize {
self.inner.AddrLength() as usize
}
#[inline]
#[must_use]
pub fn moffset_length(&self) -> usize {
self.inner.MoffsetLength() as usize
}
#[inline]
#[must_use]
pub fn imm1_length(&self) -> usize {
self.inner.Imm1Length() as usize
}
#[inline]
#[must_use]
pub fn imm2_length(&self) -> usize {
self.inner.Imm2Length() as usize
}
#[inline]
#[must_use]
pub fn rel_offs_length(&self) -> usize {
self.inner.RelOffsLength() as usize
}
#[inline]
#[must_use]
pub fn op_offset(&self) -> usize {
self.inner.OpOffset() as usize
}
#[inline]
#[must_use]
pub fn main_op_offset(&self) -> usize {
self.inner.MainOpOffset() as usize
}
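// For the `*_offset` accessors below, a raw offset of 0 means the component
// is not present: no displacement, immediate, or similar component can start
// at the very first byte of an instruction.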
#[inline]
#[must_use]
pub fn disp_offset(&self) -> Option<usize> {
let value = self.inner.DispOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn addr_offset(&self) -> Option<usize> {
let value = self.inner.AddrOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn moffset_offset(&self) -> Option<usize> {
let value = self.inner.MoffsetOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn imm1_offset(&self) -> Option<usize> {
let value = self.inner.Imm1Offset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn imm2_offset(&self) -> Option<usize> {
let value = self.inner.Imm2Offset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn rel_offs_offset(&self) -> Option<usize> {
let value = self.inner.RelOffsOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn sse_imm_offset(&self) -> Option<usize> {
let value = self.inner.SseImmOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn mod_rm_offset(&self) -> Option<usize> {
let value = self.inner.ModRmOffset() as usize;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn stack_words(&self) -> usize {
self.inner.StackWords() as usize
}
#[inline]
#[must_use]
pub fn rep(&self) -> Option<u8> {
let value = self.inner.Rep;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn seg(&self) -> Option<u8> {
let value = self.inner.Seg;
if value == 0 {
None
} else {
Some(value)
}
}
#[inline]
#[must_use]
pub fn mod_rm(&self) -> Option<u8> {
if self.has_mod_rm() {
Some(unsafe { self.inner.ModRm.ModRm })
} else {
None
}
}
#[inline]
#[must_use]
pub fn sib(&self) -> Option<u8> {
if self.has_sib() {
Some(unsafe { self.inner.Sib.Sib })
} else {
None
}
}
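// The accessors below expose the raw VEX/XOP/EVEX prefix bytes exactly as
// they appear in the encoded instruction.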
#[inline]
#[must_use]
pub fn vex2(&self) -> Option<(u8, u8)> {
if matches!(self.vex_mode(), Some(VexMode::Vex2b)) {
let vex2 = self.inner.__bindgen_anon_1;
let vex2 = unsafe { vex2.Vex2.Vex };
Some((vex2[0], vex2[1]))
} else {
None
}
}
#[inline]
#[must_use]
pub fn vex3(&self) -> Option<(u8, u8, u8)> {
if matches!(self.vex_mode(), Some(VexMode::Vex3b)) {
let vex3 = self.inner.__bindgen_anon_1;
let vex3 = unsafe { vex3.Vex3.Vex };
Some((vex3[0], vex3[1], vex3[2]))
} else {
None
}
}
#[inline]
#[must_use]
pub fn xop(&self) -> Option<(u8, u8, u8)> {
if self.has_xop() {
let xop = self.inner.__bindgen_anon_1;
let xop = unsafe { xop.Xop.Xop };
Some((xop[0], xop[1], xop[2]))
} else {
None
}
}
#[inline]
#[must_use]
pub fn evex(&self) -> Option<(u8, u8, u8, u8)> {
if self.has_evex() {
let evex = self.inner.__bindgen_anon_1;
let evex = unsafe { evex.Evex.Evex };
Some((evex[0], evex[1], evex[2], evex[3]))
} else {
None
}
}
#[inline]
#[must_use]
pub fn moffset(&self) -> Option<u64> {
if self.has_moffset() {
Some(unsafe { self.inner.__bindgen_anon_2.Moffset })
} else {
None
}
}
#[inline]
#[must_use]
pub fn disp(&self) -> Option<u32> {
if self.has_disp() {
Some(unsafe { self.inner.__bindgen_anon_2.Displacement })
} else {
None
}
}
#[inline]
#[must_use]
pub fn rel_offset(&self) -> Option<u32> {
if self.has_rel_offs() {
Some(unsafe { self.inner.__bindgen_anon_2.RelativeOffset })
} else {
None
}
}
#[inline]
#[must_use]
pub fn immediate1(&self) -> Option<u64> {
if self.has_imm1() {
Some(self.inner.Immediate1)
} else {
None
}
}
#[inline]
#[must_use]
pub fn immediate2(&self) -> Option<u8> {
if self.has_imm2() {
Some(unsafe { self.inner.__bindgen_anon_3.Immediate2 })
} else {
None
}
}
#[inline]
#[must_use]
pub fn sse_immediate(&self) -> Option<u8> {
if self.has_sse_imm() {
Some(unsafe { self.inner.__bindgen_anon_3.SseImmediate })
} else {
None
}
}
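/// The condition code encoded in the opcode of conditional instructions (for
/// example `Jcc`, `SETcc`, or `CMOVcc`), if present.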
#[inline]
#[must_use]
pub fn cond(&self) -> Option<u8> {
if (self.inner.Attributes & ffi::ND_FLAG_COND as u64) != 0 {
Some(self.inner.__bindgen_anon_4.Condition())
} else {
None
}
}
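/// `true` if this is a 3DNow! instruction, which encodes its opcode in a
/// suffix byte instead of the regular opcode map.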
#[inline]
#[must_use]
pub fn is_3d_now(&self) -> bool {
(self.inner.Attributes & ffi::ND_FLAG_3DNOW as u64) != 0
}
#[inline]
#[must_use]
pub fn operands_count(&self) -> usize {
self.inner.OperandsCount() as usize
}
#[inline]
#[must_use]
pub fn exp_operands_count(&self) -> usize {
self.inner.ExpOperandsCount() as usize
}
#[inline]
#[must_use]
pub fn cs_access(&self) -> OpAccess {
OpAccess::from_raw(ffi::ND_OPERAND_ACCESS {
Access: self.inner.CsAccess,
})
}
#[inline]
#[must_use]
pub fn rip_access(&self) -> OpAccess {
OpAccess::from_raw(ffi::ND_OPERAND_ACCESS {
Access: self.inner.RipAccess,
})
}
#[inline]
#[must_use]
pub fn stack_access(&self) -> OpAccess {
OpAccess::from_raw(ffi::ND_OPERAND_ACCESS {
Access: self.inner.StackAccess,
})
}
#[inline]
#[must_use]
pub fn memory_access(&self) -> OpAccess {
OpAccess::from_raw(ffi::ND_OPERAND_ACCESS {
Access: self.inner.MemoryAccess,
})
}
#[inline]
#[must_use]
pub fn is_branch(&self) -> bool {
self.inner.BranchInfo.IsBranch() != 0
}
#[inline]
#[must_use]
pub fn is_conditional_branch(&self) -> bool {
self.inner.BranchInfo.IsConditional() != 0
}
#[inline]
#[must_use]
pub fn is_indirect_branch(&self) -> bool {
self.inner.BranchInfo.IsIndirect() != 0
}
#[inline]
#[must_use]
pub fn is_far_branch(&self) -> bool {
self.inner.BranchInfo.IsFar() != 0
}
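/// Describes how this instruction accesses the flags register.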
#[must_use]
pub fn flags_access(&self) -> FlagsAccess {
let facc = self.inner.FlagsAccess;
let mode = OpAccess::from_raw(ffi::ND_OPERAND_ACCESS {
Access: self.inner.RflAccess,
});
FlagsAccess {
mode,
tested: flags_raw(facc.Tested),
modified: flags_raw(facc.Modified),
set: flags_raw(facc.Set),
cleared: flags_raw(facc.Cleared),
undefined: flags_raw(facc.Undefined),
}
}
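/// Describes how this instruction accesses the FPU status word flags.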
#[inline]
#[must_use]
pub fn fpu_flags_access(&self) -> FpuFlags {
FpuFlags::from_raw(self.inner.FpuFlagsAccess).unwrap()
}
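/// The EVEX tuple type, which controls compressed displacement scaling, if
/// the instruction is EVEX-encoded.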
#[inline]
#[must_use]
pub fn evex_tuple(&self) -> Option<Tuple> {
if self.has_evex() {
Some(Tuple::from_raw(u32::from(self.inner.TupleType)).unwrap())
} else {
None
}
}
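/// The EVEX embedded rounding mode, if one is active for this instruction.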
#[inline]
#[must_use]
pub fn evex_rounding(&self) -> Option<EvexRounding> {
if self.has_er() {
Some(EvexRounding::from_raw(self.inner.RoundingMode()).unwrap())
} else {
None
}
}
#[inline]
#[must_use]
pub fn category(&self) -> Category {
Category::try_from(self.inner.Category).unwrap()
}
#[inline]
#[must_use]
pub fn isa_set(&self) -> IsaSet {
IsaSet::try_from(self.inner.IsaSet).unwrap()
}
#[inline]
#[must_use]
pub fn valid_cpu_modes(&self) -> CpuModes {
CpuModes::from_raw(self.inner.ValidModes)
}
#[inline]
#[must_use]
pub fn valid_prefixes(&self) -> ValidPrefixes {
ValidPrefixes::from_raw(self.inner.ValidPrefixes)
}
#[inline]
#[must_use]
pub fn valid_decorators(&self) -> ValidDecorators {
ValidDecorators::from_raw(self.inner.ValidDecorators)
}
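/// The primary opcode byte of this instruction.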
#[inline]
#[must_use]
pub fn primary_op_code(&self) -> u8 {
unsafe { self.inner.__bindgen_anon_4.PrimaryOpCode }
}
#[inline]
#[must_use]
pub fn has_vector(&self) -> bool {
self.inner.Attributes & ffi::ND_FLAG_VECTOR as u64 != 0
}
}
impl DecodedInstruction {
/// The raw bytes of the decoded instruction.
#[inline]
#[must_use]
pub fn bytes(&self) -> &[u8] {
&self.inner.InstructionBytes[..self.inner.Length as usize]
}
/// The opcode bytes of the instruction.
#[inline]
#[must_use]
pub fn op_code_bytes(&self) -> &[u8] {
&self.inner.OpCodeBytes[..self.op_length()]
}
/// Returns a view that offers convenient access to the instruction operands.
#[inline]
#[must_use]
pub fn operand_lookup(&self) -> OperandsLookup {
OperandsLookup::from_raw(&self.inner)
}
}
impl fmt::Display for DecodedInstruction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer: [u8; ffi::ND_MIN_BUF_SIZE as usize] = [0; ffi::ND_MIN_BUF_SIZE as usize];
let status = unsafe {
ffi::NdToText(
&self.inner,
self.ip,
buffer.len() as u32,
// `c_char` is `i8` on some targets and `u8` on others, so let type
// inference pick the correct pointer cast.
buffer.as_mut_ptr().cast(),
)
};
match status_to_error(status) {
Ok(_) => {
// The buffer is NUL-padded; trim the padding rather than risking a
// panic inside `Display::fmt`.
let text = core::str::from_utf8(&buffer).map_err(|_| fmt::Error)?;
write!(f, "{}", text.trim_matches(char::from(0)))
}
Err(_) => Err(fmt::Error),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn decode() {
let code = vec![0xb8, 0x00, 0x00, 0x00, 0x00];
let ins = DecodedInstruction::decode(&code, DecodeMode::Bits32).expect("Unable to decode");
assert_eq!(ins.instruction, Mnemonic::MOV);
assert_eq!(ins.bytes(), code);
assert_eq!(format!("{}", ins), "MOV eax, 0x00000000");
}
#[test]
fn decode_with_ip() {
let code = b"\x48\x8b\x05\xf9\xff\xff\xff";
let ins = DecodedInstruction::decode_with_ip(code, DecodeMode::Bits64, 0x100)
.expect("Unable to decode");
assert_eq!(ins.instruction, Mnemonic::MOV);
assert_eq!(ins.bytes(), code);
assert_eq!(format!("{}", ins), "MOV rax, qword ptr [rel 0x100]");
}
fn get_tokens(line: &str, index: usize) -> u32 {
// `split_whitespace` both splits the line and skips empty tokens.
let tokens = line.split_whitespace().collect::<Vec<&str>>();
let token = tokens[index];
if let Some(hex) = token.strip_prefix("0x") {
u32::from_str_radix(hex, 16).unwrap()
} else {
token.parse().unwrap()
}
}
#[test]
fn constants() {
let bindings = include_str!("../../bddisasm-sys/csrc/inc/bddisasm.h");
let mut shadow_stack_count: u8 = 0;
let mut tuple_count: u32 = 0;
let mut evex_rounding: u8 = 0;
for line in bindings.lines() {
if line.starts_with("#define ND_ENCM_") {
assert!(EncodingMode::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_VEXM_") {
assert!(VexMode::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_ADDR_") {
assert!(AddressingMode::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_OPSZ_") {
assert!(OperandSize::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_VECM_") {
assert!(VectorSize::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_SIZE_")
&& !line.starts_with("#define ND_SIZE_TO_MASK(sz)")
{
assert!(operand::OpSize::from_raw(get_tokens(line, 2)).is_ok());
} else if line.starts_with(" ND_SHSTK_") {
assert!(operand::ShadowStackAccess::from_raw(shadow_stack_count).is_ok());
shadow_stack_count += 1;
} else if line.starts_with("#define ND_FPU_FLAG_") {
assert!(
crate::fpu_flags::FpuFlagsAccess::from_raw(get_tokens(line, 2) as u8).is_ok()
);
} else if line.starts_with(" ND_TUPLE_") {
assert!(Tuple::from_raw(tuple_count).is_ok());
tuple_count += 1;
} else if line.starts_with(" ND_RND_") {
assert!(EvexRounding::from_raw(evex_rounding).is_ok());
evex_rounding += 1;
}
}
}
#[test]
fn status() {
let status = include_str!("../../bddisasm-sys/csrc/inc/bddisasm_status.h");
for line in status.lines() {
if line.starts_with("#define ND_STATUS_SUCCESS")
|| line.starts_with("#define ND_STATUS_HINT_OPERAND_NOT_USED")
{
assert!(status_to_error(get_tokens(line, 2)).is_ok());
} else if line.starts_with("#define ND_STATUS_") {
assert!(status_to_error(get_tokens(line, 2)).is_err());
}
}
}
#[test]
fn check_all_evex_roundings() {
match ffi::_ND_ROUNDING::ND_RND_RNE {
ffi::_ND_ROUNDING::ND_RND_RNE => {}
ffi::_ND_ROUNDING::ND_RND_RD => {}
ffi::_ND_ROUNDING::ND_RND_RU => {}
ffi::_ND_ROUNDING::ND_RND_RZ => {}
}
}
}