capstone_git/arch/arm64.rs

//! Contains arm64-specific types
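//!
//! A minimal end-to-end sketch of how these types are normally reached through
//! the crate's top-level API (the builder calls, the `ArchDetail` variant name,
//! and the example bytes below are assumptions based on the rest of the
//! `capstone` crate, not on anything defined in this module):
//!
//! ```no_run
//! use capstone::arch::{self, ArchDetail, DetailsArchInsn};
//! use capstone::prelude::*;
//!
//! // "\x00\x00\x00\x8b" is the little-endian encoding of `add x0, x0, x0`.
//! let cs = Capstone::new()
//!     .arm64()
//!     .mode(arch::arm64::ArchMode::Arm)
//!     .detail(true)
//!     .build()
//!     .expect("failed to create Capstone handle");
//! let insns = cs
//!     .disasm_all(b"\x00\x00\x00\x8b", 0x1000)
//!     .expect("disassembly failed");
//! for insn in insns.iter() {
//!     let detail = cs.insn_detail(insn).expect("instruction details unavailable");
//!     if let ArchDetail::Arm64Detail(arm64) = detail.arch_detail() {
//!         println!("updates flags: {}", arm64.update_flags());
//!         for op in arm64.operands() {
//!             println!("operand: {:?}", op.op_type);
//!         }
//!     }
//! }
//! ```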

pub use crate::arch::arch_builder::arm64::*;
use crate::arch::DetailsArchInsn;
use crate::instruction::{AccessType, RegId, RegIdInt};
use capstone_sys::{arm64_op_mem, arm64_op_sme_index, arm64_op_type, cs_ac_type, cs_arm64, cs_arm64_op};
use core::convert::{From, TryInto};
use core::{cmp, fmt, mem, slice};
use core::ffi::c_uint;

// Re-exports
pub use capstone_sys::arm64_barrier_op as ArmBarrierOp;
pub use capstone_sys::arm64_barrier_op as Arm64BarrierOp;
pub use capstone_sys::arm64_cc as Arm64CC;
pub use capstone_sys::arm64_extender as Arm64Extender;
pub use capstone_sys::arm64_insn as Arm64Insn;
pub use capstone_sys::arm64_insn_group as Arm64InsnGroup;
pub use capstone_sys::arm64_prefetch_op as ArmPrefetchOp;
pub use capstone_sys::arm64_pstate as Arm64Pstate;
pub use capstone_sys::arm64_reg as Arm64Reg;
pub use capstone_sys::arm64_svcr_op as Arm64SvcrOp;
pub use capstone_sys::arm64_sys_op as Arm64SysOp;
pub use capstone_sys::arm64_sysreg as Arm64Sysreg;
pub use capstone_sys::arm64_vas as Arm64Vas;

use capstone_sys::arm64_shifter;
use capstone_sys::cs_arm64_op__bindgen_ty_2;

/// Contains ARM64-specific details for an instruction
pub struct Arm64InsnDetail<'a>(pub(crate) &'a cs_arm64);

/// ARM64 shift amount
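///
/// A small usage sketch (the shift value here is hand-made, not taken from a
/// real disassembly):
///
/// ```
/// use capstone::arch::arm64::Arm64Shift;
///
/// let shift = Arm64Shift::Lsl(12);
/// let amount = match shift {
///     Arm64Shift::Invalid => None,
///     Arm64Shift::Lsl(n)
///     | Arm64Shift::Msl(n)
///     | Arm64Shift::Lsr(n)
///     | Arm64Shift::Asr(n)
///     | Arm64Shift::Ror(n) => Some(n),
/// };
/// assert_eq!(amount, Some(12));
/// ```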
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum Arm64Shift {
    Invalid,

    /// Logical shift left
    Lsl(u32),

    /// Masking shift left
    Msl(u32),

    /// Logical shift right
    Lsr(u32),

    /// Arithmetic shift right
    Asr(u32),

    /// Rotate right
    Ror(u32),
}

impl Arm64OperandType {
    fn new(
        op_type: arm64_op_type,
        value: cs_arm64_op__bindgen_ty_2,
        svcr: Arm64SvcrOp,
    ) -> Arm64OperandType {
        use self::arm64_op_type::*;
        use self::Arm64OperandType::*;

        match op_type {
            ARM64_OP_INVALID => Invalid,
            ARM64_OP_REG => Reg(RegId(unsafe { value.reg } as RegIdInt)),
            ARM64_OP_IMM => Imm(unsafe { value.imm }),
            ARM64_OP_MEM => Mem(Arm64OpMem(unsafe { value.mem })),
            ARM64_OP_FP => Fp(unsafe { value.fp }),
            ARM64_OP_CIMM => Cimm(unsafe { value.imm }),
            ARM64_OP_REG_MRS => RegMrs(unsafe {
                mem::transmute::<Arm64Reg::Type, Arm64Sysreg>(value.reg as Arm64Reg::Type)
            }),
            ARM64_OP_REG_MSR => RegMsr(unsafe {
                mem::transmute::<Arm64Reg::Type, Arm64Sysreg>(value.reg as Arm64Reg::Type)
            }),
            ARM64_OP_PSTATE => Pstate(unsafe { value.pstate }),
            ARM64_OP_SYS => Sys(unsafe { value.sys }),
            ARM64_OP_PREFETCH => Prefetch(unsafe { value.prefetch }),
            ARM64_OP_BARRIER => Barrier(unsafe { value.barrier }),
            ARM64_OP_SVCR => SVCR(svcr),
            ARM64_OP_SME_INDEX => SMEIndex(Arm64OpSmeIndex(unsafe { value.sme_index })),
        }
    }
}

/// ARM64 operand
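///
/// A minimal construction sketch (hand-built values, e.g. for tests; decoded
/// operands normally come from instruction details):
///
/// ```
/// use capstone::arch::arm64::{Arm64Operand, Arm64OperandType};
///
/// let op = Arm64Operand {
///     op_type: Arm64OperandType::Imm(42),
///     ..Default::default()
/// };
/// assert_eq!(op.op_type, Arm64OperandType::Imm(42));
/// assert_eq!(op.vector_index, None);
/// ```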
#[derive(Clone, Debug, PartialEq)]
pub struct Arm64Operand {
    /// How is this operand accessed?
    ///
    /// NOTE: this field is always `None` if the "full" feature is not enabled.
    pub access: Option<AccessType>,

    /// Vector Index for some vector operands
    pub vector_index: Option<u32>,

    /// Vector arrangement specifier (for FloatingPoint/Advanced SIMD insn)
    pub vas: Arm64Vas,

    /// Shifter of this operand
    pub shift: Arm64Shift,

    /// Extender type of this operand
    pub ext: Arm64Extender,

    /// Operand type
    pub op_type: Arm64OperandType,
}

/// ARM64 operand type
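///
/// Typically inspected by matching on the variant; a small sketch with a
/// hand-made value (rather than one produced by the disassembler):
///
/// ```
/// use capstone::arch::arm64::Arm64OperandType;
///
/// let op_type = Arm64OperandType::Imm(16);
/// if let Arm64OperandType::Imm(imm) = op_type {
///     assert_eq!(imm, 16);
/// }
/// ```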
#[derive(Clone, Debug, PartialEq)]
pub enum Arm64OperandType {
    /// Register
    Reg(RegId),

    /// Immediate
    Imm(i64),

    /// Memory
    Mem(Arm64OpMem),

    /// Floating point
    Fp(f64),

    /// C-IMM
    Cimm(i64),

    /// System register MRS (read a system register into a general-purpose register)
    RegMrs(Arm64Sysreg),

    /// System register MSR (write a general-purpose register to a system register)
    RegMsr(Arm64Sysreg),

    /// System PState Field (MSR instruction)
    Pstate(Arm64Pstate),

    /// System operation (IC/DC/AT/TLBI)
    Sys(Arm64SysOp),

    /// PRFM operation
    Prefetch(ArmPrefetchOp),

    /// Memory barrier operation (ISB/DMB/DSB instructions)
    Barrier(Arm64BarrierOp),

    /// SMSTART/SMSTOP mode (Streaming SVE & ZA storage)
    SVCR(Arm64SvcrOp),

    /// SME index
    SMEIndex(Arm64OpSmeIndex),

    /// Invalid
    Invalid,
}

/// ARM64 memory operand
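///
/// Carried by `Arm64OperandType::Mem`; a sketch of reading the decoded fields
/// out of an operand (the operand itself is assumed to come from instruction
/// details):
///
/// ```
/// use capstone::arch::arm64::{Arm64Operand, Arm64OperandType};
///
/// fn describe_mem(op: &Arm64Operand) -> Option<String> {
///     if let Arm64OperandType::Mem(mem) = &op.op_type {
///         Some(format!(
///             "base={:?} index={:?} disp={}",
///             mem.base(),
///             mem.index(),
///             mem.disp()
///         ))
///     } else {
///         None
///     }
/// }
/// ```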
#[derive(Debug, Copy, Clone)]
pub struct Arm64OpMem(pub(crate) arm64_op_mem);

impl Arm64InsnDetail<'_> {
    /// Condition codes
    pub fn cc(&self) -> Arm64CC {
        self.0.cc
    }

    /// Whether this insn updates flags
    pub fn update_flags(&self) -> bool {
        self.0.update_flags
    }

    /// Whether writeback is required
    pub fn writeback(&self) -> bool {
        self.0.writeback
    }
}

impl_PartialEq_repr_fields!(Arm64InsnDetail<'a> [ 'a ];
    cc, update_flags, writeback, operands
);

impl Arm64OpMem {
    /// Base register
    pub fn base(&self) -> RegId {
        RegId(self.0.base as RegIdInt)
    }

    /// Index register
    pub fn index(&self) -> RegId {
        RegId(self.0.index as RegIdInt)
    }

    /// Disp value
    pub fn disp(&self) -> i32 {
        self.0.disp as i32
    }
}

impl_PartialEq_repr_fields!(Arm64OpMem;
    base, index, disp
);

impl cmp::Eq for Arm64OpMem {}

/// ARM64 SME index operand
#[derive(Debug, Copy, Clone)]
pub struct Arm64OpSmeIndex(pub(crate) arm64_op_sme_index);

impl Arm64OpSmeIndex {
    /// Register being indexed
    pub fn reg(&self) -> RegId {
        RegId(self.0.reg as RegIdInt)
    }

    /// Base register
    pub fn base(&self) -> RegId {
        RegId(self.0.base as RegIdInt)
    }

    /// Disp value
    pub fn disp(&self) -> i32 {
        self.0.disp as i32
    }
}

impl_PartialEq_repr_fields!(Arm64OpSmeIndex;
    reg, base, disp
);

impl Default for Arm64Operand {
    fn default() -> Self {
        Arm64Operand {
            access: None,
            vector_index: None,
            vas: Arm64Vas::ARM64_VAS_INVALID,
            shift: Arm64Shift::Invalid,
            ext: Arm64Extender::ARM64_EXT_INVALID,
            op_type: Arm64OperandType::Invalid,
        }
    }
}

impl Arm64Shift {
    fn new(type_: arm64_shifter, value: c_uint) -> Arm64Shift {
        use self::arm64_shifter::*;
        use self::Arm64Shift::*;

        macro_rules! arm64_shift_match {
            (
                $( $imm_r_enum:ident = $imm_c_enum:ident, )*
            ) => {
                match type_ {
                    ARM64_SFT_INVALID => Invalid,

                    $(
                        $imm_c_enum => $imm_r_enum(value as u32),
                    )*
                }
            }
        }

        arm64_shift_match!(
            Lsl = ARM64_SFT_LSL,
            Msl = ARM64_SFT_MSL,
            Lsr = ARM64_SFT_LSR,
            Asr = ARM64_SFT_ASR,
            Ror = ARM64_SFT_ROR,
        )
    }
}

impl From<&cs_arm64_op> for Arm64Operand {
    fn from(op: &cs_arm64_op) -> Arm64Operand {
        let shift = Arm64Shift::new(op.shift.type_, op.shift.value);
        let op_type = Arm64OperandType::new(op.type_, op.__bindgen_anon_1, op.svcr);
        let vector_index = if op.vector_index >= 0 {
            Some(op.vector_index as u32)
        } else {
            None
        };
        Arm64Operand {
            access: cs_ac_type(op.access as _).try_into().ok(),
            vector_index,
            vas: op.vas,
            shift,
            ext: op.ext,
            op_type,
        }
    }
}

def_arch_details_struct!(
    InsnDetail = Arm64InsnDetail;
    Operand = Arm64Operand;
    OperandIterator = Arm64OperandIterator;
    OperandIteratorLife = Arm64OperandIterator<'a>;
    [ pub struct Arm64OperandIterator<'a>(slice::Iter<'a, cs_arm64_op>); ]
    cs_arch_op = cs_arm64_op;
    cs_arch = cs_arm64;
);

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_arm64shift() {
        use super::arm64_shifter::*;
        use super::Arm64Shift::*;
        use core::ffi::c_uint;

        fn t(shift_type_value: (arm64_shifter, c_uint), arm64_shift: Arm64Shift) {
            let (shift_type, value) = shift_type_value;
            assert_eq!(arm64_shift, Arm64Shift::new(shift_type, value));
        }

        t((ARM64_SFT_INVALID, 0), Invalid);
        t((ARM64_SFT_ASR, 0), Asr(0));
    }

    #[test]
    fn test_arm64_op_type() {
        use super::arm64_op_type::*;
        use super::Arm64OperandType::*;
        use super::Arm64Sysreg::*;
        use capstone_sys::arm64_prefetch_op::*;
        use capstone_sys::arm64_pstate::*;
        use capstone_sys::arm64_svcr_op::*;
        use capstone_sys::*;

        fn t(
            op_type_value: (arm64_op_type, cs_arm64_op__bindgen_ty_2, arm64_svcr_op),
            expected_op_type: Arm64OperandType,
        ) {
            let (op_type, op_value, op_svcr) = op_type_value;
            let op_type = Arm64OperandType::new(op_type, op_value, op_svcr);
            assert_eq!(expected_op_type, op_type);
        }

        t(
            (
                ARM64_OP_INVALID,
                cs_arm64_op__bindgen_ty_2 { reg: 0 },
                ARM64_SVCR_INVALID,
            ),
            Invalid,
        );
        t(
            (
                ARM64_OP_REG,
                cs_arm64_op__bindgen_ty_2 { reg: 0 },
                ARM64_SVCR_INVALID,
            ),
            Reg(RegId(0)),
        );
        t(
            (
                ARM64_OP_IMM,
                cs_arm64_op__bindgen_ty_2 { imm: 42 },
                ARM64_SVCR_INVALID,
            ),
            Imm(42),
        );
        t(
            (
                ARM64_OP_REG_MRS,
                cs_arm64_op__bindgen_ty_2 {
                    reg: ARM64_SYSREG_MDRAR_EL1 as arm64_reg::Type,
                },
                ARM64_SVCR_INVALID,
            ),
            RegMrs(ARM64_SYSREG_MDRAR_EL1),
        );
        t(
            (
                ARM64_OP_PSTATE,
                cs_arm64_op__bindgen_ty_2 {
                    pstate: ARM64_PSTATE_SPSEL,
                },
                ARM64_SVCR_INVALID,
            ),
            Pstate(Arm64Pstate::ARM64_PSTATE_SPSEL),
        );
        t(
            (
                ARM64_OP_FP,
                cs_arm64_op__bindgen_ty_2 { fp: 0.0 },
                ARM64_SVCR_INVALID,
            ),
            Fp(0.0),
        );
        t(
            (
                ARM64_OP_CIMM,
                cs_arm64_op__bindgen_ty_2 { imm: 42 },
                ARM64_SVCR_INVALID,
            ),
            Cimm(42),
        );
        t(
            (
                ARM64_OP_REG_MSR,
                cs_arm64_op__bindgen_ty_2 {
                    reg: arm64_sysreg::ARM64_SYSREG_ICC_EOIR1_EL1 as arm64_reg::Type,
                },
                ARM64_SVCR_INVALID,
            ),
            RegMsr(arm64_sysreg::ARM64_SYSREG_ICC_EOIR1_EL1),
        );
        t(
            (
                ARM64_OP_SYS,
                cs_arm64_op__bindgen_ty_2 {
                    sys: arm64_sys_op::ARM64_AT_S1E0R,
                },
                ARM64_SVCR_INVALID,
            ),
            Sys(arm64_sys_op::ARM64_AT_S1E0R),
        );
        t(
            (
                ARM64_OP_PREFETCH,
                cs_arm64_op__bindgen_ty_2 {
                    prefetch: ARM64_PRFM_PLDL2KEEP,
                },
                ARM64_SVCR_INVALID,
            ),
            Prefetch(ARM64_PRFM_PLDL2KEEP),
        );
        t(
            (
                ARM64_OP_SVCR,
                cs_arm64_op__bindgen_ty_2 { reg: 0 },
                ARM64_SVCR_SVCRSM,
            ),
            SVCR(ARM64_SVCR_SVCRSM),
        );
        t(
            (
                ARM64_OP_SME_INDEX,
                cs_arm64_op__bindgen_ty_2 {
                    sme_index: arm64_op_sme_index {
                        reg: 1,
                        base: 2,
                        disp: 3,
                    },
                },
                ARM64_SVCR_INVALID,
            ),
            SMEIndex(Arm64OpSmeIndex(arm64_op_sme_index {
                reg: 1,
                base: 2,
                disp: 3,
            })),
        );
    }
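
    #[test]
    fn test_arm64_op_mem_accessors() {
        // Sanity-check the Arm64OpMem accessors against a hand-built
        // capstone-sys value (field meanings assumed from the accessors above:
        // base/index are register ids, disp is a signed displacement).
        let mem = Arm64OpMem(arm64_op_mem {
            base: 1,
            index: 2,
            disp: -4,
        });
        assert_eq!(mem.base(), RegId(1));
        assert_eq!(mem.index(), RegId(2));
        assert_eq!(mem.disp(), -4);
    }

    #[test]
    fn test_arm64_operand_default() {
        // The Default impl should produce an "empty" operand.
        assert_eq!(
            Arm64Operand::default(),
            Arm64Operand {
                access: None,
                vector_index: None,
                vas: Arm64Vas::ARM64_VAS_INVALID,
                shift: Arm64Shift::Invalid,
                ext: Arm64Extender::ARM64_EXT_INVALID,
                op_type: Arm64OperandType::Invalid,
            }
        );
    }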
}