use cstr_core::CStr;
use num_traits::ToPrimitive;
use bad64_sys::*;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, FromPrimitive, ToPrimitive)]
#[repr(u32)]
#[allow(non_camel_case_types)]
pub enum Reg {
W0 = Register_REG_W0 as u32,
W1 = Register_REG_W1 as u32,
W2 = Register_REG_W2 as u32,
W3 = Register_REG_W3 as u32,
W4 = Register_REG_W4 as u32,
W5 = Register_REG_W5 as u32,
W6 = Register_REG_W6 as u32,
W7 = Register_REG_W7 as u32,
W8 = Register_REG_W8 as u32,
W9 = Register_REG_W9 as u32,
W10 = Register_REG_W10 as u32,
W11 = Register_REG_W11 as u32,
W12 = Register_REG_W12 as u32,
W13 = Register_REG_W13 as u32,
W14 = Register_REG_W14 as u32,
W15 = Register_REG_W15 as u32,
W16 = Register_REG_W16 as u32,
W17 = Register_REG_W17 as u32,
W18 = Register_REG_W18 as u32,
W19 = Register_REG_W19 as u32,
W20 = Register_REG_W20 as u32,
W21 = Register_REG_W21 as u32,
W22 = Register_REG_W22 as u32,
W23 = Register_REG_W23 as u32,
W24 = Register_REG_W24 as u32,
W25 = Register_REG_W25 as u32,
W26 = Register_REG_W26 as u32,
W27 = Register_REG_W27 as u32,
W28 = Register_REG_W28 as u32,
W29 = Register_REG_W29 as u32,
W30 = Register_REG_W30 as u32,
WZR = Register_REG_WZR as u32,
WSP = Register_REG_WSP as u32,
X0 = Register_REG_X0 as u32,
X1 = Register_REG_X1 as u32,
X2 = Register_REG_X2 as u32,
X3 = Register_REG_X3 as u32,
X4 = Register_REG_X4 as u32,
X5 = Register_REG_X5 as u32,
X6 = Register_REG_X6 as u32,
X7 = Register_REG_X7 as u32,
X8 = Register_REG_X8 as u32,
X9 = Register_REG_X9 as u32,
X10 = Register_REG_X10 as u32,
X11 = Register_REG_X11 as u32,
X12 = Register_REG_X12 as u32,
X13 = Register_REG_X13 as u32,
X14 = Register_REG_X14 as u32,
X15 = Register_REG_X15 as u32,
X16 = Register_REG_X16 as u32,
X17 = Register_REG_X17 as u32,
X18 = Register_REG_X18 as u32,
X19 = Register_REG_X19 as u32,
X20 = Register_REG_X20 as u32,
X21 = Register_REG_X21 as u32,
X22 = Register_REG_X22 as u32,
X23 = Register_REG_X23 as u32,
X24 = Register_REG_X24 as u32,
X25 = Register_REG_X25 as u32,
X26 = Register_REG_X26 as u32,
X27 = Register_REG_X27 as u32,
X28 = Register_REG_X28 as u32,
X29 = Register_REG_X29 as u32,
X30 = Register_REG_X30 as u32,
XZR = Register_REG_XZR as u32,
SP = Register_REG_SP as u32,
V0 = Register_REG_V0 as u32,
V1 = Register_REG_V1 as u32,
V2 = Register_REG_V2 as u32,
V3 = Register_REG_V3 as u32,
V4 = Register_REG_V4 as u32,
V5 = Register_REG_V5 as u32,
V6 = Register_REG_V6 as u32,
V7 = Register_REG_V7 as u32,
V8 = Register_REG_V8 as u32,
V9 = Register_REG_V9 as u32,
V10 = Register_REG_V10 as u32,
V11 = Register_REG_V11 as u32,
V12 = Register_REG_V12 as u32,
V13 = Register_REG_V13 as u32,
V14 = Register_REG_V14 as u32,
V15 = Register_REG_V15 as u32,
V16 = Register_REG_V16 as u32,
V17 = Register_REG_V17 as u32,
V18 = Register_REG_V18 as u32,
V19 = Register_REG_V19 as u32,
V20 = Register_REG_V20 as u32,
V21 = Register_REG_V21 as u32,
V22 = Register_REG_V22 as u32,
V23 = Register_REG_V23 as u32,
V24 = Register_REG_V24 as u32,
V25 = Register_REG_V25 as u32,
V26 = Register_REG_V26 as u32,
V27 = Register_REG_V27 as u32,
V28 = Register_REG_V28 as u32,
V29 = Register_REG_V29 as u32,
V30 = Register_REG_V30 as u32,
VZR = Register_REG_VZR as u32,
V31 = Register_REG_V31 as u32,
B0 = Register_REG_B0 as u32,
B1 = Register_REG_B1 as u32,
B2 = Register_REG_B2 as u32,
B3 = Register_REG_B3 as u32,
B4 = Register_REG_B4 as u32,
B5 = Register_REG_B5 as u32,
B6 = Register_REG_B6 as u32,
B7 = Register_REG_B7 as u32,
B8 = Register_REG_B8 as u32,
B9 = Register_REG_B9 as u32,
B10 = Register_REG_B10 as u32,
B11 = Register_REG_B11 as u32,
B12 = Register_REG_B12 as u32,
B13 = Register_REG_B13 as u32,
B14 = Register_REG_B14 as u32,
B15 = Register_REG_B15 as u32,
B16 = Register_REG_B16 as u32,
B17 = Register_REG_B17 as u32,
B18 = Register_REG_B18 as u32,
B19 = Register_REG_B19 as u32,
B20 = Register_REG_B20 as u32,
B21 = Register_REG_B21 as u32,
B22 = Register_REG_B22 as u32,
B23 = Register_REG_B23 as u32,
B24 = Register_REG_B24 as u32,
B25 = Register_REG_B25 as u32,
B26 = Register_REG_B26 as u32,
B27 = Register_REG_B27 as u32,
B28 = Register_REG_B28 as u32,
B29 = Register_REG_B29 as u32,
B30 = Register_REG_B30 as u32,
BZR = Register_REG_BZR as u32,
B31 = Register_REG_B31 as u32,
H0 = Register_REG_H0 as u32,
H1 = Register_REG_H1 as u32,
H2 = Register_REG_H2 as u32,
H3 = Register_REG_H3 as u32,
H4 = Register_REG_H4 as u32,
H5 = Register_REG_H5 as u32,
H6 = Register_REG_H6 as u32,
H7 = Register_REG_H7 as u32,
H8 = Register_REG_H8 as u32,
H9 = Register_REG_H9 as u32,
H10 = Register_REG_H10 as u32,
H11 = Register_REG_H11 as u32,
H12 = Register_REG_H12 as u32,
H13 = Register_REG_H13 as u32,
H14 = Register_REG_H14 as u32,
H15 = Register_REG_H15 as u32,
H16 = Register_REG_H16 as u32,
H17 = Register_REG_H17 as u32,
H18 = Register_REG_H18 as u32,
H19 = Register_REG_H19 as u32,
H20 = Register_REG_H20 as u32,
H21 = Register_REG_H21 as u32,
H22 = Register_REG_H22 as u32,
H23 = Register_REG_H23 as u32,
H24 = Register_REG_H24 as u32,
H25 = Register_REG_H25 as u32,
H26 = Register_REG_H26 as u32,
H27 = Register_REG_H27 as u32,
H28 = Register_REG_H28 as u32,
H29 = Register_REG_H29 as u32,
H30 = Register_REG_H30 as u32,
HZR = Register_REG_HZR as u32,
H31 = Register_REG_H31 as u32,
S0 = Register_REG_S0 as u32,
S1 = Register_REG_S1 as u32,
S2 = Register_REG_S2 as u32,
S3 = Register_REG_S3 as u32,
S4 = Register_REG_S4 as u32,
S5 = Register_REG_S5 as u32,
S6 = Register_REG_S6 as u32,
S7 = Register_REG_S7 as u32,
S8 = Register_REG_S8 as u32,
S9 = Register_REG_S9 as u32,
S10 = Register_REG_S10 as u32,
S11 = Register_REG_S11 as u32,
S12 = Register_REG_S12 as u32,
S13 = Register_REG_S13 as u32,
S14 = Register_REG_S14 as u32,
S15 = Register_REG_S15 as u32,
S16 = Register_REG_S16 as u32,
S17 = Register_REG_S17 as u32,
S18 = Register_REG_S18 as u32,
S19 = Register_REG_S19 as u32,
S20 = Register_REG_S20 as u32,
S21 = Register_REG_S21 as u32,
S22 = Register_REG_S22 as u32,
S23 = Register_REG_S23 as u32,
S24 = Register_REG_S24 as u32,
S25 = Register_REG_S25 as u32,
S26 = Register_REG_S26 as u32,
S27 = Register_REG_S27 as u32,
S28 = Register_REG_S28 as u32,
S29 = Register_REG_S29 as u32,
S30 = Register_REG_S30 as u32,
SZR = Register_REG_SZR as u32,
S31 = Register_REG_S31 as u32,
D0 = Register_REG_D0 as u32,
D1 = Register_REG_D1 as u32,
D2 = Register_REG_D2 as u32,
D3 = Register_REG_D3 as u32,
D4 = Register_REG_D4 as u32,
D5 = Register_REG_D5 as u32,
D6 = Register_REG_D6 as u32,
D7 = Register_REG_D7 as u32,
D8 = Register_REG_D8 as u32,
D9 = Register_REG_D9 as u32,
D10 = Register_REG_D10 as u32,
D11 = Register_REG_D11 as u32,
D12 = Register_REG_D12 as u32,
D13 = Register_REG_D13 as u32,
D14 = Register_REG_D14 as u32,
D15 = Register_REG_D15 as u32,
D16 = Register_REG_D16 as u32,
D17 = Register_REG_D17 as u32,
D18 = Register_REG_D18 as u32,
D19 = Register_REG_D19 as u32,
D20 = Register_REG_D20 as u32,
D21 = Register_REG_D21 as u32,
D22 = Register_REG_D22 as u32,
D23 = Register_REG_D23 as u32,
D24 = Register_REG_D24 as u32,
D25 = Register_REG_D25 as u32,
D26 = Register_REG_D26 as u32,
D27 = Register_REG_D27 as u32,
D28 = Register_REG_D28 as u32,
D29 = Register_REG_D29 as u32,
D30 = Register_REG_D30 as u32,
DZR = Register_REG_DZR as u32,
D31 = Register_REG_D31 as u32,
Q0 = Register_REG_Q0 as u32,
Q1 = Register_REG_Q1 as u32,
Q2 = Register_REG_Q2 as u32,
Q3 = Register_REG_Q3 as u32,
Q4 = Register_REG_Q4 as u32,
Q5 = Register_REG_Q5 as u32,
Q6 = Register_REG_Q6 as u32,
Q7 = Register_REG_Q7 as u32,
Q8 = Register_REG_Q8 as u32,
Q9 = Register_REG_Q9 as u32,
Q10 = Register_REG_Q10 as u32,
Q11 = Register_REG_Q11 as u32,
Q12 = Register_REG_Q12 as u32,
Q13 = Register_REG_Q13 as u32,
Q14 = Register_REG_Q14 as u32,
Q15 = Register_REG_Q15 as u32,
Q16 = Register_REG_Q16 as u32,
Q17 = Register_REG_Q17 as u32,
Q18 = Register_REG_Q18 as u32,
Q19 = Register_REG_Q19 as u32,
Q20 = Register_REG_Q20 as u32,
Q21 = Register_REG_Q21 as u32,
Q22 = Register_REG_Q22 as u32,
Q23 = Register_REG_Q23 as u32,
Q24 = Register_REG_Q24 as u32,
Q25 = Register_REG_Q25 as u32,
Q26 = Register_REG_Q26 as u32,
Q27 = Register_REG_Q27 as u32,
Q28 = Register_REG_Q28 as u32,
Q29 = Register_REG_Q29 as u32,
Q30 = Register_REG_Q30 as u32,
QZR = Register_REG_QZR as u32,
Q31 = Register_REG_Q31 as u32,
V0_B0 = Register_REG_V0_B0 as u32,
V0_B1 = Register_REG_V0_B1 as u32,
V0_B2 = Register_REG_V0_B2 as u32,
V0_B3 = Register_REG_V0_B3 as u32,
V0_B4 = Register_REG_V0_B4 as u32,
V0_B5 = Register_REG_V0_B5 as u32,
V0_B6 = Register_REG_V0_B6 as u32,
V0_B7 = Register_REG_V0_B7 as u32,
V0_B8 = Register_REG_V0_B8 as u32,
V0_B9 = Register_REG_V0_B9 as u32,
V0_B10 = Register_REG_V0_B10 as u32,
V0_B11 = Register_REG_V0_B11 as u32,
V0_B12 = Register_REG_V0_B12 as u32,
V0_B13 = Register_REG_V0_B13 as u32,
V0_B14 = Register_REG_V0_B14 as u32,
V0_B15 = Register_REG_V0_B15 as u32,
V1_B0 = Register_REG_V1_B0 as u32,
V1_B1 = Register_REG_V1_B1 as u32,
V1_B2 = Register_REG_V1_B2 as u32,
V1_B3 = Register_REG_V1_B3 as u32,
V1_B4 = Register_REG_V1_B4 as u32,
V1_B5 = Register_REG_V1_B5 as u32,
V1_B6 = Register_REG_V1_B6 as u32,
V1_B7 = Register_REG_V1_B7 as u32,
V1_B8 = Register_REG_V1_B8 as u32,
V1_B9 = Register_REG_V1_B9 as u32,
V1_B10 = Register_REG_V1_B10 as u32,
V1_B11 = Register_REG_V1_B11 as u32,
V1_B12 = Register_REG_V1_B12 as u32,
V1_B13 = Register_REG_V1_B13 as u32,
V1_B14 = Register_REG_V1_B14 as u32,
V1_B15 = Register_REG_V1_B15 as u32,
V2_B0 = Register_REG_V2_B0 as u32,
V2_B1 = Register_REG_V2_B1 as u32,
V2_B2 = Register_REG_V2_B2 as u32,
V2_B3 = Register_REG_V2_B3 as u32,
V2_B4 = Register_REG_V2_B4 as u32,
V2_B5 = Register_REG_V2_B5 as u32,
V2_B6 = Register_REG_V2_B6 as u32,
V2_B7 = Register_REG_V2_B7 as u32,
V2_B8 = Register_REG_V2_B8 as u32,
V2_B9 = Register_REG_V2_B9 as u32,
V2_B10 = Register_REG_V2_B10 as u32,
V2_B11 = Register_REG_V2_B11 as u32,
V2_B12 = Register_REG_V2_B12 as u32,
V2_B13 = Register_REG_V2_B13 as u32,
V2_B14 = Register_REG_V2_B14 as u32,
V2_B15 = Register_REG_V2_B15 as u32,
V3_B0 = Register_REG_V3_B0 as u32,
V3_B1 = Register_REG_V3_B1 as u32,
V3_B2 = Register_REG_V3_B2 as u32,
V3_B3 = Register_REG_V3_B3 as u32,
V3_B4 = Register_REG_V3_B4 as u32,
V3_B5 = Register_REG_V3_B5 as u32,
V3_B6 = Register_REG_V3_B6 as u32,
V3_B7 = Register_REG_V3_B7 as u32,
V3_B8 = Register_REG_V3_B8 as u32,
V3_B9 = Register_REG_V3_B9 as u32,
V3_B10 = Register_REG_V3_B10 as u32,
V3_B11 = Register_REG_V3_B11 as u32,
V3_B12 = Register_REG_V3_B12 as u32,
V3_B13 = Register_REG_V3_B13 as u32,
V3_B14 = Register_REG_V3_B14 as u32,
V3_B15 = Register_REG_V3_B15 as u32,
V4_B0 = Register_REG_V4_B0 as u32,
V4_B1 = Register_REG_V4_B1 as u32,
V4_B2 = Register_REG_V4_B2 as u32,
V4_B3 = Register_REG_V4_B3 as u32,
V4_B4 = Register_REG_V4_B4 as u32,
V4_B5 = Register_REG_V4_B5 as u32,
V4_B6 = Register_REG_V4_B6 as u32,
V4_B7 = Register_REG_V4_B7 as u32,
V4_B8 = Register_REG_V4_B8 as u32,
V4_B9 = Register_REG_V4_B9 as u32,
V4_B10 = Register_REG_V4_B10 as u32,
V4_B11 = Register_REG_V4_B11 as u32,
V4_B12 = Register_REG_V4_B12 as u32,
V4_B13 = Register_REG_V4_B13 as u32,
V4_B14 = Register_REG_V4_B14 as u32,
V4_B15 = Register_REG_V4_B15 as u32,
V5_B0 = Register_REG_V5_B0 as u32,
V5_B1 = Register_REG_V5_B1 as u32,
V5_B2 = Register_REG_V5_B2 as u32,
V5_B3 = Register_REG_V5_B3 as u32,
V5_B4 = Register_REG_V5_B4 as u32,
V5_B5 = Register_REG_V5_B5 as u32,
V5_B6 = Register_REG_V5_B6 as u32,
V5_B7 = Register_REG_V5_B7 as u32,
V5_B8 = Register_REG_V5_B8 as u32,
V5_B9 = Register_REG_V5_B9 as u32,
V5_B10 = Register_REG_V5_B10 as u32,
V5_B11 = Register_REG_V5_B11 as u32,
V5_B12 = Register_REG_V5_B12 as u32,
V5_B13 = Register_REG_V5_B13 as u32,
V5_B14 = Register_REG_V5_B14 as u32,
V5_B15 = Register_REG_V5_B15 as u32,
V6_B0 = Register_REG_V6_B0 as u32,
V6_B1 = Register_REG_V6_B1 as u32,
V6_B2 = Register_REG_V6_B2 as u32,
V6_B3 = Register_REG_V6_B3 as u32,
V6_B4 = Register_REG_V6_B4 as u32,
V6_B5 = Register_REG_V6_B5 as u32,
V6_B6 = Register_REG_V6_B6 as u32,
V6_B7 = Register_REG_V6_B7 as u32,
V6_B8 = Register_REG_V6_B8 as u32,
V6_B9 = Register_REG_V6_B9 as u32,
V6_B10 = Register_REG_V6_B10 as u32,
V6_B11 = Register_REG_V6_B11 as u32,
V6_B12 = Register_REG_V6_B12 as u32,
V6_B13 = Register_REG_V6_B13 as u32,
V6_B14 = Register_REG_V6_B14 as u32,
V6_B15 = Register_REG_V6_B15 as u32,
V7_B0 = Register_REG_V7_B0 as u32,
V7_B1 = Register_REG_V7_B1 as u32,
V7_B2 = Register_REG_V7_B2 as u32,
V7_B3 = Register_REG_V7_B3 as u32,
V7_B4 = Register_REG_V7_B4 as u32,
V7_B5 = Register_REG_V7_B5 as u32,
V7_B6 = Register_REG_V7_B6 as u32,
V7_B7 = Register_REG_V7_B7 as u32,
V7_B8 = Register_REG_V7_B8 as u32,
V7_B9 = Register_REG_V7_B9 as u32,
V7_B10 = Register_REG_V7_B10 as u32,
V7_B11 = Register_REG_V7_B11 as u32,
V7_B12 = Register_REG_V7_B12 as u32,
V7_B13 = Register_REG_V7_B13 as u32,
V7_B14 = Register_REG_V7_B14 as u32,
V7_B15 = Register_REG_V7_B15 as u32,
V8_B0 = Register_REG_V8_B0 as u32,
V8_B1 = Register_REG_V8_B1 as u32,
V8_B2 = Register_REG_V8_B2 as u32,
V8_B3 = Register_REG_V8_B3 as u32,
V8_B4 = Register_REG_V8_B4 as u32,
V8_B5 = Register_REG_V8_B5 as u32,
V8_B6 = Register_REG_V8_B6 as u32,
V8_B7 = Register_REG_V8_B7 as u32,
V8_B8 = Register_REG_V8_B8 as u32,
V8_B9 = Register_REG_V8_B9 as u32,
V8_B10 = Register_REG_V8_B10 as u32,
V8_B11 = Register_REG_V8_B11 as u32,
V8_B12 = Register_REG_V8_B12 as u32,
V8_B13 = Register_REG_V8_B13 as u32,
V8_B14 = Register_REG_V8_B14 as u32,
V8_B15 = Register_REG_V8_B15 as u32,
V9_B0 = Register_REG_V9_B0 as u32,
V9_B1 = Register_REG_V9_B1 as u32,
V9_B2 = Register_REG_V9_B2 as u32,
V9_B3 = Register_REG_V9_B3 as u32,
V9_B4 = Register_REG_V9_B4 as u32,
V9_B5 = Register_REG_V9_B5 as u32,
V9_B6 = Register_REG_V9_B6 as u32,
V9_B7 = Register_REG_V9_B7 as u32,
V9_B8 = Register_REG_V9_B8 as u32,
V9_B9 = Register_REG_V9_B9 as u32,
V9_B10 = Register_REG_V9_B10 as u32,
V9_B11 = Register_REG_V9_B11 as u32,
V9_B12 = Register_REG_V9_B12 as u32,
V9_B13 = Register_REG_V9_B13 as u32,
V9_B14 = Register_REG_V9_B14 as u32,
V9_B15 = Register_REG_V9_B15 as u32,
V10_B0 = Register_REG_V10_B0 as u32,
V10_B1 = Register_REG_V10_B1 as u32,
V10_B2 = Register_REG_V10_B2 as u32,
V10_B3 = Register_REG_V10_B3 as u32,
V10_B4 = Register_REG_V10_B4 as u32,
V10_B5 = Register_REG_V10_B5 as u32,
V10_B6 = Register_REG_V10_B6 as u32,
V10_B7 = Register_REG_V10_B7 as u32,
V10_B8 = Register_REG_V10_B8 as u32,
V10_B9 = Register_REG_V10_B9 as u32,
V10_B10 = Register_REG_V10_B10 as u32,
V10_B11 = Register_REG_V10_B11 as u32,
V10_B12 = Register_REG_V10_B12 as u32,
V10_B13 = Register_REG_V10_B13 as u32,
V10_B14 = Register_REG_V10_B14 as u32,
V10_B15 = Register_REG_V10_B15 as u32,
V11_B0 = Register_REG_V11_B0 as u32,
V11_B1 = Register_REG_V11_B1 as u32,
V11_B2 = Register_REG_V11_B2 as u32,
V11_B3 = Register_REG_V11_B3 as u32,
V11_B4 = Register_REG_V11_B4 as u32,
V11_B5 = Register_REG_V11_B5 as u32,
V11_B6 = Register_REG_V11_B6 as u32,
V11_B7 = Register_REG_V11_B7 as u32,
V11_B8 = Register_REG_V11_B8 as u32,
V11_B9 = Register_REG_V11_B9 as u32,
V11_B10 = Register_REG_V11_B10 as u32,
V11_B11 = Register_REG_V11_B11 as u32,
V11_B12 = Register_REG_V11_B12 as u32,
V11_B13 = Register_REG_V11_B13 as u32,
V11_B14 = Register_REG_V11_B14 as u32,
V11_B15 = Register_REG_V11_B15 as u32,
V12_B0 = Register_REG_V12_B0 as u32,
V12_B1 = Register_REG_V12_B1 as u32,
V12_B2 = Register_REG_V12_B2 as u32,
V12_B3 = Register_REG_V12_B3 as u32,
V12_B4 = Register_REG_V12_B4 as u32,
V12_B5 = Register_REG_V12_B5 as u32,
V12_B6 = Register_REG_V12_B6 as u32,
V12_B7 = Register_REG_V12_B7 as u32,
V12_B8 = Register_REG_V12_B8 as u32,
V12_B9 = Register_REG_V12_B9 as u32,
V12_B10 = Register_REG_V12_B10 as u32,
V12_B11 = Register_REG_V12_B11 as u32,
V12_B12 = Register_REG_V12_B12 as u32,
V12_B13 = Register_REG_V12_B13 as u32,
V12_B14 = Register_REG_V12_B14 as u32,
V12_B15 = Register_REG_V12_B15 as u32,
V13_B0 = Register_REG_V13_B0 as u32,
V13_B1 = Register_REG_V13_B1 as u32,
V13_B2 = Register_REG_V13_B2 as u32,
V13_B3 = Register_REG_V13_B3 as u32,
V13_B4 = Register_REG_V13_B4 as u32,
V13_B5 = Register_REG_V13_B5 as u32,
V13_B6 = Register_REG_V13_B6 as u32,
V13_B7 = Register_REG_V13_B7 as u32,
V13_B8 = Register_REG_V13_B8 as u32,
V13_B9 = Register_REG_V13_B9 as u32,
V13_B10 = Register_REG_V13_B10 as u32,
V13_B11 = Register_REG_V13_B11 as u32,
V13_B12 = Register_REG_V13_B12 as u32,
V13_B13 = Register_REG_V13_B13 as u32,
V13_B14 = Register_REG_V13_B14 as u32,
V13_B15 = Register_REG_V13_B15 as u32,
V14_B0 = Register_REG_V14_B0 as u32,
V14_B1 = Register_REG_V14_B1 as u32,
V14_B2 = Register_REG_V14_B2 as u32,
V14_B3 = Register_REG_V14_B3 as u32,
V14_B4 = Register_REG_V14_B4 as u32,
V14_B5 = Register_REG_V14_B5 as u32,
V14_B6 = Register_REG_V14_B6 as u32,
V14_B7 = Register_REG_V14_B7 as u32,
V14_B8 = Register_REG_V14_B8 as u32,
V14_B9 = Register_REG_V14_B9 as u32,
V14_B10 = Register_REG_V14_B10 as u32,
V14_B11 = Register_REG_V14_B11 as u32,
V14_B12 = Register_REG_V14_B12 as u32,
V14_B13 = Register_REG_V14_B13 as u32,
V14_B14 = Register_REG_V14_B14 as u32,
V14_B15 = Register_REG_V14_B15 as u32,
V15_B0 = Register_REG_V15_B0 as u32,
V15_B1 = Register_REG_V15_B1 as u32,
V15_B2 = Register_REG_V15_B2 as u32,
V15_B3 = Register_REG_V15_B3 as u32,
V15_B4 = Register_REG_V15_B4 as u32,
V15_B5 = Register_REG_V15_B5 as u32,
V15_B6 = Register_REG_V15_B6 as u32,
V15_B7 = Register_REG_V15_B7 as u32,
V15_B8 = Register_REG_V15_B8 as u32,
V15_B9 = Register_REG_V15_B9 as u32,
V15_B10 = Register_REG_V15_B10 as u32,
V15_B11 = Register_REG_V15_B11 as u32,
V15_B12 = Register_REG_V15_B12 as u32,
V15_B13 = Register_REG_V15_B13 as u32,
V15_B14 = Register_REG_V15_B14 as u32,
V15_B15 = Register_REG_V15_B15 as u32,
V16_B0 = Register_REG_V16_B0 as u32,
V16_B1 = Register_REG_V16_B1 as u32,
V16_B2 = Register_REG_V16_B2 as u32,
V16_B3 = Register_REG_V16_B3 as u32,
V16_B4 = Register_REG_V16_B4 as u32,
V16_B5 = Register_REG_V16_B5 as u32,
V16_B6 = Register_REG_V16_B6 as u32,
V16_B7 = Register_REG_V16_B7 as u32,
V16_B8 = Register_REG_V16_B8 as u32,
V16_B9 = Register_REG_V16_B9 as u32,
V16_B10 = Register_REG_V16_B10 as u32,
V16_B11 = Register_REG_V16_B11 as u32,
V16_B12 = Register_REG_V16_B12 as u32,
V16_B13 = Register_REG_V16_B13 as u32,
V16_B14 = Register_REG_V16_B14 as u32,
V16_B15 = Register_REG_V16_B15 as u32,
V17_B0 = Register_REG_V17_B0 as u32,
V17_B1 = Register_REG_V17_B1 as u32,
V17_B2 = Register_REG_V17_B2 as u32,
V17_B3 = Register_REG_V17_B3 as u32,
V17_B4 = Register_REG_V17_B4 as u32,
V17_B5 = Register_REG_V17_B5 as u32,
V17_B6 = Register_REG_V17_B6 as u32,
V17_B7 = Register_REG_V17_B7 as u32,
V17_B8 = Register_REG_V17_B8 as u32,
V17_B9 = Register_REG_V17_B9 as u32,
V17_B10 = Register_REG_V17_B10 as u32,
V17_B11 = Register_REG_V17_B11 as u32,
V17_B12 = Register_REG_V17_B12 as u32,
V17_B13 = Register_REG_V17_B13 as u32,
V17_B14 = Register_REG_V17_B14 as u32,
V17_B15 = Register_REG_V17_B15 as u32,
V18_B0 = Register_REG_V18_B0 as u32,
V18_B1 = Register_REG_V18_B1 as u32,
V18_B2 = Register_REG_V18_B2 as u32,
V18_B3 = Register_REG_V18_B3 as u32,
V18_B4 = Register_REG_V18_B4 as u32,
V18_B5 = Register_REG_V18_B5 as u32,
V18_B6 = Register_REG_V18_B6 as u32,
V18_B7 = Register_REG_V18_B7 as u32,
V18_B8 = Register_REG_V18_B8 as u32,
V18_B9 = Register_REG_V18_B9 as u32,
V18_B10 = Register_REG_V18_B10 as u32,
V18_B11 = Register_REG_V18_B11 as u32,
V18_B12 = Register_REG_V18_B12 as u32,
V18_B13 = Register_REG_V18_B13 as u32,
V18_B14 = Register_REG_V18_B14 as u32,
V18_B15 = Register_REG_V18_B15 as u32,
V19_B0 = Register_REG_V19_B0 as u32,
V19_B1 = Register_REG_V19_B1 as u32,
V19_B2 = Register_REG_V19_B2 as u32,
V19_B3 = Register_REG_V19_B3 as u32,
V19_B4 = Register_REG_V19_B4 as u32,
V19_B5 = Register_REG_V19_B5 as u32,
V19_B6 = Register_REG_V19_B6 as u32,
V19_B7 = Register_REG_V19_B7 as u32,
V19_B8 = Register_REG_V19_B8 as u32,
V19_B9 = Register_REG_V19_B9 as u32,
V19_B10 = Register_REG_V19_B10 as u32,
V19_B11 = Register_REG_V19_B11 as u32,
V19_B12 = Register_REG_V19_B12 as u32,
V19_B13 = Register_REG_V19_B13 as u32,
V19_B14 = Register_REG_V19_B14 as u32,
V19_B15 = Register_REG_V19_B15 as u32,
V20_B0 = Register_REG_V20_B0 as u32,
V20_B1 = Register_REG_V20_B1 as u32,
V20_B2 = Register_REG_V20_B2 as u32,
V20_B3 = Register_REG_V20_B3 as u32,
V20_B4 = Register_REG_V20_B4 as u32,
V20_B5 = Register_REG_V20_B5 as u32,
V20_B6 = Register_REG_V20_B6 as u32,
V20_B7 = Register_REG_V20_B7 as u32,
V20_B8 = Register_REG_V20_B8 as u32,
V20_B9 = Register_REG_V20_B9 as u32,
V20_B10 = Register_REG_V20_B10 as u32,
V20_B11 = Register_REG_V20_B11 as u32,
V20_B12 = Register_REG_V20_B12 as u32,
V20_B13 = Register_REG_V20_B13 as u32,
V20_B14 = Register_REG_V20_B14 as u32,
V20_B15 = Register_REG_V20_B15 as u32,
V21_B0 = Register_REG_V21_B0 as u32,
V21_B1 = Register_REG_V21_B1 as u32,
V21_B2 = Register_REG_V21_B2 as u32,
V21_B3 = Register_REG_V21_B3 as u32,
V21_B4 = Register_REG_V21_B4 as u32,
V21_B5 = Register_REG_V21_B5 as u32,
V21_B6 = Register_REG_V21_B6 as u32,
V21_B7 = Register_REG_V21_B7 as u32,
V21_B8 = Register_REG_V21_B8 as u32,
V21_B9 = Register_REG_V21_B9 as u32,
V21_B10 = Register_REG_V21_B10 as u32,
V21_B11 = Register_REG_V21_B11 as u32,
V21_B12 = Register_REG_V21_B12 as u32,
V21_B13 = Register_REG_V21_B13 as u32,
V21_B14 = Register_REG_V21_B14 as u32,
V21_B15 = Register_REG_V21_B15 as u32,
V22_B0 = Register_REG_V22_B0 as u32,
V22_B1 = Register_REG_V22_B1 as u32,
V22_B2 = Register_REG_V22_B2 as u32,
V22_B3 = Register_REG_V22_B3 as u32,
V22_B4 = Register_REG_V22_B4 as u32,
V22_B5 = Register_REG_V22_B5 as u32,
V22_B6 = Register_REG_V22_B6 as u32,
V22_B7 = Register_REG_V22_B7 as u32,
V22_B8 = Register_REG_V22_B8 as u32,
V22_B9 = Register_REG_V22_B9 as u32,
V22_B10 = Register_REG_V22_B10 as u32,
V22_B11 = Register_REG_V22_B11 as u32,
V22_B12 = Register_REG_V22_B12 as u32,
V22_B13 = Register_REG_V22_B13 as u32,
V22_B14 = Register_REG_V22_B14 as u32,
V22_B15 = Register_REG_V22_B15 as u32,
V23_B0 = Register_REG_V23_B0 as u32,
V23_B1 = Register_REG_V23_B1 as u32,
V23_B2 = Register_REG_V23_B2 as u32,
V23_B3 = Register_REG_V23_B3 as u32,
V23_B4 = Register_REG_V23_B4 as u32,
V23_B5 = Register_REG_V23_B5 as u32,
V23_B6 = Register_REG_V23_B6 as u32,
V23_B7 = Register_REG_V23_B7 as u32,
V23_B8 = Register_REG_V23_B8 as u32,
V23_B9 = Register_REG_V23_B9 as u32,
V23_B10 = Register_REG_V23_B10 as u32,
V23_B11 = Register_REG_V23_B11 as u32,
V23_B12 = Register_REG_V23_B12 as u32,
V23_B13 = Register_REG_V23_B13 as u32,
V23_B14 = Register_REG_V23_B14 as u32,
V23_B15 = Register_REG_V23_B15 as u32,
V24_B0 = Register_REG_V24_B0 as u32,
V24_B1 = Register_REG_V24_B1 as u32,
V24_B2 = Register_REG_V24_B2 as u32,
V24_B3 = Register_REG_V24_B3 as u32,
V24_B4 = Register_REG_V24_B4 as u32,
V24_B5 = Register_REG_V24_B5 as u32,
V24_B6 = Register_REG_V24_B6 as u32,
V24_B7 = Register_REG_V24_B7 as u32,
V24_B8 = Register_REG_V24_B8 as u32,
V24_B9 = Register_REG_V24_B9 as u32,
V24_B10 = Register_REG_V24_B10 as u32,
V24_B11 = Register_REG_V24_B11 as u32,
V24_B12 = Register_REG_V24_B12 as u32,
V24_B13 = Register_REG_V24_B13 as u32,
V24_B14 = Register_REG_V24_B14 as u32,
V24_B15 = Register_REG_V24_B15 as u32,
V25_B0 = Register_REG_V25_B0 as u32,
V25_B1 = Register_REG_V25_B1 as u32,
V25_B2 = Register_REG_V25_B2 as u32,
V25_B3 = Register_REG_V25_B3 as u32,
V25_B4 = Register_REG_V25_B4 as u32,
V25_B5 = Register_REG_V25_B5 as u32,
V25_B6 = Register_REG_V25_B6 as u32,
V25_B7 = Register_REG_V25_B7 as u32,
V25_B8 = Register_REG_V25_B8 as u32,
V25_B9 = Register_REG_V25_B9 as u32,
V25_B10 = Register_REG_V25_B10 as u32,
V25_B11 = Register_REG_V25_B11 as u32,
V25_B12 = Register_REG_V25_B12 as u32,
V25_B13 = Register_REG_V25_B13 as u32,
V25_B14 = Register_REG_V25_B14 as u32,
V25_B15 = Register_REG_V25_B15 as u32,
V26_B0 = Register_REG_V26_B0 as u32,
V26_B1 = Register_REG_V26_B1 as u32,
V26_B2 = Register_REG_V26_B2 as u32,
V26_B3 = Register_REG_V26_B3 as u32,
V26_B4 = Register_REG_V26_B4 as u32,
V26_B5 = Register_REG_V26_B5 as u32,
V26_B6 = Register_REG_V26_B6 as u32,
V26_B7 = Register_REG_V26_B7 as u32,
V26_B8 = Register_REG_V26_B8 as u32,
V26_B9 = Register_REG_V26_B9 as u32,
V26_B10 = Register_REG_V26_B10 as u32,
V26_B11 = Register_REG_V26_B11 as u32,
V26_B12 = Register_REG_V26_B12 as u32,
V26_B13 = Register_REG_V26_B13 as u32,
V26_B14 = Register_REG_V26_B14 as u32,
V26_B15 = Register_REG_V26_B15 as u32,
V27_B0 = Register_REG_V27_B0 as u32,
V27_B1 = Register_REG_V27_B1 as u32,
V27_B2 = Register_REG_V27_B2 as u32,
V27_B3 = Register_REG_V27_B3 as u32,
V27_B4 = Register_REG_V27_B4 as u32,
V27_B5 = Register_REG_V27_B5 as u32,
V27_B6 = Register_REG_V27_B6 as u32,
V27_B7 = Register_REG_V27_B7 as u32,
V27_B8 = Register_REG_V27_B8 as u32,
V27_B9 = Register_REG_V27_B9 as u32,
V27_B10 = Register_REG_V27_B10 as u32,
V27_B11 = Register_REG_V27_B11 as u32,
V27_B12 = Register_REG_V27_B12 as u32,
V27_B13 = Register_REG_V27_B13 as u32,
V27_B14 = Register_REG_V27_B14 as u32,
V27_B15 = Register_REG_V27_B15 as u32,
V28_B0 = Register_REG_V28_B0 as u32,
V28_B1 = Register_REG_V28_B1 as u32,
V28_B2 = Register_REG_V28_B2 as u32,
V28_B3 = Register_REG_V28_B3 as u32,
V28_B4 = Register_REG_V28_B4 as u32,
V28_B5 = Register_REG_V28_B5 as u32,
V28_B6 = Register_REG_V28_B6 as u32,
V28_B7 = Register_REG_V28_B7 as u32,
V28_B8 = Register_REG_V28_B8 as u32,
V28_B9 = Register_REG_V28_B9 as u32,
V28_B10 = Register_REG_V28_B10 as u32,
V28_B11 = Register_REG_V28_B11 as u32,
V28_B12 = Register_REG_V28_B12 as u32,
V28_B13 = Register_REG_V28_B13 as u32,
V28_B14 = Register_REG_V28_B14 as u32,
V28_B15 = Register_REG_V28_B15 as u32,
V29_B0 = Register_REG_V29_B0 as u32,
V29_B1 = Register_REG_V29_B1 as u32,
V29_B2 = Register_REG_V29_B2 as u32,
V29_B3 = Register_REG_V29_B3 as u32,
V29_B4 = Register_REG_V29_B4 as u32,
V29_B5 = Register_REG_V29_B5 as u32,
V29_B6 = Register_REG_V29_B6 as u32,
V29_B7 = Register_REG_V29_B7 as u32,
V29_B8 = Register_REG_V29_B8 as u32,
V29_B9 = Register_REG_V29_B9 as u32,
V29_B10 = Register_REG_V29_B10 as u32,
V29_B11 = Register_REG_V29_B11 as u32,
V29_B12 = Register_REG_V29_B12 as u32,
V29_B13 = Register_REG_V29_B13 as u32,
V29_B14 = Register_REG_V29_B14 as u32,
V29_B15 = Register_REG_V29_B15 as u32,
V30_B0 = Register_REG_V30_B0 as u32,
V30_B1 = Register_REG_V30_B1 as u32,
V30_B2 = Register_REG_V30_B2 as u32,
V30_B3 = Register_REG_V30_B3 as u32,
V30_B4 = Register_REG_V30_B4 as u32,
V30_B5 = Register_REG_V30_B5 as u32,
V30_B6 = Register_REG_V30_B6 as u32,
V30_B7 = Register_REG_V30_B7 as u32,
V30_B8 = Register_REG_V30_B8 as u32,
V30_B9 = Register_REG_V30_B9 as u32,
V30_B10 = Register_REG_V30_B10 as u32,
V30_B11 = Register_REG_V30_B11 as u32,
V30_B12 = Register_REG_V30_B12 as u32,
V30_B13 = Register_REG_V30_B13 as u32,
V30_B14 = Register_REG_V30_B14 as u32,
V30_B15 = Register_REG_V30_B15 as u32,
V31_B0 = Register_REG_V31_B0 as u32,
V31_B1 = Register_REG_V31_B1 as u32,
V31_B2 = Register_REG_V31_B2 as u32,
V31_B3 = Register_REG_V31_B3 as u32,
V31_B4 = Register_REG_V31_B4 as u32,
V31_B5 = Register_REG_V31_B5 as u32,
V31_B6 = Register_REG_V31_B6 as u32,
V31_B7 = Register_REG_V31_B7 as u32,
V31_B8 = Register_REG_V31_B8 as u32,
V31_B9 = Register_REG_V31_B9 as u32,
V31_B10 = Register_REG_V31_B10 as u32,
V31_B11 = Register_REG_V31_B11 as u32,
V31_B12 = Register_REG_V31_B12 as u32,
V31_B13 = Register_REG_V31_B13 as u32,
V31_B14 = Register_REG_V31_B14 as u32,
V31_B15 = Register_REG_V31_B15 as u32,
V0_H0 = Register_REG_V0_H0 as u32,
V0_H1 = Register_REG_V0_H1 as u32,
V0_H2 = Register_REG_V0_H2 as u32,
V0_H3 = Register_REG_V0_H3 as u32,
V0_H4 = Register_REG_V0_H4 as u32,
V0_H5 = Register_REG_V0_H5 as u32,
V0_H6 = Register_REG_V0_H6 as u32,
V0_H7 = Register_REG_V0_H7 as u32,
V1_H0 = Register_REG_V1_H0 as u32,
V1_H1 = Register_REG_V1_H1 as u32,
V1_H2 = Register_REG_V1_H2 as u32,
V1_H3 = Register_REG_V1_H3 as u32,
V1_H4 = Register_REG_V1_H4 as u32,
V1_H5 = Register_REG_V1_H5 as u32,
V1_H6 = Register_REG_V1_H6 as u32,
V1_H7 = Register_REG_V1_H7 as u32,
V2_H0 = Register_REG_V2_H0 as u32,
V2_H1 = Register_REG_V2_H1 as u32,
V2_H2 = Register_REG_V2_H2 as u32,
V2_H3 = Register_REG_V2_H3 as u32,
V2_H4 = Register_REG_V2_H4 as u32,
V2_H5 = Register_REG_V2_H5 as u32,
V2_H6 = Register_REG_V2_H6 as u32,
V2_H7 = Register_REG_V2_H7 as u32,
V3_H0 = Register_REG_V3_H0 as u32,
V3_H1 = Register_REG_V3_H1 as u32,
V3_H2 = Register_REG_V3_H2 as u32,
V3_H3 = Register_REG_V3_H3 as u32,
V3_H4 = Register_REG_V3_H4 as u32,
V3_H5 = Register_REG_V3_H5 as u32,
V3_H6 = Register_REG_V3_H6 as u32,
V3_H7 = Register_REG_V3_H7 as u32,
V4_H0 = Register_REG_V4_H0 as u32,
V4_H1 = Register_REG_V4_H1 as u32,
V4_H2 = Register_REG_V4_H2 as u32,
V4_H3 = Register_REG_V4_H3 as u32,
V4_H4 = Register_REG_V4_H4 as u32,
V4_H5 = Register_REG_V4_H5 as u32,
V4_H6 = Register_REG_V4_H6 as u32,
V4_H7 = Register_REG_V4_H7 as u32,
V5_H0 = Register_REG_V5_H0 as u32,
V5_H1 = Register_REG_V5_H1 as u32,
V5_H2 = Register_REG_V5_H2 as u32,
V5_H3 = Register_REG_V5_H3 as u32,
V5_H4 = Register_REG_V5_H4 as u32,
V5_H5 = Register_REG_V5_H5 as u32,
V5_H6 = Register_REG_V5_H6 as u32,
V5_H7 = Register_REG_V5_H7 as u32,
V6_H0 = Register_REG_V6_H0 as u32,
V6_H1 = Register_REG_V6_H1 as u32,
V6_H2 = Register_REG_V6_H2 as u32,
V6_H3 = Register_REG_V6_H3 as u32,
V6_H4 = Register_REG_V6_H4 as u32,
V6_H5 = Register_REG_V6_H5 as u32,
V6_H6 = Register_REG_V6_H6 as u32,
V6_H7 = Register_REG_V6_H7 as u32,
V7_H0 = Register_REG_V7_H0 as u32,
V7_H1 = Register_REG_V7_H1 as u32,
V7_H2 = Register_REG_V7_H2 as u32,
V7_H3 = Register_REG_V7_H3 as u32,
V7_H4 = Register_REG_V7_H4 as u32,
V7_H5 = Register_REG_V7_H5 as u32,
V7_H6 = Register_REG_V7_H6 as u32,
V7_H7 = Register_REG_V7_H7 as u32,
V8_H0 = Register_REG_V8_H0 as u32,
V8_H1 = Register_REG_V8_H1 as u32,
V8_H2 = Register_REG_V8_H2 as u32,
V8_H3 = Register_REG_V8_H3 as u32,
V8_H4 = Register_REG_V8_H4 as u32,
V8_H5 = Register_REG_V8_H5 as u32,
V8_H6 = Register_REG_V8_H6 as u32,
V8_H7 = Register_REG_V8_H7 as u32,
V9_H0 = Register_REG_V9_H0 as u32,
V9_H1 = Register_REG_V9_H1 as u32,
V9_H2 = Register_REG_V9_H2 as u32,
V9_H3 = Register_REG_V9_H3 as u32,
V9_H4 = Register_REG_V9_H4 as u32,
V9_H5 = Register_REG_V9_H5 as u32,
V9_H6 = Register_REG_V9_H6 as u32,
V9_H7 = Register_REG_V9_H7 as u32,
V10_H0 = Register_REG_V10_H0 as u32,
V10_H1 = Register_REG_V10_H1 as u32,
V10_H2 = Register_REG_V10_H2 as u32,
V10_H3 = Register_REG_V10_H3 as u32,
V10_H4 = Register_REG_V10_H4 as u32,
V10_H5 = Register_REG_V10_H5 as u32,
V10_H6 = Register_REG_V10_H6 as u32,
V10_H7 = Register_REG_V10_H7 as u32,
V11_H0 = Register_REG_V11_H0 as u32,
V11_H1 = Register_REG_V11_H1 as u32,
V11_H2 = Register_REG_V11_H2 as u32,
V11_H3 = Register_REG_V11_H3 as u32,
V11_H4 = Register_REG_V11_H4 as u32,
V11_H5 = Register_REG_V11_H5 as u32,
V11_H6 = Register_REG_V11_H6 as u32,
V11_H7 = Register_REG_V11_H7 as u32,
V12_H0 = Register_REG_V12_H0 as u32,
V12_H1 = Register_REG_V12_H1 as u32,
V12_H2 = Register_REG_V12_H2 as u32,
V12_H3 = Register_REG_V12_H3 as u32,
V12_H4 = Register_REG_V12_H4 as u32,
V12_H5 = Register_REG_V12_H5 as u32,
V12_H6 = Register_REG_V12_H6 as u32,
V12_H7 = Register_REG_V12_H7 as u32,
V13_H0 = Register_REG_V13_H0 as u32,
V13_H1 = Register_REG_V13_H1 as u32,
V13_H2 = Register_REG_V13_H2 as u32,
V13_H3 = Register_REG_V13_H3 as u32,
V13_H4 = Register_REG_V13_H4 as u32,
V13_H5 = Register_REG_V13_H5 as u32,
V13_H6 = Register_REG_V13_H6 as u32,
V13_H7 = Register_REG_V13_H7 as u32,
V14_H0 = Register_REG_V14_H0 as u32,
V14_H1 = Register_REG_V14_H1 as u32,
V14_H2 = Register_REG_V14_H2 as u32,
V14_H3 = Register_REG_V14_H3 as u32,
V14_H4 = Register_REG_V14_H4 as u32,
V14_H5 = Register_REG_V14_H5 as u32,
V14_H6 = Register_REG_V14_H6 as u32,
V14_H7 = Register_REG_V14_H7 as u32,
V15_H0 = Register_REG_V15_H0 as u32,
V15_H1 = Register_REG_V15_H1 as u32,
V15_H2 = Register_REG_V15_H2 as u32,
V15_H3 = Register_REG_V15_H3 as u32,
V15_H4 = Register_REG_V15_H4 as u32,
V15_H5 = Register_REG_V15_H5 as u32,
V15_H6 = Register_REG_V15_H6 as u32,
V15_H7 = Register_REG_V15_H7 as u32,
V16_H0 = Register_REG_V16_H0 as u32,
V16_H1 = Register_REG_V16_H1 as u32,
V16_H2 = Register_REG_V16_H2 as u32,
V16_H3 = Register_REG_V16_H3 as u32,
V16_H4 = Register_REG_V16_H4 as u32,
V16_H5 = Register_REG_V16_H5 as u32,
V16_H6 = Register_REG_V16_H6 as u32,
V16_H7 = Register_REG_V16_H7 as u32,
V17_H0 = Register_REG_V17_H0 as u32,
V17_H1 = Register_REG_V17_H1 as u32,
V17_H2 = Register_REG_V17_H2 as u32,
V17_H3 = Register_REG_V17_H3 as u32,
V17_H4 = Register_REG_V17_H4 as u32,
V17_H5 = Register_REG_V17_H5 as u32,
V17_H6 = Register_REG_V17_H6 as u32,
V17_H7 = Register_REG_V17_H7 as u32,
V18_H0 = Register_REG_V18_H0 as u32,
V18_H1 = Register_REG_V18_H1 as u32,
V18_H2 = Register_REG_V18_H2 as u32,
V18_H3 = Register_REG_V18_H3 as u32,
V18_H4 = Register_REG_V18_H4 as u32,
V18_H5 = Register_REG_V18_H5 as u32,
V18_H6 = Register_REG_V18_H6 as u32,
V18_H7 = Register_REG_V18_H7 as u32,
V19_H0 = Register_REG_V19_H0 as u32,
V19_H1 = Register_REG_V19_H1 as u32,
V19_H2 = Register_REG_V19_H2 as u32,
V19_H3 = Register_REG_V19_H3 as u32,
V19_H4 = Register_REG_V19_H4 as u32,
V19_H5 = Register_REG_V19_H5 as u32,
V19_H6 = Register_REG_V19_H6 as u32,
V19_H7 = Register_REG_V19_H7 as u32,
V20_H0 = Register_REG_V20_H0 as u32,
V20_H1 = Register_REG_V20_H1 as u32,
V20_H2 = Register_REG_V20_H2 as u32,
V20_H3 = Register_REG_V20_H3 as u32,
V20_H4 = Register_REG_V20_H4 as u32,
V20_H5 = Register_REG_V20_H5 as u32,
V20_H6 = Register_REG_V20_H6 as u32,
V20_H7 = Register_REG_V20_H7 as u32,
V21_H0 = Register_REG_V21_H0 as u32,
V21_H1 = Register_REG_V21_H1 as u32,
V21_H2 = Register_REG_V21_H2 as u32,
V21_H3 = Register_REG_V21_H3 as u32,
V21_H4 = Register_REG_V21_H4 as u32,
V21_H5 = Register_REG_V21_H5 as u32,
V21_H6 = Register_REG_V21_H6 as u32,
V21_H7 = Register_REG_V21_H7 as u32,
V22_H0 = Register_REG_V22_H0 as u32,
V22_H1 = Register_REG_V22_H1 as u32,
V22_H2 = Register_REG_V22_H2 as u32,
V22_H3 = Register_REG_V22_H3 as u32,
V22_H4 = Register_REG_V22_H4 as u32,
V22_H5 = Register_REG_V22_H5 as u32,
V22_H6 = Register_REG_V22_H6 as u32,
V22_H7 = Register_REG_V22_H7 as u32,
V23_H0 = Register_REG_V23_H0 as u32,
V23_H1 = Register_REG_V23_H1 as u32,
V23_H2 = Register_REG_V23_H2 as u32,
V23_H3 = Register_REG_V23_H3 as u32,
V23_H4 = Register_REG_V23_H4 as u32,
V23_H5 = Register_REG_V23_H5 as u32,
V23_H6 = Register_REG_V23_H6 as u32,
V23_H7 = Register_REG_V23_H7 as u32,
V24_H0 = Register_REG_V24_H0 as u32,
V24_H1 = Register_REG_V24_H1 as u32,
V24_H2 = Register_REG_V24_H2 as u32,
V24_H3 = Register_REG_V24_H3 as u32,
V24_H4 = Register_REG_V24_H4 as u32,
V24_H5 = Register_REG_V24_H5 as u32,
V24_H6 = Register_REG_V24_H6 as u32,
V24_H7 = Register_REG_V24_H7 as u32,
V25_H0 = Register_REG_V25_H0 as u32,
V25_H1 = Register_REG_V25_H1 as u32,
V25_H2 = Register_REG_V25_H2 as u32,
V25_H3 = Register_REG_V25_H3 as u32,
V25_H4 = Register_REG_V25_H4 as u32,
V25_H5 = Register_REG_V25_H5 as u32,
V25_H6 = Register_REG_V25_H6 as u32,
V25_H7 = Register_REG_V25_H7 as u32,
V26_H0 = Register_REG_V26_H0 as u32,
V26_H1 = Register_REG_V26_H1 as u32,
V26_H2 = Register_REG_V26_H2 as u32,
V26_H3 = Register_REG_V26_H3 as u32,
V26_H4 = Register_REG_V26_H4 as u32,
V26_H5 = Register_REG_V26_H5 as u32,
V26_H6 = Register_REG_V26_H6 as u32,
V26_H7 = Register_REG_V26_H7 as u32,
V27_H0 = Register_REG_V27_H0 as u32,
V27_H1 = Register_REG_V27_H1 as u32,
V27_H2 = Register_REG_V27_H2 as u32,
V27_H3 = Register_REG_V27_H3 as u32,
V27_H4 = Register_REG_V27_H4 as u32,
V27_H5 = Register_REG_V27_H5 as u32,
V27_H6 = Register_REG_V27_H6 as u32,
V27_H7 = Register_REG_V27_H7 as u32,
V28_H0 = Register_REG_V28_H0 as u32,
V28_H1 = Register_REG_V28_H1 as u32,
V28_H2 = Register_REG_V28_H2 as u32,
V28_H3 = Register_REG_V28_H3 as u32,
V28_H4 = Register_REG_V28_H4 as u32,
V28_H5 = Register_REG_V28_H5 as u32,
V28_H6 = Register_REG_V28_H6 as u32,
V28_H7 = Register_REG_V28_H7 as u32,
V29_H0 = Register_REG_V29_H0 as u32,
V29_H1 = Register_REG_V29_H1 as u32,
V29_H2 = Register_REG_V29_H2 as u32,
V29_H3 = Register_REG_V29_H3 as u32,
V29_H4 = Register_REG_V29_H4 as u32,
V29_H5 = Register_REG_V29_H5 as u32,
V29_H6 = Register_REG_V29_H6 as u32,
V29_H7 = Register_REG_V29_H7 as u32,
V30_H0 = Register_REG_V30_H0 as u32,
V30_H1 = Register_REG_V30_H1 as u32,
V30_H2 = Register_REG_V30_H2 as u32,
V30_H3 = Register_REG_V30_H3 as u32,
V30_H4 = Register_REG_V30_H4 as u32,
V30_H5 = Register_REG_V30_H5 as u32,
V30_H6 = Register_REG_V30_H6 as u32,
V30_H7 = Register_REG_V30_H7 as u32,
V31_H0 = Register_REG_V31_H0 as u32,
V31_H1 = Register_REG_V31_H1 as u32,
V31_H2 = Register_REG_V31_H2 as u32,
V31_H3 = Register_REG_V31_H3 as u32,
V31_H4 = Register_REG_V31_H4 as u32,
V31_H5 = Register_REG_V31_H5 as u32,
V31_H6 = Register_REG_V31_H6 as u32,
V31_H7 = Register_REG_V31_H7 as u32,
V0_S0 = Register_REG_V0_S0 as u32,
V0_S1 = Register_REG_V0_S1 as u32,
V0_S2 = Register_REG_V0_S2 as u32,
V0_S3 = Register_REG_V0_S3 as u32,
V1_S0 = Register_REG_V1_S0 as u32,
V1_S1 = Register_REG_V1_S1 as u32,
V1_S2 = Register_REG_V1_S2 as u32,
V1_S3 = Register_REG_V1_S3 as u32,
V2_S0 = Register_REG_V2_S0 as u32,
V2_S1 = Register_REG_V2_S1 as u32,
V2_S2 = Register_REG_V2_S2 as u32,
V2_S3 = Register_REG_V2_S3 as u32,
V3_S0 = Register_REG_V3_S0 as u32,
V3_S1 = Register_REG_V3_S1 as u32,
V3_S2 = Register_REG_V3_S2 as u32,
V3_S3 = Register_REG_V3_S3 as u32,
V4_S0 = Register_REG_V4_S0 as u32,
V4_S1 = Register_REG_V4_S1 as u32,
V4_S2 = Register_REG_V4_S2 as u32,
V4_S3 = Register_REG_V4_S3 as u32,
V5_S0 = Register_REG_V5_S0 as u32,
V5_S1 = Register_REG_V5_S1 as u32,
V5_S2 = Register_REG_V5_S2 as u32,
V5_S3 = Register_REG_V5_S3 as u32,
V6_S0 = Register_REG_V6_S0 as u32,
V6_S1 = Register_REG_V6_S1 as u32,
V6_S2 = Register_REG_V6_S2 as u32,
V6_S3 = Register_REG_V6_S3 as u32,
V7_S0 = Register_REG_V7_S0 as u32,
V7_S1 = Register_REG_V7_S1 as u32,
V7_S2 = Register_REG_V7_S2 as u32,
V7_S3 = Register_REG_V7_S3 as u32,
V8_S0 = Register_REG_V8_S0 as u32,
V8_S1 = Register_REG_V8_S1 as u32,
V8_S2 = Register_REG_V8_S2 as u32,
V8_S3 = Register_REG_V8_S3 as u32,
V9_S0 = Register_REG_V9_S0 as u32,
V9_S1 = Register_REG_V9_S1 as u32,
V9_S2 = Register_REG_V9_S2 as u32,
V9_S3 = Register_REG_V9_S3 as u32,
V10_S0 = Register_REG_V10_S0 as u32,
V10_S1 = Register_REG_V10_S1 as u32,
V10_S2 = Register_REG_V10_S2 as u32,
V10_S3 = Register_REG_V10_S3 as u32,
V11_S0 = Register_REG_V11_S0 as u32,
V11_S1 = Register_REG_V11_S1 as u32,
V11_S2 = Register_REG_V11_S2 as u32,
V11_S3 = Register_REG_V11_S3 as u32,
V12_S0 = Register_REG_V12_S0 as u32,
V12_S1 = Register_REG_V12_S1 as u32,
V12_S2 = Register_REG_V12_S2 as u32,
V12_S3 = Register_REG_V12_S3 as u32,
V13_S0 = Register_REG_V13_S0 as u32,
V13_S1 = Register_REG_V13_S1 as u32,
V13_S2 = Register_REG_V13_S2 as u32,
V13_S3 = Register_REG_V13_S3 as u32,
V14_S0 = Register_REG_V14_S0 as u32,
V14_S1 = Register_REG_V14_S1 as u32,
V14_S2 = Register_REG_V14_S2 as u32,
V14_S3 = Register_REG_V14_S3 as u32,
V15_S0 = Register_REG_V15_S0 as u32,
V15_S1 = Register_REG_V15_S1 as u32,
V15_S2 = Register_REG_V15_S2 as u32,
V15_S3 = Register_REG_V15_S3 as u32,
V16_S0 = Register_REG_V16_S0 as u32,
V16_S1 = Register_REG_V16_S1 as u32,
V16_S2 = Register_REG_V16_S2 as u32,
V16_S3 = Register_REG_V16_S3 as u32,
V17_S0 = Register_REG_V17_S0 as u32,
V17_S1 = Register_REG_V17_S1 as u32,
V17_S2 = Register_REG_V17_S2 as u32,
V17_S3 = Register_REG_V17_S3 as u32,
V18_S0 = Register_REG_V18_S0 as u32,
V18_S1 = Register_REG_V18_S1 as u32,
V18_S2 = Register_REG_V18_S2 as u32,
V18_S3 = Register_REG_V18_S3 as u32,
V19_S0 = Register_REG_V19_S0 as u32,
V19_S1 = Register_REG_V19_S1 as u32,
V19_S2 = Register_REG_V19_S2 as u32,
V19_S3 = Register_REG_V19_S3 as u32,
V20_S0 = Register_REG_V20_S0 as u32,
V20_S1 = Register_REG_V20_S1 as u32,
V20_S2 = Register_REG_V20_S2 as u32,
V20_S3 = Register_REG_V20_S3 as u32,
V21_S0 = Register_REG_V21_S0 as u32,
V21_S1 = Register_REG_V21_S1 as u32,
V21_S2 = Register_REG_V21_S2 as u32,
V21_S3 = Register_REG_V21_S3 as u32,
V22_S0 = Register_REG_V22_S0 as u32,
V22_S1 = Register_REG_V22_S1 as u32,
V22_S2 = Register_REG_V22_S2 as u32,
V22_S3 = Register_REG_V22_S3 as u32,
V23_S0 = Register_REG_V23_S0 as u32,
V23_S1 = Register_REG_V23_S1 as u32,
V23_S2 = Register_REG_V23_S2 as u32,
V23_S3 = Register_REG_V23_S3 as u32,
V24_S0 = Register_REG_V24_S0 as u32,
V24_S1 = Register_REG_V24_S1 as u32,
V24_S2 = Register_REG_V24_S2 as u32,
V24_S3 = Register_REG_V24_S3 as u32,
V25_S0 = Register_REG_V25_S0 as u32,
V25_S1 = Register_REG_V25_S1 as u32,
V25_S2 = Register_REG_V25_S2 as u32,
V25_S3 = Register_REG_V25_S3 as u32,
V26_S0 = Register_REG_V26_S0 as u32,
V26_S1 = Register_REG_V26_S1 as u32,
V26_S2 = Register_REG_V26_S2 as u32,
V26_S3 = Register_REG_V26_S3 as u32,
V27_S0 = Register_REG_V27_S0 as u32,
V27_S1 = Register_REG_V27_S1 as u32,
V27_S2 = Register_REG_V27_S2 as u32,
V27_S3 = Register_REG_V27_S3 as u32,
V28_S0 = Register_REG_V28_S0 as u32,
V28_S1 = Register_REG_V28_S1 as u32,
V28_S2 = Register_REG_V28_S2 as u32,
V28_S3 = Register_REG_V28_S3 as u32,
V29_S0 = Register_REG_V29_S0 as u32,
V29_S1 = Register_REG_V29_S1 as u32,
V29_S2 = Register_REG_V29_S2 as u32,
V29_S3 = Register_REG_V29_S3 as u32,
V30_S0 = Register_REG_V30_S0 as u32,
V30_S1 = Register_REG_V30_S1 as u32,
V30_S2 = Register_REG_V30_S2 as u32,
V30_S3 = Register_REG_V30_S3 as u32,
V31_S0 = Register_REG_V31_S0 as u32,
V31_S1 = Register_REG_V31_S1 as u32,
V31_S2 = Register_REG_V31_S2 as u32,
V31_S3 = Register_REG_V31_S3 as u32,
V0_D0 = Register_REG_V0_D0 as u32,
V0_D1 = Register_REG_V0_D1 as u32,
V1_D0 = Register_REG_V1_D0 as u32,
V1_D1 = Register_REG_V1_D1 as u32,
V2_D0 = Register_REG_V2_D0 as u32,
V2_D1 = Register_REG_V2_D1 as u32,
V3_D0 = Register_REG_V3_D0 as u32,
V3_D1 = Register_REG_V3_D1 as u32,
V4_D0 = Register_REG_V4_D0 as u32,
V4_D1 = Register_REG_V4_D1 as u32,
V5_D0 = Register_REG_V5_D0 as u32,
V5_D1 = Register_REG_V5_D1 as u32,
V6_D0 = Register_REG_V6_D0 as u32,
V6_D1 = Register_REG_V6_D1 as u32,
V7_D0 = Register_REG_V7_D0 as u32,
V7_D1 = Register_REG_V7_D1 as u32,
V8_D0 = Register_REG_V8_D0 as u32,
V8_D1 = Register_REG_V8_D1 as u32,
V9_D0 = Register_REG_V9_D0 as u32,
V9_D1 = Register_REG_V9_D1 as u32,
V10_D0 = Register_REG_V10_D0 as u32,
V10_D1 = Register_REG_V10_D1 as u32,
V11_D0 = Register_REG_V11_D0 as u32,
V11_D1 = Register_REG_V11_D1 as u32,
V12_D0 = Register_REG_V12_D0 as u32,
V12_D1 = Register_REG_V12_D1 as u32,
V13_D0 = Register_REG_V13_D0 as u32,
V13_D1 = Register_REG_V13_D1 as u32,
V14_D0 = Register_REG_V14_D0 as u32,
V14_D1 = Register_REG_V14_D1 as u32,
V15_D0 = Register_REG_V15_D0 as u32,
V15_D1 = Register_REG_V15_D1 as u32,
V16_D0 = Register_REG_V16_D0 as u32,
V16_D1 = Register_REG_V16_D1 as u32,
V17_D0 = Register_REG_V17_D0 as u32,
V17_D1 = Register_REG_V17_D1 as u32,
V18_D0 = Register_REG_V18_D0 as u32,
V18_D1 = Register_REG_V18_D1 as u32,
V19_D0 = Register_REG_V19_D0 as u32,
V19_D1 = Register_REG_V19_D1 as u32,
V20_D0 = Register_REG_V20_D0 as u32,
V20_D1 = Register_REG_V20_D1 as u32,
V21_D0 = Register_REG_V21_D0 as u32,
V21_D1 = Register_REG_V21_D1 as u32,
V22_D0 = Register_REG_V22_D0 as u32,
V22_D1 = Register_REG_V22_D1 as u32,
V23_D0 = Register_REG_V23_D0 as u32,
V23_D1 = Register_REG_V23_D1 as u32,
V24_D0 = Register_REG_V24_D0 as u32,
V24_D1 = Register_REG_V24_D1 as u32,
V25_D0 = Register_REG_V25_D0 as u32,
V25_D1 = Register_REG_V25_D1 as u32,
V26_D0 = Register_REG_V26_D0 as u32,
V26_D1 = Register_REG_V26_D1 as u32,
V27_D0 = Register_REG_V27_D0 as u32,
V27_D1 = Register_REG_V27_D1 as u32,
V28_D0 = Register_REG_V28_D0 as u32,
V28_D1 = Register_REG_V28_D1 as u32,
V29_D0 = Register_REG_V29_D0 as u32,
V29_D1 = Register_REG_V29_D1 as u32,
V30_D0 = Register_REG_V30_D0 as u32,
V30_D1 = Register_REG_V30_D1 as u32,
V31_D0 = Register_REG_V31_D0 as u32,
V31_D1 = Register_REG_V31_D1 as u32,
Z0 = Register_REG_Z0 as u32,
Z1 = Register_REG_Z1 as u32,
Z2 = Register_REG_Z2 as u32,
Z3 = Register_REG_Z3 as u32,
Z4 = Register_REG_Z4 as u32,
Z5 = Register_REG_Z5 as u32,
Z6 = Register_REG_Z6 as u32,
Z7 = Register_REG_Z7 as u32,
Z8 = Register_REG_Z8 as u32,
Z9 = Register_REG_Z9 as u32,
Z10 = Register_REG_Z10 as u32,
Z11 = Register_REG_Z11 as u32,
Z12 = Register_REG_Z12 as u32,
Z13 = Register_REG_Z13 as u32,
Z14 = Register_REG_Z14 as u32,
Z15 = Register_REG_Z15 as u32,
Z16 = Register_REG_Z16 as u32,
Z17 = Register_REG_Z17 as u32,
Z18 = Register_REG_Z18 as u32,
Z19 = Register_REG_Z19 as u32,
Z20 = Register_REG_Z20 as u32,
Z21 = Register_REG_Z21 as u32,
Z22 = Register_REG_Z22 as u32,
Z23 = Register_REG_Z23 as u32,
Z24 = Register_REG_Z24 as u32,
Z25 = Register_REG_Z25 as u32,
Z26 = Register_REG_Z26 as u32,
Z27 = Register_REG_Z27 as u32,
Z28 = Register_REG_Z28 as u32,
Z29 = Register_REG_Z29 as u32,
Z30 = Register_REG_Z30 as u32,
ZZR = Register_REG_ZZR as u32,
Z31 = Register_REG_Z31 as u32,
P0 = Register_REG_P0 as u32,
P1 = Register_REG_P1 as u32,
P2 = Register_REG_P2 as u32,
P3 = Register_REG_P3 as u32,
P4 = Register_REG_P4 as u32,
P5 = Register_REG_P5 as u32,
P6 = Register_REG_P6 as u32,
P7 = Register_REG_P7 as u32,
P8 = Register_REG_P8 as u32,
P9 = Register_REG_P9 as u32,
P10 = Register_REG_P10 as u32,
P11 = Register_REG_P11 as u32,
P12 = Register_REG_P12 as u32,
P13 = Register_REG_P13 as u32,
P14 = Register_REG_P14 as u32,
P15 = Register_REG_P15 as u32,
P16 = Register_REG_P16 as u32,
P17 = Register_REG_P17 as u32,
P18 = Register_REG_P18 as u32,
P19 = Register_REG_P19 as u32,
P20 = Register_REG_P20 as u32,
P21 = Register_REG_P21 as u32,
P22 = Register_REG_P22 as u32,
P23 = Register_REG_P23 as u32,
P24 = Register_REG_P24 as u32,
P25 = Register_REG_P25 as u32,
P26 = Register_REG_P26 as u32,
P27 = Register_REG_P27 as u32,
P28 = Register_REG_P28 as u32,
P29 = Register_REG_P29 as u32,
P30 = Register_REG_P30 as u32,
P31 = Register_REG_P31 as u32,
PF0 = Register_REG_PF0 as u32,
PF1 = Register_REG_PF1 as u32,
PF2 = Register_REG_PF2 as u32,
PF3 = Register_REG_PF3 as u32,
PF4 = Register_REG_PF4 as u32,
PF5 = Register_REG_PF5 as u32,
PF6 = Register_REG_PF6 as u32,
PF7 = Register_REG_PF7 as u32,
PF8 = Register_REG_PF8 as u32,
PF9 = Register_REG_PF9 as u32,
PF10 = Register_REG_PF10 as u32,
PF11 = Register_REG_PF11 as u32,
PF12 = Register_REG_PF12 as u32,
PF13 = Register_REG_PF13 as u32,
PF14 = Register_REG_PF14 as u32,
PF15 = Register_REG_PF15 as u32,
PF16 = Register_REG_PF16 as u32,
PF17 = Register_REG_PF17 as u32,
PF18 = Register_REG_PF18 as u32,
PF19 = Register_REG_PF19 as u32,
PF20 = Register_REG_PF20 as u32,
PF21 = Register_REG_PF21 as u32,
PF22 = Register_REG_PF22 as u32,
PF23 = Register_REG_PF23 as u32,
PF24 = Register_REG_PF24 as u32,
PF25 = Register_REG_PF25 as u32,
PF26 = Register_REG_PF26 as u32,
PF27 = Register_REG_PF27 as u32,
PF28 = Register_REG_PF28 as u32,
PF29 = Register_REG_PF29 as u32,
PF30 = Register_REG_PF30 as u32,
PF31 = Register_REG_PF31 as u32,
}
// Compile-time guard: PF31 must be the final Register value in bad64_sys.
// If the C library adds a register, REG_END moves and this assert fails,
// forcing the Reg enum above to be updated in lockstep with the bindings.
const_assert_eq!(Register_REG_END, Register_REG_PF31 + 1);
impl Reg {
    /// Returns the register's name (e.g. `"x0"`, `"v31"`) as reported by
    /// the underlying bad64 C library.
    ///
    /// # Panics
    /// Panics only if the C library returns a string that is not valid
    /// UTF-8, which does not happen for any register it defines.
    pub fn name(&self) -> &'static str {
        // bindgen emits C enums as i32 on Windows (MSVC ABI) and u32
        // elsewhere, so only the discriminant cast is platform-specific.
        // The conversion cannot fail: Reg is repr(u32) and every
        // discriminant comes straight from the bindgen Register constants.
        #[cfg(target_os = "windows")]
        let raw = self.to_i32().unwrap();
        #[cfg(not(target_os = "windows"))]
        let raw = self.to_u32().unwrap();
        // SAFETY: get_register_name returns a pointer to a static,
        // NUL-terminated string for every valid Register value, so the
        // pointer is non-null and lives for 'static.
        unsafe { CStr::from_ptr(bad64_sys::get_register_name(raw)) }
            .to_str()
            .unwrap()
    }

    /// Returns the register's size in bytes (e.g. 8 for an X register,
    /// 16 for a full V register) as reported by the bad64 C library.
    pub fn size(&self) -> usize {
        // Same platform-dependent discriminant representation as in name().
        #[cfg(target_os = "windows")]
        let raw = self.to_i32().unwrap();
        #[cfg(not(target_os = "windows"))]
        let raw = self.to_u32().unwrap();
        // SAFETY: get_register_size is a pure lookup that is defined for
        // every valid Register value.
        unsafe { bad64_sys::get_register_size(raw) as usize }
    }
}