Enum iced_x86::Code

#[non_exhaustive]
pub enum Code {
INVALID = 0, DeclareByte = 1, DeclareWord = 2, DeclareDword = 3, DeclareQword = 4, Add_rm8_r8 = 5, Add_rm16_r16 = 6, Add_rm32_r32 = 7, Add_rm64_r64 = 8, Add_r8_rm8 = 9, Add_r16_rm16 = 10, Add_r32_rm32 = 11, Add_r64_rm64 = 12, Add_AL_imm8 = 13, Add_AX_imm16 = 14, Add_EAX_imm32 = 15, Add_RAX_imm32 = 16, Pushw_ES = 17, Pushd_ES = 18, Popw_ES = 19, Popd_ES = 20, Or_rm8_r8 = 21, Or_rm16_r16 = 22, Or_rm32_r32 = 23, Or_rm64_r64 = 24, Or_r8_rm8 = 25, Or_r16_rm16 = 26, Or_r32_rm32 = 27, Or_r64_rm64 = 28, Or_AL_imm8 = 29, Or_AX_imm16 = 30, Or_EAX_imm32 = 31, Or_RAX_imm32 = 32, Pushw_CS = 33, Pushd_CS = 34, Popw_CS = 35, Adc_rm8_r8 = 36, Adc_rm16_r16 = 37, Adc_rm32_r32 = 38, Adc_rm64_r64 = 39, Adc_r8_rm8 = 40, Adc_r16_rm16 = 41, Adc_r32_rm32 = 42, Adc_r64_rm64 = 43, Adc_AL_imm8 = 44, Adc_AX_imm16 = 45, Adc_EAX_imm32 = 46, Adc_RAX_imm32 = 47, Pushw_SS = 48, Pushd_SS = 49, Popw_SS = 50, Popd_SS = 51, Sbb_rm8_r8 = 52, Sbb_rm16_r16 = 53, Sbb_rm32_r32 = 54, Sbb_rm64_r64 = 55, Sbb_r8_rm8 = 56, Sbb_r16_rm16 = 57, Sbb_r32_rm32 = 58, Sbb_r64_rm64 = 59, Sbb_AL_imm8 = 60, Sbb_AX_imm16 = 61, Sbb_EAX_imm32 = 62, Sbb_RAX_imm32 = 63, Pushw_DS = 64, Pushd_DS = 65, Popw_DS = 66, Popd_DS = 67, And_rm8_r8 = 68, And_rm16_r16 = 69, And_rm32_r32 = 70, And_rm64_r64 = 71, And_r8_rm8 = 72, And_r16_rm16 = 73, And_r32_rm32 = 74, And_r64_rm64 = 75, And_AL_imm8 = 76, And_AX_imm16 = 77, And_EAX_imm32 = 78, And_RAX_imm32 = 79, Daa = 80, Sub_rm8_r8 = 81, Sub_rm16_r16 = 82, Sub_rm32_r32 = 83, Sub_rm64_r64 = 84, Sub_r8_rm8 = 85, Sub_r16_rm16 = 86, Sub_r32_rm32 = 87, Sub_r64_rm64 = 88, Sub_AL_imm8 = 89, Sub_AX_imm16 = 90, Sub_EAX_imm32 = 91, Sub_RAX_imm32 = 92, Das = 93, Xor_rm8_r8 = 94, Xor_rm16_r16 = 95, Xor_rm32_r32 = 96, Xor_rm64_r64 = 97, Xor_r8_rm8 = 98, Xor_r16_rm16 = 99, Xor_r32_rm32 = 100, Xor_r64_rm64 = 101, Xor_AL_imm8 = 102, Xor_AX_imm16 = 103, Xor_EAX_imm32 = 104, Xor_RAX_imm32 = 105, Aaa = 106, Cmp_rm8_r8 = 107, Cmp_rm16_r16 = 108, Cmp_rm32_r32 = 109, Cmp_rm64_r64 = 110, Cmp_r8_rm8 = 111, Cmp_r16_rm16 = 112, Cmp_r32_rm32 = 113, Cmp_r64_rm64 = 114, Cmp_AL_imm8 = 115, Cmp_AX_imm16 = 116, Cmp_EAX_imm32 = 117, Cmp_RAX_imm32 = 118, Aas = 119, Inc_r16 = 120, Inc_r32 = 121, Dec_r16 = 122, Dec_r32 = 123, Push_r16 = 124, Push_r32 = 125, Push_r64 = 126, Pop_r16 = 127, Pop_r32 = 128, Pop_r64 = 129, Pushaw = 130, Pushad = 131, Popaw = 132, Popad = 133, Bound_r16_m1616 = 134, Bound_r32_m3232 = 135, Arpl_rm16_r16 = 136, Arpl_r32m16_r32 = 137, Movsxd_r16_rm16 = 138, Movsxd_r32_rm32 = 139, Movsxd_r64_rm32 = 140, Push_imm16 = 141, Pushd_imm32 = 142, Pushq_imm32 = 143, Imul_r16_rm16_imm16 = 144, Imul_r32_rm32_imm32 = 145, Imul_r64_rm64_imm32 = 146, Pushw_imm8 = 147, Pushd_imm8 = 148, Pushq_imm8 = 149, Imul_r16_rm16_imm8 = 150, Imul_r32_rm32_imm8 = 151, Imul_r64_rm64_imm8 = 152, Insb_m8_DX = 153, Insw_m16_DX = 154, Insd_m32_DX = 155, Outsb_DX_m8 = 156, Outsw_DX_m16 = 157, Outsd_DX_m32 = 158, Jo_rel8_16 = 159, Jo_rel8_32 = 160, Jo_rel8_64 = 161, Jno_rel8_16 = 162, Jno_rel8_32 = 163, Jno_rel8_64 = 164, Jb_rel8_16 = 165, Jb_rel8_32 = 166, Jb_rel8_64 = 167, Jae_rel8_16 = 168, Jae_rel8_32 = 169, Jae_rel8_64 = 170, Je_rel8_16 = 171, Je_rel8_32 = 172, Je_rel8_64 = 173, Jne_rel8_16 = 174, Jne_rel8_32 = 175, Jne_rel8_64 = 176, Jbe_rel8_16 = 177, Jbe_rel8_32 = 178, Jbe_rel8_64 = 179, Ja_rel8_16 = 180, Ja_rel8_32 = 181, Ja_rel8_64 = 182, Js_rel8_16 = 183, Js_rel8_32 = 184, Js_rel8_64 = 185, Jns_rel8_16 = 186, Jns_rel8_32 = 187, Jns_rel8_64 = 188, Jp_rel8_16 = 189, Jp_rel8_32 = 190, Jp_rel8_64 = 191, Jnp_rel8_16 = 192, Jnp_rel8_32 =
193, Jnp_rel8_64 = 194, Jl_rel8_16 = 195, Jl_rel8_32 = 196, Jl_rel8_64 = 197, Jge_rel8_16 = 198, Jge_rel8_32 = 199, Jge_rel8_64 = 200, Jle_rel8_16 = 201, Jle_rel8_32 = 202, Jle_rel8_64 = 203, Jg_rel8_16 = 204, Jg_rel8_32 = 205, Jg_rel8_64 = 206, Add_rm8_imm8 = 207, Or_rm8_imm8 = 208, Adc_rm8_imm8 = 209, Sbb_rm8_imm8 = 210, And_rm8_imm8 = 211, Sub_rm8_imm8 = 212, Xor_rm8_imm8 = 213, Cmp_rm8_imm8 = 214, Add_rm16_imm16 = 215, Add_rm32_imm32 = 216, Add_rm64_imm32 = 217, Or_rm16_imm16 = 218, Or_rm32_imm32 = 219, Or_rm64_imm32 = 220, Adc_rm16_imm16 = 221, Adc_rm32_imm32 = 222, Adc_rm64_imm32 = 223, Sbb_rm16_imm16 = 224, Sbb_rm32_imm32 = 225, Sbb_rm64_imm32 = 226, And_rm16_imm16 = 227, And_rm32_imm32 = 228, And_rm64_imm32 = 229, Sub_rm16_imm16 = 230, Sub_rm32_imm32 = 231, Sub_rm64_imm32 = 232, Xor_rm16_imm16 = 233, Xor_rm32_imm32 = 234, Xor_rm64_imm32 = 235, Cmp_rm16_imm16 = 236, Cmp_rm32_imm32 = 237, Cmp_rm64_imm32 = 238, Add_rm8_imm8_82 = 239, Or_rm8_imm8_82 = 240, Adc_rm8_imm8_82 = 241, Sbb_rm8_imm8_82 = 242, And_rm8_imm8_82 = 243, Sub_rm8_imm8_82 = 244, Xor_rm8_imm8_82 = 245, Cmp_rm8_imm8_82 = 246, Add_rm16_imm8 = 247, Add_rm32_imm8 = 248, Add_rm64_imm8 = 249, Or_rm16_imm8 = 250, Or_rm32_imm8 = 251, Or_rm64_imm8 = 252, Adc_rm16_imm8 = 253, Adc_rm32_imm8 = 254, Adc_rm64_imm8 = 255, Sbb_rm16_imm8 = 256, Sbb_rm32_imm8 = 257, Sbb_rm64_imm8 = 258, And_rm16_imm8 = 259, And_rm32_imm8 = 260, And_rm64_imm8 = 261, Sub_rm16_imm8 = 262, Sub_rm32_imm8 = 263, Sub_rm64_imm8 = 264, Xor_rm16_imm8 = 265, Xor_rm32_imm8 = 266, Xor_rm64_imm8 = 267, Cmp_rm16_imm8 = 268, Cmp_rm32_imm8 = 269, Cmp_rm64_imm8 = 270, Test_rm8_r8 = 271, Test_rm16_r16 = 272, Test_rm32_r32 = 273, Test_rm64_r64 = 274, Xchg_rm8_r8 = 275, Xchg_rm16_r16 = 276, Xchg_rm32_r32 = 277, Xchg_rm64_r64 = 278, Mov_rm8_r8 = 279, Mov_rm16_r16 = 280, Mov_rm32_r32 = 281, Mov_rm64_r64 = 282, Mov_r8_rm8 = 283, Mov_r16_rm16 = 284, Mov_r32_rm32 = 285, Mov_r64_rm64 = 286, Mov_rm16_Sreg = 287, Mov_r32m16_Sreg = 288, Mov_r64m16_Sreg = 289, Lea_r16_m = 290, Lea_r32_m = 291, Lea_r64_m = 292, Mov_Sreg_rm16 = 293, Mov_Sreg_r32m16 = 294, Mov_Sreg_r64m16 = 295, Pop_rm16 = 296, Pop_rm32 = 297, Pop_rm64 = 298, Nopw = 299, Nopd = 300, Nopq = 301, Xchg_r16_AX = 302, Xchg_r32_EAX = 303, Xchg_r64_RAX = 304, Pause = 305, Cbw = 306, Cwde = 307, Cdqe = 308, Cwd = 309, Cdq = 310, Cqo = 311, Call_ptr1616 = 312, Call_ptr1632 = 313, Wait = 314, Pushfw = 315, Pushfd = 316, Pushfq = 317, Popfw = 318, Popfd = 319, Popfq = 320, Sahf = 321, Lahf = 322, Mov_AL_moffs8 = 323, Mov_AX_moffs16 = 324, Mov_EAX_moffs32 = 325, Mov_RAX_moffs64 = 326, Mov_moffs8_AL = 327, Mov_moffs16_AX = 328, Mov_moffs32_EAX = 329, Mov_moffs64_RAX = 330, Movsb_m8_m8 = 331, Movsw_m16_m16 = 332, Movsd_m32_m32 = 333, Movsq_m64_m64 = 334, Cmpsb_m8_m8 = 335, Cmpsw_m16_m16 = 336, Cmpsd_m32_m32 = 337, Cmpsq_m64_m64 = 338, Test_AL_imm8 = 339, Test_AX_imm16 = 340, Test_EAX_imm32 = 341, Test_RAX_imm32 = 342, Stosb_m8_AL = 343, Stosw_m16_AX = 344, Stosd_m32_EAX = 345, Stosq_m64_RAX = 346, Lodsb_AL_m8 = 347, Lodsw_AX_m16 = 348, Lodsd_EAX_m32 = 349, Lodsq_RAX_m64 = 350, Scasb_AL_m8 = 351, Scasw_AX_m16 = 352, Scasd_EAX_m32 = 353, Scasq_RAX_m64 = 354, Mov_r8_imm8 = 355, Mov_r16_imm16 = 356, Mov_r32_imm32 = 357, Mov_r64_imm64 = 358, Rol_rm8_imm8 = 359, Ror_rm8_imm8 = 360, Rcl_rm8_imm8 = 361, Rcr_rm8_imm8 = 362, Shl_rm8_imm8 = 363, Shr_rm8_imm8 = 364, Sal_rm8_imm8 = 365, Sar_rm8_imm8 = 366, Rol_rm16_imm8 = 367, Rol_rm32_imm8 = 368, Rol_rm64_imm8 = 369, Ror_rm16_imm8 = 370, Ror_rm32_imm8 = 371, Ror_rm64_imm8 = 372, Rcl_rm16_imm8 = 
373, Rcl_rm32_imm8 = 374, Rcl_rm64_imm8 = 375, Rcr_rm16_imm8 = 376, Rcr_rm32_imm8 = 377, Rcr_rm64_imm8 = 378, Shl_rm16_imm8 = 379, Shl_rm32_imm8 = 380, Shl_rm64_imm8 = 381, Shr_rm16_imm8 = 382, Shr_rm32_imm8 = 383, Shr_rm64_imm8 = 384, Sal_rm16_imm8 = 385, Sal_rm32_imm8 = 386, Sal_rm64_imm8 = 387, Sar_rm16_imm8 = 388, Sar_rm32_imm8 = 389, Sar_rm64_imm8 = 390, Retnw_imm16 = 391, Retnd_imm16 = 392, Retnq_imm16 = 393, Retnw = 394, Retnd = 395, Retnq = 396, Les_r16_m1616 = 397, Les_r32_m1632 = 398, Lds_r16_m1616 = 399, Lds_r32_m1632 = 400, Mov_rm8_imm8 = 401, Xabort_imm8 = 402, Mov_rm16_imm16 = 403, Mov_rm32_imm32 = 404, Mov_rm64_imm32 = 405, Xbegin_rel16 = 406, Xbegin_rel32 = 407, Enterw_imm16_imm8 = 408, Enterd_imm16_imm8 = 409, Enterq_imm16_imm8 = 410, Leavew = 411, Leaved = 412, Leaveq = 413, Retfw_imm16 = 414, Retfd_imm16 = 415, Retfq_imm16 = 416, Retfw = 417, Retfd = 418, Retfq = 419, Int3 = 420, Int_imm8 = 421, Into = 422, Iretw = 423, Iretd = 424, Iretq = 425, Rol_rm8_1 = 426, Ror_rm8_1 = 427, Rcl_rm8_1 = 428, Rcr_rm8_1 = 429, Shl_rm8_1 = 430, Shr_rm8_1 = 431, Sal_rm8_1 = 432, Sar_rm8_1 = 433, Rol_rm16_1 = 434, Rol_rm32_1 = 435, Rol_rm64_1 = 436, Ror_rm16_1 = 437, Ror_rm32_1 = 438, Ror_rm64_1 = 439, Rcl_rm16_1 = 440, Rcl_rm32_1 = 441, Rcl_rm64_1 = 442, Rcr_rm16_1 = 443, Rcr_rm32_1 = 444, Rcr_rm64_1 = 445, Shl_rm16_1 = 446, Shl_rm32_1 = 447, Shl_rm64_1 = 448, Shr_rm16_1 = 449, Shr_rm32_1 = 450, Shr_rm64_1 = 451, Sal_rm16_1 = 452, Sal_rm32_1 = 453, Sal_rm64_1 = 454, Sar_rm16_1 = 455, Sar_rm32_1 = 456, Sar_rm64_1 = 457, Rol_rm8_CL = 458, Ror_rm8_CL = 459, Rcl_rm8_CL = 460, Rcr_rm8_CL = 461, Shl_rm8_CL = 462, Shr_rm8_CL = 463, Sal_rm8_CL = 464, Sar_rm8_CL = 465, Rol_rm16_CL = 466, Rol_rm32_CL = 467, Rol_rm64_CL = 468, Ror_rm16_CL = 469, Ror_rm32_CL = 470, Ror_rm64_CL = 471, Rcl_rm16_CL = 472, Rcl_rm32_CL = 473, Rcl_rm64_CL = 474, Rcr_rm16_CL = 475, Rcr_rm32_CL = 476, Rcr_rm64_CL = 477, Shl_rm16_CL = 478, Shl_rm32_CL = 479, Shl_rm64_CL = 480, Shr_rm16_CL = 481, Shr_rm32_CL = 482, Shr_rm64_CL = 483, Sal_rm16_CL = 484, Sal_rm32_CL = 485, Sal_rm64_CL = 486, Sar_rm16_CL = 487, Sar_rm32_CL = 488, Sar_rm64_CL = 489, Aam_imm8 = 490, Aad_imm8 = 491, Salc = 492, Xlat_m8 = 493, Fadd_m32fp = 494, Fmul_m32fp = 495, Fcom_m32fp = 496, Fcomp_m32fp = 497, Fsub_m32fp = 498, Fsubr_m32fp = 499, Fdiv_m32fp = 500, Fdivr_m32fp = 501, Fadd_st0_sti = 502, Fmul_st0_sti = 503, Fcom_st0_sti = 504, Fcomp_st0_sti = 505, Fsub_st0_sti = 506, Fsubr_st0_sti = 507, Fdiv_st0_sti = 508, Fdivr_st0_sti = 509, Fld_m32fp = 510, Fst_m32fp = 511, Fstp_m32fp = 512, Fldenv_m14byte = 513, Fldenv_m28byte = 514, Fldcw_m2byte = 515, Fnstenv_m14byte = 516, Fstenv_m14byte = 517, Fnstenv_m28byte = 518, Fstenv_m28byte = 519, Fnstcw_m2byte = 520, Fstcw_m2byte = 521, Fld_sti = 522, Fxch_st0_sti = 523, Fnop = 524, Fstpnce_sti = 525, Fchs = 526, Fabs = 527, Ftst = 528, Fxam = 529, Fld1 = 530, Fldl2t = 531, Fldl2e = 532, Fldpi = 533, Fldlg2 = 534, Fldln2 = 535, Fldz = 536, F2xm1 = 537, Fyl2x = 538, Fptan = 539, Fpatan = 540, Fxtract = 541, Fprem1 = 542, Fdecstp = 543, Fincstp = 544, Fprem = 545, Fyl2xp1 = 546, Fsqrt = 547, Fsincos = 548, Frndint = 549, Fscale = 550, Fsin = 551, Fcos = 552, Fiadd_m32int = 553, Fimul_m32int = 554, Ficom_m32int = 555, Ficomp_m32int = 556, Fisub_m32int = 557, Fisubr_m32int = 558, Fidiv_m32int = 559, Fidivr_m32int = 560, Fcmovb_st0_sti = 561, Fcmove_st0_sti = 562, Fcmovbe_st0_sti = 563, Fcmovu_st0_sti = 564, Fucompp = 565, Fild_m32int = 566, Fisttp_m32int = 567, Fist_m32int = 568, Fistp_m32int = 569, Fld_m80fp = 570, 
Fstp_m80fp = 571, Fcmovnb_st0_sti = 572, Fcmovne_st0_sti = 573, Fcmovnbe_st0_sti = 574, Fcmovnu_st0_sti = 575, Fneni = 576, Feni = 577, Fndisi = 578, Fdisi = 579, Fnclex = 580, Fclex = 581, Fninit = 582, Finit = 583, Fnsetpm = 584, Fsetpm = 585, Frstpm = 586, Fucomi_st0_sti = 587, Fcomi_st0_sti = 588, Fadd_m64fp = 589, Fmul_m64fp = 590, Fcom_m64fp = 591, Fcomp_m64fp = 592, Fsub_m64fp = 593, Fsubr_m64fp = 594, Fdiv_m64fp = 595, Fdivr_m64fp = 596, Fadd_sti_st0 = 597, Fmul_sti_st0 = 598, Fcom_st0_sti_DCD0 = 599, Fcomp_st0_sti_DCD8 = 600, Fsubr_sti_st0 = 601, Fsub_sti_st0 = 602, Fdivr_sti_st0 = 603, Fdiv_sti_st0 = 604, Fld_m64fp = 605, Fisttp_m64int = 606, Fst_m64fp = 607, Fstp_m64fp = 608, Frstor_m94byte = 609, Frstor_m108byte = 610, Fnsave_m94byte = 611, Fsave_m94byte = 612, Fnsave_m108byte = 613, Fsave_m108byte = 614, Fnstsw_m2byte = 615, Fstsw_m2byte = 616, Ffree_sti = 617, Fxch_st0_sti_DDC8 = 618, Fst_sti = 619, Fstp_sti = 620, Fucom_st0_sti = 621, Fucomp_st0_sti = 622, Fiadd_m16int = 623, Fimul_m16int = 624, Ficom_m16int = 625, Ficomp_m16int = 626, Fisub_m16int = 627, Fisubr_m16int = 628, Fidiv_m16int = 629, Fidivr_m16int = 630, Faddp_sti_st0 = 631, Fmulp_sti_st0 = 632, Fcomp_st0_sti_DED0 = 633, Fcompp = 634, Fsubrp_sti_st0 = 635, Fsubp_sti_st0 = 636, Fdivrp_sti_st0 = 637, Fdivp_sti_st0 = 638, Fild_m16int = 639, Fisttp_m16int = 640, Fist_m16int = 641, Fistp_m16int = 642, Fbld_m80bcd = 643, Fild_m64int = 644, Fbstp_m80bcd = 645, Fistp_m64int = 646, Ffreep_sti = 647, Fxch_st0_sti_DFC8 = 648, Fstp_sti_DFD0 = 649, Fstp_sti_DFD8 = 650, Fnstsw_AX = 651, Fstsw_AX = 652, Fstdw_AX = 653, Fstsg_AX = 654, Fucomip_st0_sti = 655, Fcomip_st0_sti = 656, Loopne_rel8_16_CX = 657, Loopne_rel8_32_CX = 658, Loopne_rel8_16_ECX = 659, Loopne_rel8_32_ECX = 660, Loopne_rel8_64_ECX = 661, Loopne_rel8_16_RCX = 662, Loopne_rel8_64_RCX = 663, Loope_rel8_16_CX = 664, Loope_rel8_32_CX = 665, Loope_rel8_16_ECX = 666, Loope_rel8_32_ECX = 667, Loope_rel8_64_ECX = 668, Loope_rel8_16_RCX = 669, Loope_rel8_64_RCX = 670, Loop_rel8_16_CX = 671, Loop_rel8_32_CX = 672, Loop_rel8_16_ECX = 673, Loop_rel8_32_ECX = 674, Loop_rel8_64_ECX = 675, Loop_rel8_16_RCX = 676, Loop_rel8_64_RCX = 677, Jcxz_rel8_16 = 678, Jcxz_rel8_32 = 679, Jecxz_rel8_16 = 680, Jecxz_rel8_32 = 681, Jecxz_rel8_64 = 682, Jrcxz_rel8_16 = 683, Jrcxz_rel8_64 = 684, In_AL_imm8 = 685, In_AX_imm8 = 686, In_EAX_imm8 = 687, Out_imm8_AL = 688, Out_imm8_AX = 689, Out_imm8_EAX = 690, Call_rel16 = 691, Call_rel32_32 = 692, Call_rel32_64 = 693, Jmp_rel16 = 694, Jmp_rel32_32 = 695, Jmp_rel32_64 = 696, Jmp_ptr1616 = 697, Jmp_ptr1632 = 698, Jmp_rel8_16 = 699, Jmp_rel8_32 = 700, Jmp_rel8_64 = 701, In_AL_DX = 702, In_AX_DX = 703, In_EAX_DX = 704, Out_DX_AL = 705, Out_DX_AX = 706, Out_DX_EAX = 707, Int1 = 708, Hlt = 709, Cmc = 710, Test_rm8_imm8 = 711, Test_rm8_imm8_F6r1 = 712, Not_rm8 = 713, Neg_rm8 = 714, Mul_rm8 = 715, Imul_rm8 = 716, Div_rm8 = 717, Idiv_rm8 = 718, Test_rm16_imm16 = 719, Test_rm32_imm32 = 720, Test_rm64_imm32 = 721, Test_rm16_imm16_F7r1 = 722, Test_rm32_imm32_F7r1 = 723, Test_rm64_imm32_F7r1 = 724, Not_rm16 = 725, Not_rm32 = 726, Not_rm64 = 727, Neg_rm16 = 728, Neg_rm32 = 729, Neg_rm64 = 730, Mul_rm16 = 731, Mul_rm32 = 732, Mul_rm64 = 733, Imul_rm16 = 734, Imul_rm32 = 735, Imul_rm64 = 736, Div_rm16 = 737, Div_rm32 = 738, Div_rm64 = 739, Idiv_rm16 = 740, Idiv_rm32 = 741, Idiv_rm64 = 742, Clc = 743, Stc = 744, Cli = 745, Sti = 746, Cld = 747, Std = 748, Inc_rm8 = 749, Dec_rm8 = 750, Inc_rm16 = 751, Inc_rm32 = 752, Inc_rm64 = 753, Dec_rm16 = 754, Dec_rm32 = 755, 
Dec_rm64 = 756, Call_rm16 = 757, Call_rm32 = 758, Call_rm64 = 759, Call_m1616 = 760, Call_m1632 = 761, Call_m1664 = 762, Jmp_rm16 = 763, Jmp_rm32 = 764, Jmp_rm64 = 765, Jmp_m1616 = 766, Jmp_m1632 = 767, Jmp_m1664 = 768, Push_rm16 = 769, Push_rm32 = 770, Push_rm64 = 771, Sldt_rm16 = 772, Sldt_r32m16 = 773, Sldt_r64m16 = 774, Str_rm16 = 775, Str_r32m16 = 776, Str_r64m16 = 777, Lldt_rm16 = 778, Lldt_r32m16 = 779, Lldt_r64m16 = 780, Ltr_rm16 = 781, Ltr_r32m16 = 782, Ltr_r64m16 = 783, Verr_rm16 = 784, Verr_r32m16 = 785, Verr_r64m16 = 786, Verw_rm16 = 787, Verw_r32m16 = 788, Verw_r64m16 = 789, Jmpe_rm16 = 790, Jmpe_rm32 = 791, Sgdt_m1632_16 = 792, Sgdt_m1632 = 793, Sgdt_m1664 = 794, Sidt_m1632_16 = 795, Sidt_m1632 = 796, Sidt_m1664 = 797, Lgdt_m1632_16 = 798, Lgdt_m1632 = 799, Lgdt_m1664 = 800, Lidt_m1632_16 = 801, Lidt_m1632 = 802, Lidt_m1664 = 803, Smsw_rm16 = 804, Smsw_r32m16 = 805, Smsw_r64m16 = 806, Rstorssp_m64 = 807, Lmsw_rm16 = 808, Lmsw_r32m16 = 809, Lmsw_r64m16 = 810, Invlpg_m = 811, Enclv = 812, Vmcall = 813, Vmlaunch = 814, Vmresume = 815, Vmxoff = 816, Pconfig = 817, Monitorw = 818, Monitord = 819, Monitorq = 820, Mwait = 821, Clac = 822, Stac = 823, Encls = 824, Xgetbv = 825, Xsetbv = 826, Vmfunc = 827, Xend = 828, Xtest = 829, Enclu = 830, Vmrunw = 831, Vmrund = 832, Vmrunq = 833, Vmmcall = 834, Vmloadw = 835, Vmloadd = 836, Vmloadq = 837, Vmsavew = 838, Vmsaved = 839, Vmsaveq = 840, Stgi = 841, Clgi = 842, Skinit = 843, Invlpgaw = 844, Invlpgad = 845, Invlpgaq = 846, Setssbsy = 847, Saveprevssp = 848, Rdpkru = 849, Wrpkru = 850, Swapgs = 851, Rdtscp = 852, Monitorxw = 853, Monitorxd = 854, Monitorxq = 855, Mcommit = 856, Mwaitx = 857, Clzerow = 858, Clzerod = 859, Clzeroq = 860, Rdpru = 861, Lar_r16_rm16 = 862, Lar_r32_r32m16 = 863, Lar_r64_r64m16 = 864, Lsl_r16_rm16 = 865, Lsl_r32_r32m16 = 866, Lsl_r64_r64m16 = 867, Storeall = 868, Loadall286 = 869, Syscall = 870, Clts = 871, Loadall386 = 872, Sysretd = 873, Sysretq = 874, Invd = 875, Wbinvd = 876, Wbnoinvd = 877, Cl1invmb = 878, Ud2 = 879, Reservednop_rm16_r16_0F0D = 880, Reservednop_rm32_r32_0F0D = 881, Reservednop_rm64_r64_0F0D = 882, Prefetch_m8 = 883, Prefetchw_m8 = 884, Prefetchwt1_m8 = 885, Femms = 886, Umov_rm8_r8 = 887, Umov_rm16_r16 = 888, Umov_rm32_r32 = 889, Umov_r8_rm8 = 890, Umov_r16_rm16 = 891, Umov_r32_rm32 = 892, Movups_xmm_xmmm128 = 893, VEX_Vmovups_xmm_xmmm128 = 894, VEX_Vmovups_ymm_ymmm256 = 895, EVEX_Vmovups_xmm_k1z_xmmm128 = 896, EVEX_Vmovups_ymm_k1z_ymmm256 = 897, EVEX_Vmovups_zmm_k1z_zmmm512 = 898, Movupd_xmm_xmmm128 = 899, VEX_Vmovupd_xmm_xmmm128 = 900, VEX_Vmovupd_ymm_ymmm256 = 901, EVEX_Vmovupd_xmm_k1z_xmmm128 = 902, EVEX_Vmovupd_ymm_k1z_ymmm256 = 903, EVEX_Vmovupd_zmm_k1z_zmmm512 = 904, Movss_xmm_xmmm32 = 905, VEX_Vmovss_xmm_xmm_xmm = 906, VEX_Vmovss_xmm_m32 = 907, EVEX_Vmovss_xmm_k1z_xmm_xmm = 908, EVEX_Vmovss_xmm_k1z_m32 = 909, Movsd_xmm_xmmm64 = 910, VEX_Vmovsd_xmm_xmm_xmm = 911, VEX_Vmovsd_xmm_m64 = 912, EVEX_Vmovsd_xmm_k1z_xmm_xmm = 913, EVEX_Vmovsd_xmm_k1z_m64 = 914, Movups_xmmm128_xmm = 915, VEX_Vmovups_xmmm128_xmm = 916, VEX_Vmovups_ymmm256_ymm = 917, EVEX_Vmovups_xmmm128_k1z_xmm = 918, EVEX_Vmovups_ymmm256_k1z_ymm = 919, EVEX_Vmovups_zmmm512_k1z_zmm = 920, Movupd_xmmm128_xmm = 921, VEX_Vmovupd_xmmm128_xmm = 922, VEX_Vmovupd_ymmm256_ymm = 923, EVEX_Vmovupd_xmmm128_k1z_xmm = 924, EVEX_Vmovupd_ymmm256_k1z_ymm = 925, EVEX_Vmovupd_zmmm512_k1z_zmm = 926, Movss_xmmm32_xmm = 927, VEX_Vmovss_xmm_xmm_xmm_0F11 = 928, VEX_Vmovss_m32_xmm = 929, EVEX_Vmovss_xmm_k1z_xmm_xmm_0F11 = 930, 
EVEX_Vmovss_m32_k1_xmm = 931, Movsd_xmmm64_xmm = 932, VEX_Vmovsd_xmm_xmm_xmm_0F11 = 933, VEX_Vmovsd_m64_xmm = 934, EVEX_Vmovsd_xmm_k1z_xmm_xmm_0F11 = 935, EVEX_Vmovsd_m64_k1_xmm = 936, Movhlps_xmm_xmm = 937, Movlps_xmm_m64 = 938, VEX_Vmovhlps_xmm_xmm_xmm = 939, VEX_Vmovlps_xmm_xmm_m64 = 940, EVEX_Vmovhlps_xmm_xmm_xmm = 941, EVEX_Vmovlps_xmm_xmm_m64 = 942, Movlpd_xmm_m64 = 943, VEX_Vmovlpd_xmm_xmm_m64 = 944, EVEX_Vmovlpd_xmm_xmm_m64 = 945, Movsldup_xmm_xmmm128 = 946, VEX_Vmovsldup_xmm_xmmm128 = 947, VEX_Vmovsldup_ymm_ymmm256 = 948, EVEX_Vmovsldup_xmm_k1z_xmmm128 = 949, EVEX_Vmovsldup_ymm_k1z_ymmm256 = 950, EVEX_Vmovsldup_zmm_k1z_zmmm512 = 951, Movddup_xmm_xmmm64 = 952, VEX_Vmovddup_xmm_xmmm64 = 953, VEX_Vmovddup_ymm_ymmm256 = 954, EVEX_Vmovddup_xmm_k1z_xmmm64 = 955, EVEX_Vmovddup_ymm_k1z_ymmm256 = 956, EVEX_Vmovddup_zmm_k1z_zmmm512 = 957, Movlps_m64_xmm = 958, VEX_Vmovlps_m64_xmm = 959, EVEX_Vmovlps_m64_xmm = 960, Movlpd_m64_xmm = 961, VEX_Vmovlpd_m64_xmm = 962, EVEX_Vmovlpd_m64_xmm = 963, Unpcklps_xmm_xmmm128 = 964, VEX_Vunpcklps_xmm_xmm_xmmm128 = 965, VEX_Vunpcklps_ymm_ymm_ymmm256 = 966, EVEX_Vunpcklps_xmm_k1z_xmm_xmmm128b32 = 967, EVEX_Vunpcklps_ymm_k1z_ymm_ymmm256b32 = 968, EVEX_Vunpcklps_zmm_k1z_zmm_zmmm512b32 = 969, Unpcklpd_xmm_xmmm128 = 970, VEX_Vunpcklpd_xmm_xmm_xmmm128 = 971, VEX_Vunpcklpd_ymm_ymm_ymmm256 = 972, EVEX_Vunpcklpd_xmm_k1z_xmm_xmmm128b64 = 973, EVEX_Vunpcklpd_ymm_k1z_ymm_ymmm256b64 = 974, EVEX_Vunpcklpd_zmm_k1z_zmm_zmmm512b64 = 975, Unpckhps_xmm_xmmm128 = 976, VEX_Vunpckhps_xmm_xmm_xmmm128 = 977, VEX_Vunpckhps_ymm_ymm_ymmm256 = 978, EVEX_Vunpckhps_xmm_k1z_xmm_xmmm128b32 = 979, EVEX_Vunpckhps_ymm_k1z_ymm_ymmm256b32 = 980, EVEX_Vunpckhps_zmm_k1z_zmm_zmmm512b32 = 981, Unpckhpd_xmm_xmmm128 = 982, VEX_Vunpckhpd_xmm_xmm_xmmm128 = 983, VEX_Vunpckhpd_ymm_ymm_ymmm256 = 984, EVEX_Vunpckhpd_xmm_k1z_xmm_xmmm128b64 = 985, EVEX_Vunpckhpd_ymm_k1z_ymm_ymmm256b64 = 986, EVEX_Vunpckhpd_zmm_k1z_zmm_zmmm512b64 = 987, Movlhps_xmm_xmm = 988, VEX_Vmovlhps_xmm_xmm_xmm = 989, EVEX_Vmovlhps_xmm_xmm_xmm = 990, Movhps_xmm_m64 = 991, VEX_Vmovhps_xmm_xmm_m64 = 992, EVEX_Vmovhps_xmm_xmm_m64 = 993, Movhpd_xmm_m64 = 994, VEX_Vmovhpd_xmm_xmm_m64 = 995, EVEX_Vmovhpd_xmm_xmm_m64 = 996, Movshdup_xmm_xmmm128 = 997, VEX_Vmovshdup_xmm_xmmm128 = 998, VEX_Vmovshdup_ymm_ymmm256 = 999, EVEX_Vmovshdup_xmm_k1z_xmmm128 = 1_000, EVEX_Vmovshdup_ymm_k1z_ymmm256 = 1_001, EVEX_Vmovshdup_zmm_k1z_zmmm512 = 1_002, Movhps_m64_xmm = 1_003, VEX_Vmovhps_m64_xmm = 1_004, EVEX_Vmovhps_m64_xmm = 1_005, Movhpd_m64_xmm = 1_006, VEX_Vmovhpd_m64_xmm = 1_007, EVEX_Vmovhpd_m64_xmm = 1_008, Reservednop_rm16_r16_0F18 = 1_009, Reservednop_rm32_r32_0F18 = 1_010, Reservednop_rm64_r64_0F18 = 1_011, Reservednop_rm16_r16_0F19 = 1_012, Reservednop_rm32_r32_0F19 = 1_013, Reservednop_rm64_r64_0F19 = 1_014, Reservednop_rm16_r16_0F1A = 1_015, Reservednop_rm32_r32_0F1A = 1_016, Reservednop_rm64_r64_0F1A = 1_017, Reservednop_rm16_r16_0F1B = 1_018, Reservednop_rm32_r32_0F1B = 1_019, Reservednop_rm64_r64_0F1B = 1_020, Reservednop_rm16_r16_0F1C = 1_021, Reservednop_rm32_r32_0F1C = 1_022, Reservednop_rm64_r64_0F1C = 1_023, Reservednop_rm16_r16_0F1D = 1_024, Reservednop_rm32_r32_0F1D = 1_025, Reservednop_rm64_r64_0F1D = 1_026, Reservednop_rm16_r16_0F1E = 1_027, Reservednop_rm32_r32_0F1E = 1_028, Reservednop_rm64_r64_0F1E = 1_029, Reservednop_rm16_r16_0F1F = 1_030, Reservednop_rm32_r32_0F1F = 1_031, Reservednop_rm64_r64_0F1F = 1_032, Prefetchnta_m8 = 1_033, Prefetcht0_m8 = 1_034, Prefetcht1_m8 = 1_035, Prefetcht2_m8 = 1_036, Bndldx_bnd_mib = 1_037, 
Bndmov_bnd_bndm64 = 1_038, Bndmov_bnd_bndm128 = 1_039, Bndcl_bnd_rm32 = 1_040, Bndcl_bnd_rm64 = 1_041, Bndcu_bnd_rm32 = 1_042, Bndcu_bnd_rm64 = 1_043, Bndstx_mib_bnd = 1_044, Bndmov_bndm64_bnd = 1_045, Bndmov_bndm128_bnd = 1_046, Bndmk_bnd_m32 = 1_047, Bndmk_bnd_m64 = 1_048, Bndcn_bnd_rm32 = 1_049, Bndcn_bnd_rm64 = 1_050, Cldemote_m8 = 1_051, Rdsspd_r32 = 1_052, Rdsspq_r64 = 1_053, Endbr64 = 1_054, Endbr32 = 1_055, Nop_rm16 = 1_056, Nop_rm32 = 1_057, Nop_rm64 = 1_058, Mov_r32_cr = 1_059, Mov_r64_cr = 1_060, Mov_r32_dr = 1_061, Mov_r64_dr = 1_062, Mov_cr_r32 = 1_063, Mov_cr_r64 = 1_064, Mov_dr_r32 = 1_065, Mov_dr_r64 = 1_066, Mov_r32_tr = 1_067, Mov_tr_r32 = 1_068, Movaps_xmm_xmmm128 = 1_069, VEX_Vmovaps_xmm_xmmm128 = 1_070, VEX_Vmovaps_ymm_ymmm256 = 1_071, EVEX_Vmovaps_xmm_k1z_xmmm128 = 1_072, EVEX_Vmovaps_ymm_k1z_ymmm256 = 1_073, EVEX_Vmovaps_zmm_k1z_zmmm512 = 1_074, Movapd_xmm_xmmm128 = 1_075, VEX_Vmovapd_xmm_xmmm128 = 1_076, VEX_Vmovapd_ymm_ymmm256 = 1_077, EVEX_Vmovapd_xmm_k1z_xmmm128 = 1_078, EVEX_Vmovapd_ymm_k1z_ymmm256 = 1_079, EVEX_Vmovapd_zmm_k1z_zmmm512 = 1_080, Movaps_xmmm128_xmm = 1_081, VEX_Vmovaps_xmmm128_xmm = 1_082, VEX_Vmovaps_ymmm256_ymm = 1_083, EVEX_Vmovaps_xmmm128_k1z_xmm = 1_084, EVEX_Vmovaps_ymmm256_k1z_ymm = 1_085, EVEX_Vmovaps_zmmm512_k1z_zmm = 1_086, Movapd_xmmm128_xmm = 1_087, VEX_Vmovapd_xmmm128_xmm = 1_088, VEX_Vmovapd_ymmm256_ymm = 1_089, EVEX_Vmovapd_xmmm128_k1z_xmm = 1_090, EVEX_Vmovapd_ymmm256_k1z_ymm = 1_091, EVEX_Vmovapd_zmmm512_k1z_zmm = 1_092, Cvtpi2ps_xmm_mmm64 = 1_093, Cvtpi2pd_xmm_mmm64 = 1_094, Cvtsi2ss_xmm_rm32 = 1_095, Cvtsi2ss_xmm_rm64 = 1_096, VEX_Vcvtsi2ss_xmm_xmm_rm32 = 1_097, VEX_Vcvtsi2ss_xmm_xmm_rm64 = 1_098, EVEX_Vcvtsi2ss_xmm_xmm_rm32_er = 1_099, EVEX_Vcvtsi2ss_xmm_xmm_rm64_er = 1_100, Cvtsi2sd_xmm_rm32 = 1_101, Cvtsi2sd_xmm_rm64 = 1_102, VEX_Vcvtsi2sd_xmm_xmm_rm32 = 1_103, VEX_Vcvtsi2sd_xmm_xmm_rm64 = 1_104, EVEX_Vcvtsi2sd_xmm_xmm_rm32_er = 1_105, EVEX_Vcvtsi2sd_xmm_xmm_rm64_er = 1_106, Movntps_m128_xmm = 1_107, VEX_Vmovntps_m128_xmm = 1_108, VEX_Vmovntps_m256_ymm = 1_109, EVEX_Vmovntps_m128_xmm = 1_110, EVEX_Vmovntps_m256_ymm = 1_111, EVEX_Vmovntps_m512_zmm = 1_112, Movntpd_m128_xmm = 1_113, VEX_Vmovntpd_m128_xmm = 1_114, VEX_Vmovntpd_m256_ymm = 1_115, EVEX_Vmovntpd_m128_xmm = 1_116, EVEX_Vmovntpd_m256_ymm = 1_117, EVEX_Vmovntpd_m512_zmm = 1_118, Movntss_m32_xmm = 1_119, Movntsd_m64_xmm = 1_120, Cvttps2pi_mm_xmmm64 = 1_121, Cvttpd2pi_mm_xmmm128 = 1_122, Cvttss2si_r32_xmmm32 = 1_123, Cvttss2si_r64_xmmm32 = 1_124, VEX_Vcvttss2si_r32_xmmm32 = 1_125, VEX_Vcvttss2si_r64_xmmm32 = 1_126, EVEX_Vcvttss2si_r32_xmmm32_sae = 1_127, EVEX_Vcvttss2si_r64_xmmm32_sae = 1_128, Cvttsd2si_r32_xmmm64 = 1_129, Cvttsd2si_r64_xmmm64 = 1_130, VEX_Vcvttsd2si_r32_xmmm64 = 1_131, VEX_Vcvttsd2si_r64_xmmm64 = 1_132, EVEX_Vcvttsd2si_r32_xmmm64_sae = 1_133, EVEX_Vcvttsd2si_r64_xmmm64_sae = 1_134, Cvtps2pi_mm_xmmm64 = 1_135, Cvtpd2pi_mm_xmmm128 = 1_136, Cvtss2si_r32_xmmm32 = 1_137, Cvtss2si_r64_xmmm32 = 1_138, VEX_Vcvtss2si_r32_xmmm32 = 1_139, VEX_Vcvtss2si_r64_xmmm32 = 1_140, EVEX_Vcvtss2si_r32_xmmm32_er = 1_141, EVEX_Vcvtss2si_r64_xmmm32_er = 1_142, Cvtsd2si_r32_xmmm64 = 1_143, Cvtsd2si_r64_xmmm64 = 1_144, VEX_Vcvtsd2si_r32_xmmm64 = 1_145, VEX_Vcvtsd2si_r64_xmmm64 = 1_146, EVEX_Vcvtsd2si_r32_xmmm64_er = 1_147, EVEX_Vcvtsd2si_r64_xmmm64_er = 1_148, Ucomiss_xmm_xmmm32 = 1_149, VEX_Vucomiss_xmm_xmmm32 = 1_150, EVEX_Vucomiss_xmm_xmmm32_sae = 1_151, Ucomisd_xmm_xmmm64 = 1_152, VEX_Vucomisd_xmm_xmmm64 = 1_153, EVEX_Vucomisd_xmm_xmmm64_sae = 1_154, Comiss_xmm_xmmm32 = 
1_155, Comisd_xmm_xmmm64 = 1_156, VEX_Vcomiss_xmm_xmmm32 = 1_157, VEX_Vcomisd_xmm_xmmm64 = 1_158, EVEX_Vcomiss_xmm_xmmm32_sae = 1_159, EVEX_Vcomisd_xmm_xmmm64_sae = 1_160, Wrmsr = 1_161, Rdtsc = 1_162, Rdmsr = 1_163, Rdpmc = 1_164, Sysenter = 1_165, Sysexitd = 1_166, Sysexitq = 1_167, Getsecd = 1_168, Cmovo_r16_rm16 = 1_169, Cmovo_r32_rm32 = 1_170, Cmovo_r64_rm64 = 1_171, Cmovno_r16_rm16 = 1_172, Cmovno_r32_rm32 = 1_173, Cmovno_r64_rm64 = 1_174, Cmovb_r16_rm16 = 1_175, Cmovb_r32_rm32 = 1_176, Cmovb_r64_rm64 = 1_177, Cmovae_r16_rm16 = 1_178, Cmovae_r32_rm32 = 1_179, Cmovae_r64_rm64 = 1_180, Cmove_r16_rm16 = 1_181, Cmove_r32_rm32 = 1_182, Cmove_r64_rm64 = 1_183, Cmovne_r16_rm16 = 1_184, Cmovne_r32_rm32 = 1_185, Cmovne_r64_rm64 = 1_186, Cmovbe_r16_rm16 = 1_187, Cmovbe_r32_rm32 = 1_188, Cmovbe_r64_rm64 = 1_189, Cmova_r16_rm16 = 1_190, Cmova_r32_rm32 = 1_191, Cmova_r64_rm64 = 1_192, Cmovs_r16_rm16 = 1_193, Cmovs_r32_rm32 = 1_194, Cmovs_r64_rm64 = 1_195, Cmovns_r16_rm16 = 1_196, Cmovns_r32_rm32 = 1_197, Cmovns_r64_rm64 = 1_198, Cmovp_r16_rm16 = 1_199, Cmovp_r32_rm32 = 1_200, Cmovp_r64_rm64 = 1_201, Cmovnp_r16_rm16 = 1_202, Cmovnp_r32_rm32 = 1_203, Cmovnp_r64_rm64 = 1_204, Cmovl_r16_rm16 = 1_205, Cmovl_r32_rm32 = 1_206, Cmovl_r64_rm64 = 1_207, Cmovge_r16_rm16 = 1_208, Cmovge_r32_rm32 = 1_209, Cmovge_r64_rm64 = 1_210, Cmovle_r16_rm16 = 1_211, Cmovle_r32_rm32 = 1_212, Cmovle_r64_rm64 = 1_213, Cmovg_r16_rm16 = 1_214, Cmovg_r32_rm32 = 1_215, Cmovg_r64_rm64 = 1_216, VEX_Kandw_kr_kr_kr = 1_217, VEX_Kandq_kr_kr_kr = 1_218, VEX_Kandb_kr_kr_kr = 1_219, VEX_Kandd_kr_kr_kr = 1_220, VEX_Kandnw_kr_kr_kr = 1_221, VEX_Kandnq_kr_kr_kr = 1_222, VEX_Kandnb_kr_kr_kr = 1_223, VEX_Kandnd_kr_kr_kr = 1_224, VEX_Knotw_kr_kr = 1_225, VEX_Knotq_kr_kr = 1_226, VEX_Knotb_kr_kr = 1_227, VEX_Knotd_kr_kr = 1_228, VEX_Korw_kr_kr_kr = 1_229, VEX_Korq_kr_kr_kr = 1_230, VEX_Korb_kr_kr_kr = 1_231, VEX_Kord_kr_kr_kr = 1_232, VEX_Kxnorw_kr_kr_kr = 1_233, VEX_Kxnorq_kr_kr_kr = 1_234, VEX_Kxnorb_kr_kr_kr = 1_235, VEX_Kxnord_kr_kr_kr = 1_236, VEX_Kxorw_kr_kr_kr = 1_237, VEX_Kxorq_kr_kr_kr = 1_238, VEX_Kxorb_kr_kr_kr = 1_239, VEX_Kxord_kr_kr_kr = 1_240, VEX_Kaddw_kr_kr_kr = 1_241, VEX_Kaddq_kr_kr_kr = 1_242, VEX_Kaddb_kr_kr_kr = 1_243, VEX_Kaddd_kr_kr_kr = 1_244, VEX_Kunpckwd_kr_kr_kr = 1_245, VEX_Kunpckdq_kr_kr_kr = 1_246, VEX_Kunpckbw_kr_kr_kr = 1_247, Movmskps_r32_xmm = 1_248, Movmskps_r64_xmm = 1_249, VEX_Vmovmskps_r32_xmm = 1_250, VEX_Vmovmskps_r64_xmm = 1_251, VEX_Vmovmskps_r32_ymm = 1_252, VEX_Vmovmskps_r64_ymm = 1_253, Movmskpd_r32_xmm = 1_254, Movmskpd_r64_xmm = 1_255, VEX_Vmovmskpd_r32_xmm = 1_256, VEX_Vmovmskpd_r64_xmm = 1_257, VEX_Vmovmskpd_r32_ymm = 1_258, VEX_Vmovmskpd_r64_ymm = 1_259, Sqrtps_xmm_xmmm128 = 1_260, VEX_Vsqrtps_xmm_xmmm128 = 1_261, VEX_Vsqrtps_ymm_ymmm256 = 1_262, EVEX_Vsqrtps_xmm_k1z_xmmm128b32 = 1_263, EVEX_Vsqrtps_ymm_k1z_ymmm256b32 = 1_264, EVEX_Vsqrtps_zmm_k1z_zmmm512b32_er = 1_265, Sqrtpd_xmm_xmmm128 = 1_266, VEX_Vsqrtpd_xmm_xmmm128 = 1_267, VEX_Vsqrtpd_ymm_ymmm256 = 1_268, EVEX_Vsqrtpd_xmm_k1z_xmmm128b64 = 1_269, EVEX_Vsqrtpd_ymm_k1z_ymmm256b64 = 1_270, EVEX_Vsqrtpd_zmm_k1z_zmmm512b64_er = 1_271, Sqrtss_xmm_xmmm32 = 1_272, VEX_Vsqrtss_xmm_xmm_xmmm32 = 1_273, EVEX_Vsqrtss_xmm_k1z_xmm_xmmm32_er = 1_274, Sqrtsd_xmm_xmmm64 = 1_275, VEX_Vsqrtsd_xmm_xmm_xmmm64 = 1_276, EVEX_Vsqrtsd_xmm_k1z_xmm_xmmm64_er = 1_277, Rsqrtps_xmm_xmmm128 = 1_278, VEX_Vrsqrtps_xmm_xmmm128 = 1_279, VEX_Vrsqrtps_ymm_ymmm256 = 1_280, Rsqrtss_xmm_xmmm32 = 1_281, VEX_Vrsqrtss_xmm_xmm_xmmm32 = 1_282, Rcpps_xmm_xmmm128 = 1_283, 
VEX_Vrcpps_xmm_xmmm128 = 1_284, VEX_Vrcpps_ymm_ymmm256 = 1_285, Rcpss_xmm_xmmm32 = 1_286, VEX_Vrcpss_xmm_xmm_xmmm32 = 1_287, Andps_xmm_xmmm128 = 1_288, VEX_Vandps_xmm_xmm_xmmm128 = 1_289, VEX_Vandps_ymm_ymm_ymmm256 = 1_290, EVEX_Vandps_xmm_k1z_xmm_xmmm128b32 = 1_291, EVEX_Vandps_ymm_k1z_ymm_ymmm256b32 = 1_292, EVEX_Vandps_zmm_k1z_zmm_zmmm512b32 = 1_293, Andpd_xmm_xmmm128 = 1_294, VEX_Vandpd_xmm_xmm_xmmm128 = 1_295, VEX_Vandpd_ymm_ymm_ymmm256 = 1_296, EVEX_Vandpd_xmm_k1z_xmm_xmmm128b64 = 1_297, EVEX_Vandpd_ymm_k1z_ymm_ymmm256b64 = 1_298, EVEX_Vandpd_zmm_k1z_zmm_zmmm512b64 = 1_299, Andnps_xmm_xmmm128 = 1_300, VEX_Vandnps_xmm_xmm_xmmm128 = 1_301, VEX_Vandnps_ymm_ymm_ymmm256 = 1_302, EVEX_Vandnps_xmm_k1z_xmm_xmmm128b32 = 1_303, EVEX_Vandnps_ymm_k1z_ymm_ymmm256b32 = 1_304, EVEX_Vandnps_zmm_k1z_zmm_zmmm512b32 = 1_305, Andnpd_xmm_xmmm128 = 1_306, VEX_Vandnpd_xmm_xmm_xmmm128 = 1_307, VEX_Vandnpd_ymm_ymm_ymmm256 = 1_308, EVEX_Vandnpd_xmm_k1z_xmm_xmmm128b64 = 1_309, EVEX_Vandnpd_ymm_k1z_ymm_ymmm256b64 = 1_310, EVEX_Vandnpd_zmm_k1z_zmm_zmmm512b64 = 1_311, Orps_xmm_xmmm128 = 1_312, VEX_Vorps_xmm_xmm_xmmm128 = 1_313, VEX_Vorps_ymm_ymm_ymmm256 = 1_314, EVEX_Vorps_xmm_k1z_xmm_xmmm128b32 = 1_315, EVEX_Vorps_ymm_k1z_ymm_ymmm256b32 = 1_316, EVEX_Vorps_zmm_k1z_zmm_zmmm512b32 = 1_317, Orpd_xmm_xmmm128 = 1_318, VEX_Vorpd_xmm_xmm_xmmm128 = 1_319, VEX_Vorpd_ymm_ymm_ymmm256 = 1_320, EVEX_Vorpd_xmm_k1z_xmm_xmmm128b64 = 1_321, EVEX_Vorpd_ymm_k1z_ymm_ymmm256b64 = 1_322, EVEX_Vorpd_zmm_k1z_zmm_zmmm512b64 = 1_323, Xorps_xmm_xmmm128 = 1_324, VEX_Vxorps_xmm_xmm_xmmm128 = 1_325, VEX_Vxorps_ymm_ymm_ymmm256 = 1_326, EVEX_Vxorps_xmm_k1z_xmm_xmmm128b32 = 1_327, EVEX_Vxorps_ymm_k1z_ymm_ymmm256b32 = 1_328, EVEX_Vxorps_zmm_k1z_zmm_zmmm512b32 = 1_329, Xorpd_xmm_xmmm128 = 1_330, VEX_Vxorpd_xmm_xmm_xmmm128 = 1_331, VEX_Vxorpd_ymm_ymm_ymmm256 = 1_332, EVEX_Vxorpd_xmm_k1z_xmm_xmmm128b64 = 1_333, EVEX_Vxorpd_ymm_k1z_ymm_ymmm256b64 = 1_334, EVEX_Vxorpd_zmm_k1z_zmm_zmmm512b64 = 1_335, Addps_xmm_xmmm128 = 1_336, VEX_Vaddps_xmm_xmm_xmmm128 = 1_337, VEX_Vaddps_ymm_ymm_ymmm256 = 1_338, EVEX_Vaddps_xmm_k1z_xmm_xmmm128b32 = 1_339, EVEX_Vaddps_ymm_k1z_ymm_ymmm256b32 = 1_340, EVEX_Vaddps_zmm_k1z_zmm_zmmm512b32_er = 1_341, Addpd_xmm_xmmm128 = 1_342, VEX_Vaddpd_xmm_xmm_xmmm128 = 1_343, VEX_Vaddpd_ymm_ymm_ymmm256 = 1_344, EVEX_Vaddpd_xmm_k1z_xmm_xmmm128b64 = 1_345, EVEX_Vaddpd_ymm_k1z_ymm_ymmm256b64 = 1_346, EVEX_Vaddpd_zmm_k1z_zmm_zmmm512b64_er = 1_347, Addss_xmm_xmmm32 = 1_348, VEX_Vaddss_xmm_xmm_xmmm32 = 1_349, EVEX_Vaddss_xmm_k1z_xmm_xmmm32_er = 1_350, Addsd_xmm_xmmm64 = 1_351, VEX_Vaddsd_xmm_xmm_xmmm64 = 1_352, EVEX_Vaddsd_xmm_k1z_xmm_xmmm64_er = 1_353, Mulps_xmm_xmmm128 = 1_354, VEX_Vmulps_xmm_xmm_xmmm128 = 1_355, VEX_Vmulps_ymm_ymm_ymmm256 = 1_356, EVEX_Vmulps_xmm_k1z_xmm_xmmm128b32 = 1_357, EVEX_Vmulps_ymm_k1z_ymm_ymmm256b32 = 1_358, EVEX_Vmulps_zmm_k1z_zmm_zmmm512b32_er = 1_359, Mulpd_xmm_xmmm128 = 1_360, VEX_Vmulpd_xmm_xmm_xmmm128 = 1_361, VEX_Vmulpd_ymm_ymm_ymmm256 = 1_362, EVEX_Vmulpd_xmm_k1z_xmm_xmmm128b64 = 1_363, EVEX_Vmulpd_ymm_k1z_ymm_ymmm256b64 = 1_364, EVEX_Vmulpd_zmm_k1z_zmm_zmmm512b64_er = 1_365, Mulss_xmm_xmmm32 = 1_366, VEX_Vmulss_xmm_xmm_xmmm32 = 1_367, EVEX_Vmulss_xmm_k1z_xmm_xmmm32_er = 1_368, Mulsd_xmm_xmmm64 = 1_369, VEX_Vmulsd_xmm_xmm_xmmm64 = 1_370, EVEX_Vmulsd_xmm_k1z_xmm_xmmm64_er = 1_371, Cvtps2pd_xmm_xmmm64 = 1_372, VEX_Vcvtps2pd_xmm_xmmm64 = 1_373, VEX_Vcvtps2pd_ymm_xmmm128 = 1_374, EVEX_Vcvtps2pd_xmm_k1z_xmmm64b32 = 1_375, EVEX_Vcvtps2pd_ymm_k1z_xmmm128b32 = 1_376, EVEX_Vcvtps2pd_zmm_k1z_ymmm256b32_sae = 1_377, 
Cvtpd2ps_xmm_xmmm128 = 1_378, VEX_Vcvtpd2ps_xmm_xmmm128 = 1_379, VEX_Vcvtpd2ps_xmm_ymmm256 = 1_380, EVEX_Vcvtpd2ps_xmm_k1z_xmmm128b64 = 1_381, EVEX_Vcvtpd2ps_xmm_k1z_ymmm256b64 = 1_382, EVEX_Vcvtpd2ps_ymm_k1z_zmmm512b64_er = 1_383, Cvtss2sd_xmm_xmmm32 = 1_384, VEX_Vcvtss2sd_xmm_xmm_xmmm32 = 1_385, EVEX_Vcvtss2sd_xmm_k1z_xmm_xmmm32_sae = 1_386, Cvtsd2ss_xmm_xmmm64 = 1_387, VEX_Vcvtsd2ss_xmm_xmm_xmmm64 = 1_388, EVEX_Vcvtsd2ss_xmm_k1z_xmm_xmmm64_er = 1_389, Cvtdq2ps_xmm_xmmm128 = 1_390, VEX_Vcvtdq2ps_xmm_xmmm128 = 1_391, VEX_Vcvtdq2ps_ymm_ymmm256 = 1_392, EVEX_Vcvtdq2ps_xmm_k1z_xmmm128b32 = 1_393, EVEX_Vcvtdq2ps_ymm_k1z_ymmm256b32 = 1_394, EVEX_Vcvtdq2ps_zmm_k1z_zmmm512b32_er = 1_395, EVEX_Vcvtqq2ps_xmm_k1z_xmmm128b64 = 1_396, EVEX_Vcvtqq2ps_xmm_k1z_ymmm256b64 = 1_397, EVEX_Vcvtqq2ps_ymm_k1z_zmmm512b64_er = 1_398, Cvtps2dq_xmm_xmmm128 = 1_399, VEX_Vcvtps2dq_xmm_xmmm128 = 1_400, VEX_Vcvtps2dq_ymm_ymmm256 = 1_401, EVEX_Vcvtps2dq_xmm_k1z_xmmm128b32 = 1_402, EVEX_Vcvtps2dq_ymm_k1z_ymmm256b32 = 1_403, EVEX_Vcvtps2dq_zmm_k1z_zmmm512b32_er = 1_404, Cvttps2dq_xmm_xmmm128 = 1_405, VEX_Vcvttps2dq_xmm_xmmm128 = 1_406, VEX_Vcvttps2dq_ymm_ymmm256 = 1_407, EVEX_Vcvttps2dq_xmm_k1z_xmmm128b32 = 1_408, EVEX_Vcvttps2dq_ymm_k1z_ymmm256b32 = 1_409, EVEX_Vcvttps2dq_zmm_k1z_zmmm512b32_sae = 1_410, Subps_xmm_xmmm128 = 1_411, VEX_Vsubps_xmm_xmm_xmmm128 = 1_412, VEX_Vsubps_ymm_ymm_ymmm256 = 1_413, EVEX_Vsubps_xmm_k1z_xmm_xmmm128b32 = 1_414, EVEX_Vsubps_ymm_k1z_ymm_ymmm256b32 = 1_415, EVEX_Vsubps_zmm_k1z_zmm_zmmm512b32_er = 1_416, Subpd_xmm_xmmm128 = 1_417, VEX_Vsubpd_xmm_xmm_xmmm128 = 1_418, VEX_Vsubpd_ymm_ymm_ymmm256 = 1_419, EVEX_Vsubpd_xmm_k1z_xmm_xmmm128b64 = 1_420, EVEX_Vsubpd_ymm_k1z_ymm_ymmm256b64 = 1_421, EVEX_Vsubpd_zmm_k1z_zmm_zmmm512b64_er = 1_422, Subss_xmm_xmmm32 = 1_423, VEX_Vsubss_xmm_xmm_xmmm32 = 1_424, EVEX_Vsubss_xmm_k1z_xmm_xmmm32_er = 1_425, Subsd_xmm_xmmm64 = 1_426, VEX_Vsubsd_xmm_xmm_xmmm64 = 1_427, EVEX_Vsubsd_xmm_k1z_xmm_xmmm64_er = 1_428, Minps_xmm_xmmm128 = 1_429, VEX_Vminps_xmm_xmm_xmmm128 = 1_430, VEX_Vminps_ymm_ymm_ymmm256 = 1_431, EVEX_Vminps_xmm_k1z_xmm_xmmm128b32 = 1_432, EVEX_Vminps_ymm_k1z_ymm_ymmm256b32 = 1_433, EVEX_Vminps_zmm_k1z_zmm_zmmm512b32_sae = 1_434, Minpd_xmm_xmmm128 = 1_435, VEX_Vminpd_xmm_xmm_xmmm128 = 1_436, VEX_Vminpd_ymm_ymm_ymmm256 = 1_437, EVEX_Vminpd_xmm_k1z_xmm_xmmm128b64 = 1_438, EVEX_Vminpd_ymm_k1z_ymm_ymmm256b64 = 1_439, EVEX_Vminpd_zmm_k1z_zmm_zmmm512b64_sae = 1_440, Minss_xmm_xmmm32 = 1_441, VEX_Vminss_xmm_xmm_xmmm32 = 1_442, EVEX_Vminss_xmm_k1z_xmm_xmmm32_sae = 1_443, Minsd_xmm_xmmm64 = 1_444, VEX_Vminsd_xmm_xmm_xmmm64 = 1_445, EVEX_Vminsd_xmm_k1z_xmm_xmmm64_sae = 1_446, Divps_xmm_xmmm128 = 1_447, VEX_Vdivps_xmm_xmm_xmmm128 = 1_448, VEX_Vdivps_ymm_ymm_ymmm256 = 1_449, EVEX_Vdivps_xmm_k1z_xmm_xmmm128b32 = 1_450, EVEX_Vdivps_ymm_k1z_ymm_ymmm256b32 = 1_451, EVEX_Vdivps_zmm_k1z_zmm_zmmm512b32_er = 1_452, Divpd_xmm_xmmm128 = 1_453, VEX_Vdivpd_xmm_xmm_xmmm128 = 1_454, VEX_Vdivpd_ymm_ymm_ymmm256 = 1_455, EVEX_Vdivpd_xmm_k1z_xmm_xmmm128b64 = 1_456, EVEX_Vdivpd_ymm_k1z_ymm_ymmm256b64 = 1_457, EVEX_Vdivpd_zmm_k1z_zmm_zmmm512b64_er = 1_458, Divss_xmm_xmmm32 = 1_459, VEX_Vdivss_xmm_xmm_xmmm32 = 1_460, EVEX_Vdivss_xmm_k1z_xmm_xmmm32_er = 1_461, Divsd_xmm_xmmm64 = 1_462, VEX_Vdivsd_xmm_xmm_xmmm64 = 1_463, EVEX_Vdivsd_xmm_k1z_xmm_xmmm64_er = 1_464, Maxps_xmm_xmmm128 = 1_465, VEX_Vmaxps_xmm_xmm_xmmm128 = 1_466, VEX_Vmaxps_ymm_ymm_ymmm256 = 1_467, EVEX_Vmaxps_xmm_k1z_xmm_xmmm128b32 = 1_468, EVEX_Vmaxps_ymm_k1z_ymm_ymmm256b32 = 1_469, EVEX_Vmaxps_zmm_k1z_zmm_zmmm512b32_sae = 
1_470, Maxpd_xmm_xmmm128 = 1_471, VEX_Vmaxpd_xmm_xmm_xmmm128 = 1_472, VEX_Vmaxpd_ymm_ymm_ymmm256 = 1_473, EVEX_Vmaxpd_xmm_k1z_xmm_xmmm128b64 = 1_474, EVEX_Vmaxpd_ymm_k1z_ymm_ymmm256b64 = 1_475, EVEX_Vmaxpd_zmm_k1z_zmm_zmmm512b64_sae = 1_476, Maxss_xmm_xmmm32 = 1_477, VEX_Vmaxss_xmm_xmm_xmmm32 = 1_478, EVEX_Vmaxss_xmm_k1z_xmm_xmmm32_sae = 1_479, Maxsd_xmm_xmmm64 = 1_480, VEX_Vmaxsd_xmm_xmm_xmmm64 = 1_481, EVEX_Vmaxsd_xmm_k1z_xmm_xmmm64_sae = 1_482, Punpcklbw_mm_mmm32 = 1_483, Punpcklbw_xmm_xmmm128 = 1_484, VEX_Vpunpcklbw_xmm_xmm_xmmm128 = 1_485, VEX_Vpunpcklbw_ymm_ymm_ymmm256 = 1_486, EVEX_Vpunpcklbw_xmm_k1z_xmm_xmmm128 = 1_487, EVEX_Vpunpcklbw_ymm_k1z_ymm_ymmm256 = 1_488, EVEX_Vpunpcklbw_zmm_k1z_zmm_zmmm512 = 1_489, Punpcklwd_mm_mmm32 = 1_490, Punpcklwd_xmm_xmmm128 = 1_491, VEX_Vpunpcklwd_xmm_xmm_xmmm128 = 1_492, VEX_Vpunpcklwd_ymm_ymm_ymmm256 = 1_493, EVEX_Vpunpcklwd_xmm_k1z_xmm_xmmm128 = 1_494, EVEX_Vpunpcklwd_ymm_k1z_ymm_ymmm256 = 1_495, EVEX_Vpunpcklwd_zmm_k1z_zmm_zmmm512 = 1_496, Punpckldq_mm_mmm32 = 1_497, Punpckldq_xmm_xmmm128 = 1_498, VEX_Vpunpckldq_xmm_xmm_xmmm128 = 1_499, VEX_Vpunpckldq_ymm_ymm_ymmm256 = 1_500, EVEX_Vpunpckldq_xmm_k1z_xmm_xmmm128b32 = 1_501, EVEX_Vpunpckldq_ymm_k1z_ymm_ymmm256b32 = 1_502, EVEX_Vpunpckldq_zmm_k1z_zmm_zmmm512b32 = 1_503, Packsswb_mm_mmm64 = 1_504, Packsswb_xmm_xmmm128 = 1_505, VEX_Vpacksswb_xmm_xmm_xmmm128 = 1_506, VEX_Vpacksswb_ymm_ymm_ymmm256 = 1_507, EVEX_Vpacksswb_xmm_k1z_xmm_xmmm128 = 1_508, EVEX_Vpacksswb_ymm_k1z_ymm_ymmm256 = 1_509, EVEX_Vpacksswb_zmm_k1z_zmm_zmmm512 = 1_510, Pcmpgtb_mm_mmm64 = 1_511, Pcmpgtb_xmm_xmmm128 = 1_512, VEX_Vpcmpgtb_xmm_xmm_xmmm128 = 1_513, VEX_Vpcmpgtb_ymm_ymm_ymmm256 = 1_514, EVEX_Vpcmpgtb_kr_k1_xmm_xmmm128 = 1_515, EVEX_Vpcmpgtb_kr_k1_ymm_ymmm256 = 1_516, EVEX_Vpcmpgtb_kr_k1_zmm_zmmm512 = 1_517, Pcmpgtw_mm_mmm64 = 1_518, Pcmpgtw_xmm_xmmm128 = 1_519, VEX_Vpcmpgtw_xmm_xmm_xmmm128 = 1_520, VEX_Vpcmpgtw_ymm_ymm_ymmm256 = 1_521, EVEX_Vpcmpgtw_kr_k1_xmm_xmmm128 = 1_522, EVEX_Vpcmpgtw_kr_k1_ymm_ymmm256 = 1_523, EVEX_Vpcmpgtw_kr_k1_zmm_zmmm512 = 1_524, Pcmpgtd_mm_mmm64 = 1_525, Pcmpgtd_xmm_xmmm128 = 1_526, VEX_Vpcmpgtd_xmm_xmm_xmmm128 = 1_527, VEX_Vpcmpgtd_ymm_ymm_ymmm256 = 1_528, EVEX_Vpcmpgtd_kr_k1_xmm_xmmm128b32 = 1_529, EVEX_Vpcmpgtd_kr_k1_ymm_ymmm256b32 = 1_530, EVEX_Vpcmpgtd_kr_k1_zmm_zmmm512b32 = 1_531, Packuswb_mm_mmm64 = 1_532, Packuswb_xmm_xmmm128 = 1_533, VEX_Vpackuswb_xmm_xmm_xmmm128 = 1_534, VEX_Vpackuswb_ymm_ymm_ymmm256 = 1_535, EVEX_Vpackuswb_xmm_k1z_xmm_xmmm128 = 1_536, EVEX_Vpackuswb_ymm_k1z_ymm_ymmm256 = 1_537, EVEX_Vpackuswb_zmm_k1z_zmm_zmmm512 = 1_538, Punpckhbw_mm_mmm64 = 1_539, Punpckhbw_xmm_xmmm128 = 1_540, VEX_Vpunpckhbw_xmm_xmm_xmmm128 = 1_541, VEX_Vpunpckhbw_ymm_ymm_ymmm256 = 1_542, EVEX_Vpunpckhbw_xmm_k1z_xmm_xmmm128 = 1_543, EVEX_Vpunpckhbw_ymm_k1z_ymm_ymmm256 = 1_544, EVEX_Vpunpckhbw_zmm_k1z_zmm_zmmm512 = 1_545, Punpckhwd_mm_mmm64 = 1_546, Punpckhwd_xmm_xmmm128 = 1_547, VEX_Vpunpckhwd_xmm_xmm_xmmm128 = 1_548, VEX_Vpunpckhwd_ymm_ymm_ymmm256 = 1_549, EVEX_Vpunpckhwd_xmm_k1z_xmm_xmmm128 = 1_550, EVEX_Vpunpckhwd_ymm_k1z_ymm_ymmm256 = 1_551, EVEX_Vpunpckhwd_zmm_k1z_zmm_zmmm512 = 1_552, Punpckhdq_mm_mmm64 = 1_553, Punpckhdq_xmm_xmmm128 = 1_554, VEX_Vpunpckhdq_xmm_xmm_xmmm128 = 1_555, VEX_Vpunpckhdq_ymm_ymm_ymmm256 = 1_556, EVEX_Vpunpckhdq_xmm_k1z_xmm_xmmm128b32 = 1_557, EVEX_Vpunpckhdq_ymm_k1z_ymm_ymmm256b32 = 1_558, EVEX_Vpunpckhdq_zmm_k1z_zmm_zmmm512b32 = 1_559, Packssdw_mm_mmm64 = 1_560, Packssdw_xmm_xmmm128 = 1_561, VEX_Vpackssdw_xmm_xmm_xmmm128 = 1_562, VEX_Vpackssdw_ymm_ymm_ymmm256 = 1_563, 
EVEX_Vpackssdw_xmm_k1z_xmm_xmmm128b32 = 1_564, EVEX_Vpackssdw_ymm_k1z_ymm_ymmm256b32 = 1_565, EVEX_Vpackssdw_zmm_k1z_zmm_zmmm512b32 = 1_566, Punpcklqdq_xmm_xmmm128 = 1_567, VEX_Vpunpcklqdq_xmm_xmm_xmmm128 = 1_568, VEX_Vpunpcklqdq_ymm_ymm_ymmm256 = 1_569, EVEX_Vpunpcklqdq_xmm_k1z_xmm_xmmm128b64 = 1_570, EVEX_Vpunpcklqdq_ymm_k1z_ymm_ymmm256b64 = 1_571, EVEX_Vpunpcklqdq_zmm_k1z_zmm_zmmm512b64 = 1_572, Punpckhqdq_xmm_xmmm128 = 1_573, VEX_Vpunpckhqdq_xmm_xmm_xmmm128 = 1_574, VEX_Vpunpckhqdq_ymm_ymm_ymmm256 = 1_575, EVEX_Vpunpckhqdq_xmm_k1z_xmm_xmmm128b64 = 1_576, EVEX_Vpunpckhqdq_ymm_k1z_ymm_ymmm256b64 = 1_577, EVEX_Vpunpckhqdq_zmm_k1z_zmm_zmmm512b64 = 1_578, Movd_mm_rm32 = 1_579, Movq_mm_rm64 = 1_580, Movd_xmm_rm32 = 1_581, Movq_xmm_rm64 = 1_582, VEX_Vmovd_xmm_rm32 = 1_583, VEX_Vmovq_xmm_rm64 = 1_584, EVEX_Vmovd_xmm_rm32 = 1_585, EVEX_Vmovq_xmm_rm64 = 1_586, Movq_mm_mmm64 = 1_587, Movdqa_xmm_xmmm128 = 1_588, VEX_Vmovdqa_xmm_xmmm128 = 1_589, VEX_Vmovdqa_ymm_ymmm256 = 1_590, EVEX_Vmovdqa32_xmm_k1z_xmmm128 = 1_591, EVEX_Vmovdqa32_ymm_k1z_ymmm256 = 1_592, EVEX_Vmovdqa32_zmm_k1z_zmmm512 = 1_593, EVEX_Vmovdqa64_xmm_k1z_xmmm128 = 1_594, EVEX_Vmovdqa64_ymm_k1z_ymmm256 = 1_595, EVEX_Vmovdqa64_zmm_k1z_zmmm512 = 1_596, Movdqu_xmm_xmmm128 = 1_597, VEX_Vmovdqu_xmm_xmmm128 = 1_598, VEX_Vmovdqu_ymm_ymmm256 = 1_599, EVEX_Vmovdqu32_xmm_k1z_xmmm128 = 1_600, EVEX_Vmovdqu32_ymm_k1z_ymmm256 = 1_601, EVEX_Vmovdqu32_zmm_k1z_zmmm512 = 1_602, EVEX_Vmovdqu64_xmm_k1z_xmmm128 = 1_603, EVEX_Vmovdqu64_ymm_k1z_ymmm256 = 1_604, EVEX_Vmovdqu64_zmm_k1z_zmmm512 = 1_605, EVEX_Vmovdqu8_xmm_k1z_xmmm128 = 1_606, EVEX_Vmovdqu8_ymm_k1z_ymmm256 = 1_607, EVEX_Vmovdqu8_zmm_k1z_zmmm512 = 1_608, EVEX_Vmovdqu16_xmm_k1z_xmmm128 = 1_609, EVEX_Vmovdqu16_ymm_k1z_ymmm256 = 1_610, EVEX_Vmovdqu16_zmm_k1z_zmmm512 = 1_611, Pshufw_mm_mmm64_imm8 = 1_612, Pshufd_xmm_xmmm128_imm8 = 1_613, VEX_Vpshufd_xmm_xmmm128_imm8 = 1_614, VEX_Vpshufd_ymm_ymmm256_imm8 = 1_615, EVEX_Vpshufd_xmm_k1z_xmmm128b32_imm8 = 1_616, EVEX_Vpshufd_ymm_k1z_ymmm256b32_imm8 = 1_617, EVEX_Vpshufd_zmm_k1z_zmmm512b32_imm8 = 1_618, Pshufhw_xmm_xmmm128_imm8 = 1_619, VEX_Vpshufhw_xmm_xmmm128_imm8 = 1_620, VEX_Vpshufhw_ymm_ymmm256_imm8 = 1_621, EVEX_Vpshufhw_xmm_k1z_xmmm128_imm8 = 1_622, EVEX_Vpshufhw_ymm_k1z_ymmm256_imm8 = 1_623, EVEX_Vpshufhw_zmm_k1z_zmmm512_imm8 = 1_624, Pshuflw_xmm_xmmm128_imm8 = 1_625, VEX_Vpshuflw_xmm_xmmm128_imm8 = 1_626, VEX_Vpshuflw_ymm_ymmm256_imm8 = 1_627, EVEX_Vpshuflw_xmm_k1z_xmmm128_imm8 = 1_628, EVEX_Vpshuflw_ymm_k1z_ymmm256_imm8 = 1_629, EVEX_Vpshuflw_zmm_k1z_zmmm512_imm8 = 1_630, Psrlw_mm_imm8 = 1_631, Psrlw_xmm_imm8 = 1_632, VEX_Vpsrlw_xmm_xmm_imm8 = 1_633, VEX_Vpsrlw_ymm_ymm_imm8 = 1_634, EVEX_Vpsrlw_xmm_k1z_xmmm128_imm8 = 1_635, EVEX_Vpsrlw_ymm_k1z_ymmm256_imm8 = 1_636, EVEX_Vpsrlw_zmm_k1z_zmmm512_imm8 = 1_637, Psraw_mm_imm8 = 1_638, Psraw_xmm_imm8 = 1_639, VEX_Vpsraw_xmm_xmm_imm8 = 1_640, VEX_Vpsraw_ymm_ymm_imm8 = 1_641, EVEX_Vpsraw_xmm_k1z_xmmm128_imm8 = 1_642, EVEX_Vpsraw_ymm_k1z_ymmm256_imm8 = 1_643, EVEX_Vpsraw_zmm_k1z_zmmm512_imm8 = 1_644, Psllw_mm_imm8 = 1_645, Psllw_xmm_imm8 = 1_646, VEX_Vpsllw_xmm_xmm_imm8 = 1_647, VEX_Vpsllw_ymm_ymm_imm8 = 1_648, EVEX_Vpsllw_xmm_k1z_xmmm128_imm8 = 1_649, EVEX_Vpsllw_ymm_k1z_ymmm256_imm8 = 1_650, EVEX_Vpsllw_zmm_k1z_zmmm512_imm8 = 1_651, EVEX_Vprord_xmm_k1z_xmmm128b32_imm8 = 1_652, EVEX_Vprord_ymm_k1z_ymmm256b32_imm8 = 1_653, EVEX_Vprord_zmm_k1z_zmmm512b32_imm8 = 1_654, EVEX_Vprorq_xmm_k1z_xmmm128b64_imm8 = 1_655, EVEX_Vprorq_ymm_k1z_ymmm256b64_imm8 = 1_656, EVEX_Vprorq_zmm_k1z_zmmm512b64_imm8 = 1_657, 
EVEX_Vprold_xmm_k1z_xmmm128b32_imm8 = 1_658, EVEX_Vprold_ymm_k1z_ymmm256b32_imm8 = 1_659, EVEX_Vprold_zmm_k1z_zmmm512b32_imm8 = 1_660, EVEX_Vprolq_xmm_k1z_xmmm128b64_imm8 = 1_661, EVEX_Vprolq_ymm_k1z_ymmm256b64_imm8 = 1_662, EVEX_Vprolq_zmm_k1z_zmmm512b64_imm8 = 1_663, Psrld_mm_imm8 = 1_664, Psrld_xmm_imm8 = 1_665, VEX_Vpsrld_xmm_xmm_imm8 = 1_666, VEX_Vpsrld_ymm_ymm_imm8 = 1_667, EVEX_Vpsrld_xmm_k1z_xmmm128b32_imm8 = 1_668, EVEX_Vpsrld_ymm_k1z_ymmm256b32_imm8 = 1_669, EVEX_Vpsrld_zmm_k1z_zmmm512b32_imm8 = 1_670, Psrad_mm_imm8 = 1_671, Psrad_xmm_imm8 = 1_672, VEX_Vpsrad_xmm_xmm_imm8 = 1_673, VEX_Vpsrad_ymm_ymm_imm8 = 1_674, EVEX_Vpsrad_xmm_k1z_xmmm128b32_imm8 = 1_675, EVEX_Vpsrad_ymm_k1z_ymmm256b32_imm8 = 1_676, EVEX_Vpsrad_zmm_k1z_zmmm512b32_imm8 = 1_677, EVEX_Vpsraq_xmm_k1z_xmmm128b64_imm8 = 1_678, EVEX_Vpsraq_ymm_k1z_ymmm256b64_imm8 = 1_679, EVEX_Vpsraq_zmm_k1z_zmmm512b64_imm8 = 1_680, Pslld_mm_imm8 = 1_681, Pslld_xmm_imm8 = 1_682, VEX_Vpslld_xmm_xmm_imm8 = 1_683, VEX_Vpslld_ymm_ymm_imm8 = 1_684, EVEX_Vpslld_xmm_k1z_xmmm128b32_imm8 = 1_685, EVEX_Vpslld_ymm_k1z_ymmm256b32_imm8 = 1_686, EVEX_Vpslld_zmm_k1z_zmmm512b32_imm8 = 1_687, Psrlq_mm_imm8 = 1_688, Psrlq_xmm_imm8 = 1_689, VEX_Vpsrlq_xmm_xmm_imm8 = 1_690, VEX_Vpsrlq_ymm_ymm_imm8 = 1_691, EVEX_Vpsrlq_xmm_k1z_xmmm128b64_imm8 = 1_692, EVEX_Vpsrlq_ymm_k1z_ymmm256b64_imm8 = 1_693, EVEX_Vpsrlq_zmm_k1z_zmmm512b64_imm8 = 1_694, Psrldq_xmm_imm8 = 1_695, VEX_Vpsrldq_xmm_xmm_imm8 = 1_696, VEX_Vpsrldq_ymm_ymm_imm8 = 1_697, EVEX_Vpsrldq_xmm_xmmm128_imm8 = 1_698, EVEX_Vpsrldq_ymm_ymmm256_imm8 = 1_699, EVEX_Vpsrldq_zmm_zmmm512_imm8 = 1_700, Psllq_mm_imm8 = 1_701, Psllq_xmm_imm8 = 1_702, VEX_Vpsllq_xmm_xmm_imm8 = 1_703, VEX_Vpsllq_ymm_ymm_imm8 = 1_704, EVEX_Vpsllq_xmm_k1z_xmmm128b64_imm8 = 1_705, EVEX_Vpsllq_ymm_k1z_ymmm256b64_imm8 = 1_706, EVEX_Vpsllq_zmm_k1z_zmmm512b64_imm8 = 1_707, Pslldq_xmm_imm8 = 1_708, VEX_Vpslldq_xmm_xmm_imm8 = 1_709, VEX_Vpslldq_ymm_ymm_imm8 = 1_710, EVEX_Vpslldq_xmm_xmmm128_imm8 = 1_711, EVEX_Vpslldq_ymm_ymmm256_imm8 = 1_712, EVEX_Vpslldq_zmm_zmmm512_imm8 = 1_713, Pcmpeqb_mm_mmm64 = 1_714, Pcmpeqb_xmm_xmmm128 = 1_715, VEX_Vpcmpeqb_xmm_xmm_xmmm128 = 1_716, VEX_Vpcmpeqb_ymm_ymm_ymmm256 = 1_717, EVEX_Vpcmpeqb_kr_k1_xmm_xmmm128 = 1_718, EVEX_Vpcmpeqb_kr_k1_ymm_ymmm256 = 1_719, EVEX_Vpcmpeqb_kr_k1_zmm_zmmm512 = 1_720, Pcmpeqw_mm_mmm64 = 1_721, Pcmpeqw_xmm_xmmm128 = 1_722, VEX_Vpcmpeqw_xmm_xmm_xmmm128 = 1_723, VEX_Vpcmpeqw_ymm_ymm_ymmm256 = 1_724, EVEX_Vpcmpeqw_kr_k1_xmm_xmmm128 = 1_725, EVEX_Vpcmpeqw_kr_k1_ymm_ymmm256 = 1_726, EVEX_Vpcmpeqw_kr_k1_zmm_zmmm512 = 1_727, Pcmpeqd_mm_mmm64 = 1_728, Pcmpeqd_xmm_xmmm128 = 1_729, VEX_Vpcmpeqd_xmm_xmm_xmmm128 = 1_730, VEX_Vpcmpeqd_ymm_ymm_ymmm256 = 1_731, EVEX_Vpcmpeqd_kr_k1_xmm_xmmm128b32 = 1_732, EVEX_Vpcmpeqd_kr_k1_ymm_ymmm256b32 = 1_733, EVEX_Vpcmpeqd_kr_k1_zmm_zmmm512b32 = 1_734, Emms = 1_735, VEX_Vzeroupper = 1_736, VEX_Vzeroall = 1_737, Vmread_rm32_r32 = 1_738, Vmread_rm64_r64 = 1_739, EVEX_Vcvttps2udq_xmm_k1z_xmmm128b32 = 1_740, EVEX_Vcvttps2udq_ymm_k1z_ymmm256b32 = 1_741, EVEX_Vcvttps2udq_zmm_k1z_zmmm512b32_sae = 1_742, EVEX_Vcvttpd2udq_xmm_k1z_xmmm128b64 = 1_743, EVEX_Vcvttpd2udq_xmm_k1z_ymmm256b64 = 1_744, EVEX_Vcvttpd2udq_ymm_k1z_zmmm512b64_sae = 1_745, Extrq_xmm_imm8_imm8 = 1_746, EVEX_Vcvttps2uqq_xmm_k1z_xmmm64b32 = 1_747, EVEX_Vcvttps2uqq_ymm_k1z_xmmm128b32 = 1_748, EVEX_Vcvttps2uqq_zmm_k1z_ymmm256b32_sae = 1_749, EVEX_Vcvttpd2uqq_xmm_k1z_xmmm128b64 = 1_750, EVEX_Vcvttpd2uqq_ymm_k1z_ymmm256b64 = 1_751, EVEX_Vcvttpd2uqq_zmm_k1z_zmmm512b64_sae = 1_752, 
EVEX_Vcvttss2usi_r32_xmmm32_sae = 1_753, EVEX_Vcvttss2usi_r64_xmmm32_sae = 1_754, Insertq_xmm_xmm_imm8_imm8 = 1_755, EVEX_Vcvttsd2usi_r32_xmmm64_sae = 1_756, EVEX_Vcvttsd2usi_r64_xmmm64_sae = 1_757, Vmwrite_r32_rm32 = 1_758, Vmwrite_r64_rm64 = 1_759, EVEX_Vcvtps2udq_xmm_k1z_xmmm128b32 = 1_760, EVEX_Vcvtps2udq_ymm_k1z_ymmm256b32 = 1_761, EVEX_Vcvtps2udq_zmm_k1z_zmmm512b32_er = 1_762, EVEX_Vcvtpd2udq_xmm_k1z_xmmm128b64 = 1_763, EVEX_Vcvtpd2udq_xmm_k1z_ymmm256b64 = 1_764, EVEX_Vcvtpd2udq_ymm_k1z_zmmm512b64_er = 1_765, Extrq_xmm_xmm = 1_766, EVEX_Vcvtps2uqq_xmm_k1z_xmmm64b32 = 1_767, EVEX_Vcvtps2uqq_ymm_k1z_xmmm128b32 = 1_768, EVEX_Vcvtps2uqq_zmm_k1z_ymmm256b32_er = 1_769, EVEX_Vcvtpd2uqq_xmm_k1z_xmmm128b64 = 1_770, EVEX_Vcvtpd2uqq_ymm_k1z_ymmm256b64 = 1_771, EVEX_Vcvtpd2uqq_zmm_k1z_zmmm512b64_er = 1_772, EVEX_Vcvtss2usi_r32_xmmm32_er = 1_773, EVEX_Vcvtss2usi_r64_xmmm32_er = 1_774, Insertq_xmm_xmm = 1_775, EVEX_Vcvtsd2usi_r32_xmmm64_er = 1_776, EVEX_Vcvtsd2usi_r64_xmmm64_er = 1_777, EVEX_Vcvttps2qq_xmm_k1z_xmmm64b32 = 1_778, EVEX_Vcvttps2qq_ymm_k1z_xmmm128b32 = 1_779, EVEX_Vcvttps2qq_zmm_k1z_ymmm256b32_sae = 1_780, EVEX_Vcvttpd2qq_xmm_k1z_xmmm128b64 = 1_781, EVEX_Vcvttpd2qq_ymm_k1z_ymmm256b64 = 1_782, EVEX_Vcvttpd2qq_zmm_k1z_zmmm512b64_sae = 1_783, EVEX_Vcvtudq2pd_xmm_k1z_xmmm64b32 = 1_784, EVEX_Vcvtudq2pd_ymm_k1z_xmmm128b32 = 1_785, EVEX_Vcvtudq2pd_zmm_k1z_ymmm256b32_er = 1_786, EVEX_Vcvtuqq2pd_xmm_k1z_xmmm128b64 = 1_787, EVEX_Vcvtuqq2pd_ymm_k1z_ymmm256b64 = 1_788, EVEX_Vcvtuqq2pd_zmm_k1z_zmmm512b64_er = 1_789, EVEX_Vcvtudq2ps_xmm_k1z_xmmm128b32 = 1_790, EVEX_Vcvtudq2ps_ymm_k1z_ymmm256b32 = 1_791, EVEX_Vcvtudq2ps_zmm_k1z_zmmm512b32_er = 1_792, EVEX_Vcvtuqq2ps_xmm_k1z_xmmm128b64 = 1_793, EVEX_Vcvtuqq2ps_xmm_k1z_ymmm256b64 = 1_794, EVEX_Vcvtuqq2ps_ymm_k1z_zmmm512b64_er = 1_795, EVEX_Vcvtps2qq_xmm_k1z_xmmm64b32 = 1_796, EVEX_Vcvtps2qq_ymm_k1z_xmmm128b32 = 1_797, EVEX_Vcvtps2qq_zmm_k1z_ymmm256b32_er = 1_798, EVEX_Vcvtpd2qq_xmm_k1z_xmmm128b64 = 1_799, EVEX_Vcvtpd2qq_ymm_k1z_ymmm256b64 = 1_800, EVEX_Vcvtpd2qq_zmm_k1z_zmmm512b64_er = 1_801, EVEX_Vcvtusi2ss_xmm_xmm_rm32_er = 1_802, EVEX_Vcvtusi2ss_xmm_xmm_rm64_er = 1_803, EVEX_Vcvtusi2sd_xmm_xmm_rm32_er = 1_804, EVEX_Vcvtusi2sd_xmm_xmm_rm64_er = 1_805, Haddpd_xmm_xmmm128 = 1_806, VEX_Vhaddpd_xmm_xmm_xmmm128 = 1_807, VEX_Vhaddpd_ymm_ymm_ymmm256 = 1_808, Haddps_xmm_xmmm128 = 1_809, VEX_Vhaddps_xmm_xmm_xmmm128 = 1_810, VEX_Vhaddps_ymm_ymm_ymmm256 = 1_811, Hsubpd_xmm_xmmm128 = 1_812, VEX_Vhsubpd_xmm_xmm_xmmm128 = 1_813, VEX_Vhsubpd_ymm_ymm_ymmm256 = 1_814, Hsubps_xmm_xmmm128 = 1_815, VEX_Vhsubps_xmm_xmm_xmmm128 = 1_816, VEX_Vhsubps_ymm_ymm_ymmm256 = 1_817, Movd_rm32_mm = 1_818, Movq_rm64_mm = 1_819, Movd_rm32_xmm = 1_820, Movq_rm64_xmm = 1_821, VEX_Vmovd_rm32_xmm = 1_822, VEX_Vmovq_rm64_xmm = 1_823, EVEX_Vmovd_rm32_xmm = 1_824, EVEX_Vmovq_rm64_xmm = 1_825, Movq_xmm_xmmm64 = 1_826, VEX_Vmovq_xmm_xmmm64 = 1_827, EVEX_Vmovq_xmm_xmmm64 = 1_828, Movq_mmm64_mm = 1_829, Movdqa_xmmm128_xmm = 1_830, VEX_Vmovdqa_xmmm128_xmm = 1_831, VEX_Vmovdqa_ymmm256_ymm = 1_832, EVEX_Vmovdqa32_xmmm128_k1z_xmm = 1_833, EVEX_Vmovdqa32_ymmm256_k1z_ymm = 1_834, EVEX_Vmovdqa32_zmmm512_k1z_zmm = 1_835, EVEX_Vmovdqa64_xmmm128_k1z_xmm = 1_836, EVEX_Vmovdqa64_ymmm256_k1z_ymm = 1_837, EVEX_Vmovdqa64_zmmm512_k1z_zmm = 1_838, Movdqu_xmmm128_xmm = 1_839, VEX_Vmovdqu_xmmm128_xmm = 1_840, VEX_Vmovdqu_ymmm256_ymm = 1_841, EVEX_Vmovdqu32_xmmm128_k1z_xmm = 1_842, EVEX_Vmovdqu32_ymmm256_k1z_ymm = 1_843, EVEX_Vmovdqu32_zmmm512_k1z_zmm = 1_844, EVEX_Vmovdqu64_xmmm128_k1z_xmm = 1_845, 
EVEX_Vmovdqu64_ymmm256_k1z_ymm = 1_846, EVEX_Vmovdqu64_zmmm512_k1z_zmm = 1_847, EVEX_Vmovdqu8_xmmm128_k1z_xmm = 1_848, EVEX_Vmovdqu8_ymmm256_k1z_ymm = 1_849, EVEX_Vmovdqu8_zmmm512_k1z_zmm = 1_850, EVEX_Vmovdqu16_xmmm128_k1z_xmm = 1_851, EVEX_Vmovdqu16_ymmm256_k1z_ymm = 1_852, EVEX_Vmovdqu16_zmmm512_k1z_zmm = 1_853, Jo_rel16 = 1_854, Jo_rel32_32 = 1_855, Jo_rel32_64 = 1_856, Jno_rel16 = 1_857, Jno_rel32_32 = 1_858, Jno_rel32_64 = 1_859, Jb_rel16 = 1_860, Jb_rel32_32 = 1_861, Jb_rel32_64 = 1_862, Jae_rel16 = 1_863, Jae_rel32_32 = 1_864, Jae_rel32_64 = 1_865, Je_rel16 = 1_866, Je_rel32_32 = 1_867, Je_rel32_64 = 1_868, Jne_rel16 = 1_869, Jne_rel32_32 = 1_870, Jne_rel32_64 = 1_871, Jbe_rel16 = 1_872, Jbe_rel32_32 = 1_873, Jbe_rel32_64 = 1_874, Ja_rel16 = 1_875, Ja_rel32_32 = 1_876, Ja_rel32_64 = 1_877, Js_rel16 = 1_878, Js_rel32_32 = 1_879, Js_rel32_64 = 1_880, Jns_rel16 = 1_881, Jns_rel32_32 = 1_882, Jns_rel32_64 = 1_883, Jp_rel16 = 1_884, Jp_rel32_32 = 1_885, Jp_rel32_64 = 1_886, Jnp_rel16 = 1_887, Jnp_rel32_32 = 1_888, Jnp_rel32_64 = 1_889, Jl_rel16 = 1_890, Jl_rel32_32 = 1_891, Jl_rel32_64 = 1_892, Jge_rel16 = 1_893, Jge_rel32_32 = 1_894, Jge_rel32_64 = 1_895, Jle_rel16 = 1_896, Jle_rel32_32 = 1_897, Jle_rel32_64 = 1_898, Jg_rel16 = 1_899, Jg_rel32_32 = 1_900, Jg_rel32_64 = 1_901, Seto_rm8 = 1_902, Setno_rm8 = 1_903, Setb_rm8 = 1_904, Setae_rm8 = 1_905, Sete_rm8 = 1_906, Setne_rm8 = 1_907, Setbe_rm8 = 1_908, Seta_rm8 = 1_909, Sets_rm8 = 1_910, Setns_rm8 = 1_911, Setp_rm8 = 1_912, Setnp_rm8 = 1_913, Setl_rm8 = 1_914, Setge_rm8 = 1_915, Setle_rm8 = 1_916, Setg_rm8 = 1_917, VEX_Kmovw_kr_km16 = 1_918, VEX_Kmovq_kr_km64 = 1_919, VEX_Kmovb_kr_km8 = 1_920, VEX_Kmovd_kr_km32 = 1_921, VEX_Kmovw_m16_kr = 1_922, VEX_Kmovq_m64_kr = 1_923, VEX_Kmovb_m8_kr = 1_924, VEX_Kmovd_m32_kr = 1_925, VEX_Kmovw_kr_r32 = 1_926, VEX_Kmovb_kr_r32 = 1_927, VEX_Kmovd_kr_r32 = 1_928, VEX_Kmovq_kr_r64 = 1_929, VEX_Kmovw_r32_kr = 1_930, VEX_Kmovb_r32_kr = 1_931, VEX_Kmovd_r32_kr = 1_932, VEX_Kmovq_r64_kr = 1_933, VEX_Kortestw_kr_kr = 1_934, VEX_Kortestq_kr_kr = 1_935, VEX_Kortestb_kr_kr = 1_936, VEX_Kortestd_kr_kr = 1_937, VEX_Ktestw_kr_kr = 1_938, VEX_Ktestq_kr_kr = 1_939, VEX_Ktestb_kr_kr = 1_940, VEX_Ktestd_kr_kr = 1_941, Pushw_FS = 1_942, Pushd_FS = 1_943, Pushq_FS = 1_944, Popw_FS = 1_945, Popd_FS = 1_946, Popq_FS = 1_947, Cpuid = 1_948, Bt_rm16_r16 = 1_949, Bt_rm32_r32 = 1_950, Bt_rm64_r64 = 1_951, Shld_rm16_r16_imm8 = 1_952, Shld_rm32_r32_imm8 = 1_953, Shld_rm64_r64_imm8 = 1_954, Shld_rm16_r16_CL = 1_955, Shld_rm32_r32_CL = 1_956, Shld_rm64_r64_CL = 1_957, Montmul_16 = 1_958, Montmul_32 = 1_959, Montmul_64 = 1_960, Xsha1_16 = 1_961, Xsha1_32 = 1_962, Xsha1_64 = 1_963, Xsha256_16 = 1_964, Xsha256_32 = 1_965, Xsha256_64 = 1_966, Xbts_r16_rm16 = 1_967, Xbts_r32_rm32 = 1_968, Xstore_16 = 1_969, Xstore_32 = 1_970, Xstore_64 = 1_971, Xcryptecb_16 = 1_972, Xcryptecb_32 = 1_973, Xcryptecb_64 = 1_974, Xcryptcbc_16 = 1_975, Xcryptcbc_32 = 1_976, Xcryptcbc_64 = 1_977, Xcryptctr_16 = 1_978, Xcryptctr_32 = 1_979, Xcryptctr_64 = 1_980, Xcryptcfb_16 = 1_981, Xcryptcfb_32 = 1_982, Xcryptcfb_64 = 1_983, Xcryptofb_16 = 1_984, Xcryptofb_32 = 1_985, Xcryptofb_64 = 1_986, Ibts_rm16_r16 = 1_987, Ibts_rm32_r32 = 1_988, Cmpxchg486_rm8_r8 = 1_989, Cmpxchg486_rm16_r16 = 1_990, Cmpxchg486_rm32_r32 = 1_991, Pushw_GS = 1_992, Pushd_GS = 1_993, Pushq_GS = 1_994, Popw_GS = 1_995, Popd_GS = 1_996, Popq_GS = 1_997, Rsm = 1_998, Bts_rm16_r16 = 1_999, Bts_rm32_r32 = 2_000, Bts_rm64_r64 = 2_001, Shrd_rm16_r16_imm8 = 2_002, Shrd_rm32_r32_imm8 = 2_003, 
Shrd_rm64_r64_imm8 = 2_004, Shrd_rm16_r16_CL = 2_005, Shrd_rm32_r32_CL = 2_006, Shrd_rm64_r64_CL = 2_007, Fxsave_m512byte = 2_008, Fxsave64_m512byte = 2_009, Rdfsbase_r32 = 2_010, Rdfsbase_r64 = 2_011, Fxrstor_m512byte = 2_012, Fxrstor64_m512byte = 2_013, Rdgsbase_r32 = 2_014, Rdgsbase_r64 = 2_015, Ldmxcsr_m32 = 2_016, Wrfsbase_r32 = 2_017, Wrfsbase_r64 = 2_018, VEX_Vldmxcsr_m32 = 2_019, Stmxcsr_m32 = 2_020, Wrgsbase_r32 = 2_021, Wrgsbase_r64 = 2_022, VEX_Vstmxcsr_m32 = 2_023, Xsave_mem = 2_024, Xsave64_mem = 2_025, Ptwrite_rm32 = 2_026, Ptwrite_rm64 = 2_027, Xrstor_mem = 2_028, Xrstor64_mem = 2_029, Incsspd_r32 = 2_030, Incsspq_r64 = 2_031, Xsaveopt_mem = 2_032, Xsaveopt64_mem = 2_033, Clwb_m8 = 2_034, Tpause_r32 = 2_035, Tpause_r64 = 2_036, Clrssbsy_m64 = 2_037, Umonitor_r16 = 2_038, Umonitor_r32 = 2_039, Umonitor_r64 = 2_040, Umwait_r32 = 2_041, Umwait_r64 = 2_042, Clflush_m8 = 2_043, Clflushopt_m8 = 2_044, Lfence = 2_045, Lfence_E9 = 2_046, Lfence_EA = 2_047, Lfence_EB = 2_048, Lfence_EC = 2_049, Lfence_ED = 2_050, Lfence_EE = 2_051, Lfence_EF = 2_052, Mfence = 2_053, Mfence_F1 = 2_054, Mfence_F2 = 2_055, Mfence_F3 = 2_056, Mfence_F4 = 2_057, Mfence_F5 = 2_058, Mfence_F6 = 2_059, Mfence_F7 = 2_060, Sfence = 2_061, Sfence_F9 = 2_062, Sfence_FA = 2_063, Sfence_FB = 2_064, Sfence_FC = 2_065, Sfence_FD = 2_066, Sfence_FE = 2_067, Sfence_FF = 2_068, Pcommit = 2_069, Imul_r16_rm16 = 2_070, Imul_r32_rm32 = 2_071, Imul_r64_rm64 = 2_072, Cmpxchg_rm8_r8 = 2_073, Cmpxchg_rm16_r16 = 2_074, Cmpxchg_rm32_r32 = 2_075, Cmpxchg_rm64_r64 = 2_076, Lss_r16_m1616 = 2_077, Lss_r32_m1632 = 2_078, Lss_r64_m1664 = 2_079, Btr_rm16_r16 = 2_080, Btr_rm32_r32 = 2_081, Btr_rm64_r64 = 2_082, Lfs_r16_m1616 = 2_083, Lfs_r32_m1632 = 2_084, Lfs_r64_m1664 = 2_085, Lgs_r16_m1616 = 2_086, Lgs_r32_m1632 = 2_087, Lgs_r64_m1664 = 2_088, Movzx_r16_rm8 = 2_089, Movzx_r32_rm8 = 2_090, Movzx_r64_rm8 = 2_091, Movzx_r16_rm16 = 2_092, Movzx_r32_rm16 = 2_093, Movzx_r64_rm16 = 2_094, Jmpe_disp16 = 2_095, Jmpe_disp32 = 2_096, Popcnt_r16_rm16 = 2_097, Popcnt_r32_rm32 = 2_098, Popcnt_r64_rm64 = 2_099, Ud1_r16_rm16 = 2_100, Ud1_r32_rm32 = 2_101, Ud1_r64_rm64 = 2_102, Bt_rm16_imm8 = 2_103, Bt_rm32_imm8 = 2_104, Bt_rm64_imm8 = 2_105, Bts_rm16_imm8 = 2_106, Bts_rm32_imm8 = 2_107, Bts_rm64_imm8 = 2_108, Btr_rm16_imm8 = 2_109, Btr_rm32_imm8 = 2_110, Btr_rm64_imm8 = 2_111, Btc_rm16_imm8 = 2_112, Btc_rm32_imm8 = 2_113, Btc_rm64_imm8 = 2_114, Btc_rm16_r16 = 2_115, Btc_rm32_r32 = 2_116, Btc_rm64_r64 = 2_117, Bsf_r16_rm16 = 2_118, Bsf_r32_rm32 = 2_119, Bsf_r64_rm64 = 2_120, Tzcnt_r16_rm16 = 2_121, Tzcnt_r32_rm32 = 2_122, Tzcnt_r64_rm64 = 2_123, Bsr_r16_rm16 = 2_124, Bsr_r32_rm32 = 2_125, Bsr_r64_rm64 = 2_126, Lzcnt_r16_rm16 = 2_127, Lzcnt_r32_rm32 = 2_128, Lzcnt_r64_rm64 = 2_129, Movsx_r16_rm8 = 2_130, Movsx_r32_rm8 = 2_131, Movsx_r64_rm8 = 2_132, Movsx_r16_rm16 = 2_133, Movsx_r32_rm16 = 2_134, Movsx_r64_rm16 = 2_135, Xadd_rm8_r8 = 2_136, Xadd_rm16_r16 = 2_137, Xadd_rm32_r32 = 2_138, Xadd_rm64_r64 = 2_139, Cmpps_xmm_xmmm128_imm8 = 2_140, VEX_Vcmpps_xmm_xmm_xmmm128_imm8 = 2_141, VEX_Vcmpps_ymm_ymm_ymmm256_imm8 = 2_142, EVEX_Vcmpps_kr_k1_xmm_xmmm128b32_imm8 = 2_143, EVEX_Vcmpps_kr_k1_ymm_ymmm256b32_imm8 = 2_144, EVEX_Vcmpps_kr_k1_zmm_zmmm512b32_imm8_sae = 2_145, Cmppd_xmm_xmmm128_imm8 = 2_146, VEX_Vcmppd_xmm_xmm_xmmm128_imm8 = 2_147, VEX_Vcmppd_ymm_ymm_ymmm256_imm8 = 2_148, EVEX_Vcmppd_kr_k1_xmm_xmmm128b64_imm8 = 2_149, EVEX_Vcmppd_kr_k1_ymm_ymmm256b64_imm8 = 2_150, EVEX_Vcmppd_kr_k1_zmm_zmmm512b64_imm8_sae = 2_151, Cmpss_xmm_xmmm32_imm8 = 2_152, 
VEX_Vcmpss_xmm_xmm_xmmm32_imm8 = 2_153, EVEX_Vcmpss_kr_k1_xmm_xmmm32_imm8_sae = 2_154, Cmpsd_xmm_xmmm64_imm8 = 2_155, VEX_Vcmpsd_xmm_xmm_xmmm64_imm8 = 2_156, EVEX_Vcmpsd_kr_k1_xmm_xmmm64_imm8_sae = 2_157, Movnti_m32_r32 = 2_158, Movnti_m64_r64 = 2_159, Pinsrw_mm_r32m16_imm8 = 2_160, Pinsrw_mm_r64m16_imm8 = 2_161, Pinsrw_xmm_r32m16_imm8 = 2_162, Pinsrw_xmm_r64m16_imm8 = 2_163, VEX_Vpinsrw_xmm_xmm_r32m16_imm8 = 2_164, VEX_Vpinsrw_xmm_xmm_r64m16_imm8 = 2_165, EVEX_Vpinsrw_xmm_xmm_r32m16_imm8 = 2_166, EVEX_Vpinsrw_xmm_xmm_r64m16_imm8 = 2_167, Pextrw_r32_mm_imm8 = 2_168, Pextrw_r64_mm_imm8 = 2_169, Pextrw_r32_xmm_imm8 = 2_170, Pextrw_r64_xmm_imm8 = 2_171, VEX_Vpextrw_r32_xmm_imm8 = 2_172, VEX_Vpextrw_r64_xmm_imm8 = 2_173, EVEX_Vpextrw_r32_xmm_imm8 = 2_174, EVEX_Vpextrw_r64_xmm_imm8 = 2_175, Shufps_xmm_xmmm128_imm8 = 2_176, VEX_Vshufps_xmm_xmm_xmmm128_imm8 = 2_177, VEX_Vshufps_ymm_ymm_ymmm256_imm8 = 2_178, EVEX_Vshufps_xmm_k1z_xmm_xmmm128b32_imm8 = 2_179, EVEX_Vshufps_ymm_k1z_ymm_ymmm256b32_imm8 = 2_180, EVEX_Vshufps_zmm_k1z_zmm_zmmm512b32_imm8 = 2_181, Shufpd_xmm_xmmm128_imm8 = 2_182, VEX_Vshufpd_xmm_xmm_xmmm128_imm8 = 2_183, VEX_Vshufpd_ymm_ymm_ymmm256_imm8 = 2_184, EVEX_Vshufpd_xmm_k1z_xmm_xmmm128b64_imm8 = 2_185, EVEX_Vshufpd_ymm_k1z_ymm_ymmm256b64_imm8 = 2_186, EVEX_Vshufpd_zmm_k1z_zmm_zmmm512b64_imm8 = 2_187, Cmpxchg8b_m64 = 2_188, Cmpxchg16b_m128 = 2_189, Xrstors_mem = 2_190, Xrstors64_mem = 2_191, Xsavec_mem = 2_192, Xsavec64_mem = 2_193, Xsaves_mem = 2_194, Xsaves64_mem = 2_195, Vmptrld_m64 = 2_196, Vmclear_m64 = 2_197, Vmxon_m64 = 2_198, Rdrand_r16 = 2_199, Rdrand_r32 = 2_200, Rdrand_r64 = 2_201, Vmptrst_m64 = 2_202, Rdseed_r16 = 2_203, Rdseed_r32 = 2_204, Rdseed_r64 = 2_205, Rdpid_r32 = 2_206, Rdpid_r64 = 2_207, Bswap_r16 = 2_208, Bswap_r32 = 2_209, Bswap_r64 = 2_210, Addsubpd_xmm_xmmm128 = 2_211, VEX_Vaddsubpd_xmm_xmm_xmmm128 = 2_212, VEX_Vaddsubpd_ymm_ymm_ymmm256 = 2_213, Addsubps_xmm_xmmm128 = 2_214, VEX_Vaddsubps_xmm_xmm_xmmm128 = 2_215, VEX_Vaddsubps_ymm_ymm_ymmm256 = 2_216, Psrlw_mm_mmm64 = 2_217, Psrlw_xmm_xmmm128 = 2_218, VEX_Vpsrlw_xmm_xmm_xmmm128 = 2_219, VEX_Vpsrlw_ymm_ymm_xmmm128 = 2_220, EVEX_Vpsrlw_xmm_k1z_xmm_xmmm128 = 2_221, EVEX_Vpsrlw_ymm_k1z_ymm_xmmm128 = 2_222, EVEX_Vpsrlw_zmm_k1z_zmm_xmmm128 = 2_223, Psrld_mm_mmm64 = 2_224, Psrld_xmm_xmmm128 = 2_225, VEX_Vpsrld_xmm_xmm_xmmm128 = 2_226, VEX_Vpsrld_ymm_ymm_xmmm128 = 2_227, EVEX_Vpsrld_xmm_k1z_xmm_xmmm128 = 2_228, EVEX_Vpsrld_ymm_k1z_ymm_xmmm128 = 2_229, EVEX_Vpsrld_zmm_k1z_zmm_xmmm128 = 2_230, Psrlq_mm_mmm64 = 2_231, Psrlq_xmm_xmmm128 = 2_232, VEX_Vpsrlq_xmm_xmm_xmmm128 = 2_233, VEX_Vpsrlq_ymm_ymm_xmmm128 = 2_234, EVEX_Vpsrlq_xmm_k1z_xmm_xmmm128 = 2_235, EVEX_Vpsrlq_ymm_k1z_ymm_xmmm128 = 2_236, EVEX_Vpsrlq_zmm_k1z_zmm_xmmm128 = 2_237, Paddq_mm_mmm64 = 2_238, Paddq_xmm_xmmm128 = 2_239, VEX_Vpaddq_xmm_xmm_xmmm128 = 2_240, VEX_Vpaddq_ymm_ymm_ymmm256 = 2_241, EVEX_Vpaddq_xmm_k1z_xmm_xmmm128b64 = 2_242, EVEX_Vpaddq_ymm_k1z_ymm_ymmm256b64 = 2_243, EVEX_Vpaddq_zmm_k1z_zmm_zmmm512b64 = 2_244, Pmullw_mm_mmm64 = 2_245, Pmullw_xmm_xmmm128 = 2_246, VEX_Vpmullw_xmm_xmm_xmmm128 = 2_247, VEX_Vpmullw_ymm_ymm_ymmm256 = 2_248, EVEX_Vpmullw_xmm_k1z_xmm_xmmm128 = 2_249, EVEX_Vpmullw_ymm_k1z_ymm_ymmm256 = 2_250, EVEX_Vpmullw_zmm_k1z_zmm_zmmm512 = 2_251, Movq_xmmm64_xmm = 2_252, VEX_Vmovq_xmmm64_xmm = 2_253, EVEX_Vmovq_xmmm64_xmm = 2_254, Movq2dq_xmm_mm = 2_255, Movdq2q_mm_xmm = 2_256, Pmovmskb_r32_mm = 2_257, Pmovmskb_r64_mm = 2_258, Pmovmskb_r32_xmm = 2_259, Pmovmskb_r64_xmm = 2_260, VEX_Vpmovmskb_r32_xmm = 2_261, VEX_Vpmovmskb_r64_xmm 
= 2_262, VEX_Vpmovmskb_r32_ymm = 2_263, VEX_Vpmovmskb_r64_ymm = 2_264, Psubusb_mm_mmm64 = 2_265, Psubusb_xmm_xmmm128 = 2_266, VEX_Vpsubusb_xmm_xmm_xmmm128 = 2_267, VEX_Vpsubusb_ymm_ymm_ymmm256 = 2_268, EVEX_Vpsubusb_xmm_k1z_xmm_xmmm128 = 2_269, EVEX_Vpsubusb_ymm_k1z_ymm_ymmm256 = 2_270, EVEX_Vpsubusb_zmm_k1z_zmm_zmmm512 = 2_271, Psubusw_mm_mmm64 = 2_272, Psubusw_xmm_xmmm128 = 2_273, VEX_Vpsubusw_xmm_xmm_xmmm128 = 2_274, VEX_Vpsubusw_ymm_ymm_ymmm256 = 2_275, EVEX_Vpsubusw_xmm_k1z_xmm_xmmm128 = 2_276, EVEX_Vpsubusw_ymm_k1z_ymm_ymmm256 = 2_277, EVEX_Vpsubusw_zmm_k1z_zmm_zmmm512 = 2_278, Pminub_mm_mmm64 = 2_279, Pminub_xmm_xmmm128 = 2_280, VEX_Vpminub_xmm_xmm_xmmm128 = 2_281, VEX_Vpminub_ymm_ymm_ymmm256 = 2_282, EVEX_Vpminub_xmm_k1z_xmm_xmmm128 = 2_283, EVEX_Vpminub_ymm_k1z_ymm_ymmm256 = 2_284, EVEX_Vpminub_zmm_k1z_zmm_zmmm512 = 2_285, Pand_mm_mmm64 = 2_286, Pand_xmm_xmmm128 = 2_287, VEX_Vpand_xmm_xmm_xmmm128 = 2_288, VEX_Vpand_ymm_ymm_ymmm256 = 2_289, EVEX_Vpandd_xmm_k1z_xmm_xmmm128b32 = 2_290, EVEX_Vpandd_ymm_k1z_ymm_ymmm256b32 = 2_291, EVEX_Vpandd_zmm_k1z_zmm_zmmm512b32 = 2_292, EVEX_Vpandq_xmm_k1z_xmm_xmmm128b64 = 2_293, EVEX_Vpandq_ymm_k1z_ymm_ymmm256b64 = 2_294, EVEX_Vpandq_zmm_k1z_zmm_zmmm512b64 = 2_295, Paddusb_mm_mmm64 = 2_296, Paddusb_xmm_xmmm128 = 2_297, VEX_Vpaddusb_xmm_xmm_xmmm128 = 2_298, VEX_Vpaddusb_ymm_ymm_ymmm256 = 2_299, EVEX_Vpaddusb_xmm_k1z_xmm_xmmm128 = 2_300, EVEX_Vpaddusb_ymm_k1z_ymm_ymmm256 = 2_301, EVEX_Vpaddusb_zmm_k1z_zmm_zmmm512 = 2_302, Paddusw_mm_mmm64 = 2_303, Paddusw_xmm_xmmm128 = 2_304, VEX_Vpaddusw_xmm_xmm_xmmm128 = 2_305, VEX_Vpaddusw_ymm_ymm_ymmm256 = 2_306, EVEX_Vpaddusw_xmm_k1z_xmm_xmmm128 = 2_307, EVEX_Vpaddusw_ymm_k1z_ymm_ymmm256 = 2_308, EVEX_Vpaddusw_zmm_k1z_zmm_zmmm512 = 2_309, Pmaxub_mm_mmm64 = 2_310, Pmaxub_xmm_xmmm128 = 2_311, VEX_Vpmaxub_xmm_xmm_xmmm128 = 2_312, VEX_Vpmaxub_ymm_ymm_ymmm256 = 2_313, EVEX_Vpmaxub_xmm_k1z_xmm_xmmm128 = 2_314, EVEX_Vpmaxub_ymm_k1z_ymm_ymmm256 = 2_315, EVEX_Vpmaxub_zmm_k1z_zmm_zmmm512 = 2_316, Pandn_mm_mmm64 = 2_317, Pandn_xmm_xmmm128 = 2_318, VEX_Vpandn_xmm_xmm_xmmm128 = 2_319, VEX_Vpandn_ymm_ymm_ymmm256 = 2_320, EVEX_Vpandnd_xmm_k1z_xmm_xmmm128b32 = 2_321, EVEX_Vpandnd_ymm_k1z_ymm_ymmm256b32 = 2_322, EVEX_Vpandnd_zmm_k1z_zmm_zmmm512b32 = 2_323, EVEX_Vpandnq_xmm_k1z_xmm_xmmm128b64 = 2_324, EVEX_Vpandnq_ymm_k1z_ymm_ymmm256b64 = 2_325, EVEX_Vpandnq_zmm_k1z_zmm_zmmm512b64 = 2_326, Pavgb_mm_mmm64 = 2_327, Pavgb_xmm_xmmm128 = 2_328, VEX_Vpavgb_xmm_xmm_xmmm128 = 2_329, VEX_Vpavgb_ymm_ymm_ymmm256 = 2_330, EVEX_Vpavgb_xmm_k1z_xmm_xmmm128 = 2_331, EVEX_Vpavgb_ymm_k1z_ymm_ymmm256 = 2_332, EVEX_Vpavgb_zmm_k1z_zmm_zmmm512 = 2_333, Psraw_mm_mmm64 = 2_334, Psraw_xmm_xmmm128 = 2_335, VEX_Vpsraw_xmm_xmm_xmmm128 = 2_336, VEX_Vpsraw_ymm_ymm_xmmm128 = 2_337, EVEX_Vpsraw_xmm_k1z_xmm_xmmm128 = 2_338, EVEX_Vpsraw_ymm_k1z_ymm_xmmm128 = 2_339, EVEX_Vpsraw_zmm_k1z_zmm_xmmm128 = 2_340, Psrad_mm_mmm64 = 2_341, Psrad_xmm_xmmm128 = 2_342, VEX_Vpsrad_xmm_xmm_xmmm128 = 2_343, VEX_Vpsrad_ymm_ymm_xmmm128 = 2_344, EVEX_Vpsrad_xmm_k1z_xmm_xmmm128 = 2_345, EVEX_Vpsrad_ymm_k1z_ymm_xmmm128 = 2_346, EVEX_Vpsrad_zmm_k1z_zmm_xmmm128 = 2_347, EVEX_Vpsraq_xmm_k1z_xmm_xmmm128 = 2_348, EVEX_Vpsraq_ymm_k1z_ymm_xmmm128 = 2_349, EVEX_Vpsraq_zmm_k1z_zmm_xmmm128 = 2_350, Pavgw_mm_mmm64 = 2_351, Pavgw_xmm_xmmm128 = 2_352, VEX_Vpavgw_xmm_xmm_xmmm128 = 2_353, VEX_Vpavgw_ymm_ymm_ymmm256 = 2_354, EVEX_Vpavgw_xmm_k1z_xmm_xmmm128 = 2_355, EVEX_Vpavgw_ymm_k1z_ymm_ymmm256 = 2_356, EVEX_Vpavgw_zmm_k1z_zmm_zmmm512 = 2_357, Pmulhuw_mm_mmm64 = 2_358, Pmulhuw_xmm_xmmm128 = 2_359, 
VEX_Vpmulhuw_xmm_xmm_xmmm128 = 2_360, VEX_Vpmulhuw_ymm_ymm_ymmm256 = 2_361, EVEX_Vpmulhuw_xmm_k1z_xmm_xmmm128 = 2_362, EVEX_Vpmulhuw_ymm_k1z_ymm_ymmm256 = 2_363, EVEX_Vpmulhuw_zmm_k1z_zmm_zmmm512 = 2_364, Pmulhw_mm_mmm64 = 2_365, Pmulhw_xmm_xmmm128 = 2_366, VEX_Vpmulhw_xmm_xmm_xmmm128 = 2_367, VEX_Vpmulhw_ymm_ymm_ymmm256 = 2_368, EVEX_Vpmulhw_xmm_k1z_xmm_xmmm128 = 2_369, EVEX_Vpmulhw_ymm_k1z_ymm_ymmm256 = 2_370, EVEX_Vpmulhw_zmm_k1z_zmm_zmmm512 = 2_371, Cvttpd2dq_xmm_xmmm128 = 2_372, VEX_Vcvttpd2dq_xmm_xmmm128 = 2_373, VEX_Vcvttpd2dq_xmm_ymmm256 = 2_374, EVEX_Vcvttpd2dq_xmm_k1z_xmmm128b64 = 2_375, EVEX_Vcvttpd2dq_xmm_k1z_ymmm256b64 = 2_376, EVEX_Vcvttpd2dq_ymm_k1z_zmmm512b64_sae = 2_377, Cvtdq2pd_xmm_xmmm64 = 2_378, VEX_Vcvtdq2pd_xmm_xmmm64 = 2_379, VEX_Vcvtdq2pd_ymm_xmmm128 = 2_380, EVEX_Vcvtdq2pd_xmm_k1z_xmmm64b32 = 2_381, EVEX_Vcvtdq2pd_ymm_k1z_xmmm128b32 = 2_382, EVEX_Vcvtdq2pd_zmm_k1z_ymmm256b32_er = 2_383, EVEX_Vcvtqq2pd_xmm_k1z_xmmm128b64 = 2_384, EVEX_Vcvtqq2pd_ymm_k1z_ymmm256b64 = 2_385, EVEX_Vcvtqq2pd_zmm_k1z_zmmm512b64_er = 2_386, Cvtpd2dq_xmm_xmmm128 = 2_387, VEX_Vcvtpd2dq_xmm_xmmm128 = 2_388, VEX_Vcvtpd2dq_xmm_ymmm256 = 2_389, EVEX_Vcvtpd2dq_xmm_k1z_xmmm128b64 = 2_390, EVEX_Vcvtpd2dq_xmm_k1z_ymmm256b64 = 2_391, EVEX_Vcvtpd2dq_ymm_k1z_zmmm512b64_er = 2_392, Movntq_m64_mm = 2_393, Movntdq_m128_xmm = 2_394, VEX_Vmovntdq_m128_xmm = 2_395, VEX_Vmovntdq_m256_ymm = 2_396, EVEX_Vmovntdq_m128_xmm = 2_397, EVEX_Vmovntdq_m256_ymm = 2_398, EVEX_Vmovntdq_m512_zmm = 2_399, Psubsb_mm_mmm64 = 2_400, Psubsb_xmm_xmmm128 = 2_401, VEX_Vpsubsb_xmm_xmm_xmmm128 = 2_402, VEX_Vpsubsb_ymm_ymm_ymmm256 = 2_403, EVEX_Vpsubsb_xmm_k1z_xmm_xmmm128 = 2_404, EVEX_Vpsubsb_ymm_k1z_ymm_ymmm256 = 2_405, EVEX_Vpsubsb_zmm_k1z_zmm_zmmm512 = 2_406, Psubsw_mm_mmm64 = 2_407, Psubsw_xmm_xmmm128 = 2_408, VEX_Vpsubsw_xmm_xmm_xmmm128 = 2_409, VEX_Vpsubsw_ymm_ymm_ymmm256 = 2_410, EVEX_Vpsubsw_xmm_k1z_xmm_xmmm128 = 2_411, EVEX_Vpsubsw_ymm_k1z_ymm_ymmm256 = 2_412, EVEX_Vpsubsw_zmm_k1z_zmm_zmmm512 = 2_413, Pminsw_mm_mmm64 = 2_414, Pminsw_xmm_xmmm128 = 2_415, VEX_Vpminsw_xmm_xmm_xmmm128 = 2_416, VEX_Vpminsw_ymm_ymm_ymmm256 = 2_417, EVEX_Vpminsw_xmm_k1z_xmm_xmmm128 = 2_418, EVEX_Vpminsw_ymm_k1z_ymm_ymmm256 = 2_419, EVEX_Vpminsw_zmm_k1z_zmm_zmmm512 = 2_420, Por_mm_mmm64 = 2_421, Por_xmm_xmmm128 = 2_422, VEX_Vpor_xmm_xmm_xmmm128 = 2_423, VEX_Vpor_ymm_ymm_ymmm256 = 2_424, EVEX_Vpord_xmm_k1z_xmm_xmmm128b32 = 2_425, EVEX_Vpord_ymm_k1z_ymm_ymmm256b32 = 2_426, EVEX_Vpord_zmm_k1z_zmm_zmmm512b32 = 2_427, EVEX_Vporq_xmm_k1z_xmm_xmmm128b64 = 2_428, EVEX_Vporq_ymm_k1z_ymm_ymmm256b64 = 2_429, EVEX_Vporq_zmm_k1z_zmm_zmmm512b64 = 2_430, Paddsb_mm_mmm64 = 2_431, Paddsb_xmm_xmmm128 = 2_432, VEX_Vpaddsb_xmm_xmm_xmmm128 = 2_433, VEX_Vpaddsb_ymm_ymm_ymmm256 = 2_434, EVEX_Vpaddsb_xmm_k1z_xmm_xmmm128 = 2_435, EVEX_Vpaddsb_ymm_k1z_ymm_ymmm256 = 2_436, EVEX_Vpaddsb_zmm_k1z_zmm_zmmm512 = 2_437, Paddsw_mm_mmm64 = 2_438, Paddsw_xmm_xmmm128 = 2_439, VEX_Vpaddsw_xmm_xmm_xmmm128 = 2_440, VEX_Vpaddsw_ymm_ymm_ymmm256 = 2_441, EVEX_Vpaddsw_xmm_k1z_xmm_xmmm128 = 2_442, EVEX_Vpaddsw_ymm_k1z_ymm_ymmm256 = 2_443, EVEX_Vpaddsw_zmm_k1z_zmm_zmmm512 = 2_444, Pmaxsw_mm_mmm64 = 2_445, Pmaxsw_xmm_xmmm128 = 2_446, VEX_Vpmaxsw_xmm_xmm_xmmm128 = 2_447, VEX_Vpmaxsw_ymm_ymm_ymmm256 = 2_448, EVEX_Vpmaxsw_xmm_k1z_xmm_xmmm128 = 2_449, EVEX_Vpmaxsw_ymm_k1z_ymm_ymmm256 = 2_450, EVEX_Vpmaxsw_zmm_k1z_zmm_zmmm512 = 2_451, Pxor_mm_mmm64 = 2_452, Pxor_xmm_xmmm128 = 2_453, VEX_Vpxor_xmm_xmm_xmmm128 = 2_454, VEX_Vpxor_ymm_ymm_ymmm256 = 2_455, EVEX_Vpxord_xmm_k1z_xmm_xmmm128b32 = 2_456, 
EVEX_Vpxord_ymm_k1z_ymm_ymmm256b32 = 2_457, EVEX_Vpxord_zmm_k1z_zmm_zmmm512b32 = 2_458, EVEX_Vpxorq_xmm_k1z_xmm_xmmm128b64 = 2_459, EVEX_Vpxorq_ymm_k1z_ymm_ymmm256b64 = 2_460, EVEX_Vpxorq_zmm_k1z_zmm_zmmm512b64 = 2_461, Lddqu_xmm_m128 = 2_462, VEX_Vlddqu_xmm_m128 = 2_463, VEX_Vlddqu_ymm_m256 = 2_464, Psllw_mm_mmm64 = 2_465, Psllw_xmm_xmmm128 = 2_466, VEX_Vpsllw_xmm_xmm_xmmm128 = 2_467, VEX_Vpsllw_ymm_ymm_xmmm128 = 2_468, EVEX_Vpsllw_xmm_k1z_xmm_xmmm128 = 2_469, EVEX_Vpsllw_ymm_k1z_ymm_xmmm128 = 2_470, EVEX_Vpsllw_zmm_k1z_zmm_xmmm128 = 2_471, Pslld_mm_mmm64 = 2_472, Pslld_xmm_xmmm128 = 2_473, VEX_Vpslld_xmm_xmm_xmmm128 = 2_474, VEX_Vpslld_ymm_ymm_xmmm128 = 2_475, EVEX_Vpslld_xmm_k1z_xmm_xmmm128 = 2_476, EVEX_Vpslld_ymm_k1z_ymm_xmmm128 = 2_477, EVEX_Vpslld_zmm_k1z_zmm_xmmm128 = 2_478, Psllq_mm_mmm64 = 2_479, Psllq_xmm_xmmm128 = 2_480, VEX_Vpsllq_xmm_xmm_xmmm128 = 2_481, VEX_Vpsllq_ymm_ymm_xmmm128 = 2_482, EVEX_Vpsllq_xmm_k1z_xmm_xmmm128 = 2_483, EVEX_Vpsllq_ymm_k1z_ymm_xmmm128 = 2_484, EVEX_Vpsllq_zmm_k1z_zmm_xmmm128 = 2_485, Pmuludq_mm_mmm64 = 2_486, Pmuludq_xmm_xmmm128 = 2_487, VEX_Vpmuludq_xmm_xmm_xmmm128 = 2_488, VEX_Vpmuludq_ymm_ymm_ymmm256 = 2_489, EVEX_Vpmuludq_xmm_k1z_xmm_xmmm128b64 = 2_490, EVEX_Vpmuludq_ymm_k1z_ymm_ymmm256b64 = 2_491, EVEX_Vpmuludq_zmm_k1z_zmm_zmmm512b64 = 2_492, Pmaddwd_mm_mmm64 = 2_493, Pmaddwd_xmm_xmmm128 = 2_494, VEX_Vpmaddwd_xmm_xmm_xmmm128 = 2_495, VEX_Vpmaddwd_ymm_ymm_ymmm256 = 2_496, EVEX_Vpmaddwd_xmm_k1z_xmm_xmmm128 = 2_497, EVEX_Vpmaddwd_ymm_k1z_ymm_ymmm256 = 2_498, EVEX_Vpmaddwd_zmm_k1z_zmm_zmmm512 = 2_499, Psadbw_mm_mmm64 = 2_500, Psadbw_xmm_xmmm128 = 2_501, VEX_Vpsadbw_xmm_xmm_xmmm128 = 2_502, VEX_Vpsadbw_ymm_ymm_ymmm256 = 2_503, EVEX_Vpsadbw_xmm_xmm_xmmm128 = 2_504, EVEX_Vpsadbw_ymm_ymm_ymmm256 = 2_505, EVEX_Vpsadbw_zmm_zmm_zmmm512 = 2_506, Maskmovq_rDI_mm_mm = 2_507, Maskmovdqu_rDI_xmm_xmm = 2_508, VEX_Vmaskmovdqu_rDI_xmm_xmm = 2_509, Psubb_mm_mmm64 = 2_510, Psubb_xmm_xmmm128 = 2_511, VEX_Vpsubb_xmm_xmm_xmmm128 = 2_512, VEX_Vpsubb_ymm_ymm_ymmm256 = 2_513, EVEX_Vpsubb_xmm_k1z_xmm_xmmm128 = 2_514, EVEX_Vpsubb_ymm_k1z_ymm_ymmm256 = 2_515, EVEX_Vpsubb_zmm_k1z_zmm_zmmm512 = 2_516, Psubw_mm_mmm64 = 2_517, Psubw_xmm_xmmm128 = 2_518, VEX_Vpsubw_xmm_xmm_xmmm128 = 2_519, VEX_Vpsubw_ymm_ymm_ymmm256 = 2_520, EVEX_Vpsubw_xmm_k1z_xmm_xmmm128 = 2_521, EVEX_Vpsubw_ymm_k1z_ymm_ymmm256 = 2_522, EVEX_Vpsubw_zmm_k1z_zmm_zmmm512 = 2_523, Psubd_mm_mmm64 = 2_524, Psubd_xmm_xmmm128 = 2_525, VEX_Vpsubd_xmm_xmm_xmmm128 = 2_526, VEX_Vpsubd_ymm_ymm_ymmm256 = 2_527, EVEX_Vpsubd_xmm_k1z_xmm_xmmm128b32 = 2_528, EVEX_Vpsubd_ymm_k1z_ymm_ymmm256b32 = 2_529, EVEX_Vpsubd_zmm_k1z_zmm_zmmm512b32 = 2_530, Psubq_mm_mmm64 = 2_531, Psubq_xmm_xmmm128 = 2_532, VEX_Vpsubq_xmm_xmm_xmmm128 = 2_533, VEX_Vpsubq_ymm_ymm_ymmm256 = 2_534, EVEX_Vpsubq_xmm_k1z_xmm_xmmm128b64 = 2_535, EVEX_Vpsubq_ymm_k1z_ymm_ymmm256b64 = 2_536, EVEX_Vpsubq_zmm_k1z_zmm_zmmm512b64 = 2_537, Paddb_mm_mmm64 = 2_538, Paddb_xmm_xmmm128 = 2_539, VEX_Vpaddb_xmm_xmm_xmmm128 = 2_540, VEX_Vpaddb_ymm_ymm_ymmm256 = 2_541, EVEX_Vpaddb_xmm_k1z_xmm_xmmm128 = 2_542, EVEX_Vpaddb_ymm_k1z_ymm_ymmm256 = 2_543, EVEX_Vpaddb_zmm_k1z_zmm_zmmm512 = 2_544, Paddw_mm_mmm64 = 2_545, Paddw_xmm_xmmm128 = 2_546, VEX_Vpaddw_xmm_xmm_xmmm128 = 2_547, VEX_Vpaddw_ymm_ymm_ymmm256 = 2_548, EVEX_Vpaddw_xmm_k1z_xmm_xmmm128 = 2_549, EVEX_Vpaddw_ymm_k1z_ymm_ymmm256 = 2_550, EVEX_Vpaddw_zmm_k1z_zmm_zmmm512 = 2_551, Paddd_mm_mmm64 = 2_552, Paddd_xmm_xmmm128 = 2_553, VEX_Vpaddd_xmm_xmm_xmmm128 = 2_554, VEX_Vpaddd_ymm_ymm_ymmm256 = 2_555, 
EVEX_Vpaddd_xmm_k1z_xmm_xmmm128b32 = 2_556, EVEX_Vpaddd_ymm_k1z_ymm_ymmm256b32 = 2_557, EVEX_Vpaddd_zmm_k1z_zmm_zmmm512b32 = 2_558, Ud0_r16_rm16 = 2_559, Ud0_r32_rm32 = 2_560, Ud0_r64_rm64 = 2_561, Pshufb_mm_mmm64 = 2_562, Pshufb_xmm_xmmm128 = 2_563, VEX_Vpshufb_xmm_xmm_xmmm128 = 2_564, VEX_Vpshufb_ymm_ymm_ymmm256 = 2_565, EVEX_Vpshufb_xmm_k1z_xmm_xmmm128 = 2_566, EVEX_Vpshufb_ymm_k1z_ymm_ymmm256 = 2_567, EVEX_Vpshufb_zmm_k1z_zmm_zmmm512 = 2_568, Phaddw_mm_mmm64 = 2_569, Phaddw_xmm_xmmm128 = 2_570, VEX_Vphaddw_xmm_xmm_xmmm128 = 2_571, VEX_Vphaddw_ymm_ymm_ymmm256 = 2_572, Phaddd_mm_mmm64 = 2_573, Phaddd_xmm_xmmm128 = 2_574, VEX_Vphaddd_xmm_xmm_xmmm128 = 2_575, VEX_Vphaddd_ymm_ymm_ymmm256 = 2_576, Phaddsw_mm_mmm64 = 2_577, Phaddsw_xmm_xmmm128 = 2_578, VEX_Vphaddsw_xmm_xmm_xmmm128 = 2_579, VEX_Vphaddsw_ymm_ymm_ymmm256 = 2_580, Pmaddubsw_mm_mmm64 = 2_581, Pmaddubsw_xmm_xmmm128 = 2_582, VEX_Vpmaddubsw_xmm_xmm_xmmm128 = 2_583, VEX_Vpmaddubsw_ymm_ymm_ymmm256 = 2_584, EVEX_Vpmaddubsw_xmm_k1z_xmm_xmmm128 = 2_585, EVEX_Vpmaddubsw_ymm_k1z_ymm_ymmm256 = 2_586, EVEX_Vpmaddubsw_zmm_k1z_zmm_zmmm512 = 2_587, Phsubw_mm_mmm64 = 2_588, Phsubw_xmm_xmmm128 = 2_589, VEX_Vphsubw_xmm_xmm_xmmm128 = 2_590, VEX_Vphsubw_ymm_ymm_ymmm256 = 2_591, Phsubd_mm_mmm64 = 2_592, Phsubd_xmm_xmmm128 = 2_593, VEX_Vphsubd_xmm_xmm_xmmm128 = 2_594, VEX_Vphsubd_ymm_ymm_ymmm256 = 2_595, Phsubsw_mm_mmm64 = 2_596, Phsubsw_xmm_xmmm128 = 2_597, VEX_Vphsubsw_xmm_xmm_xmmm128 = 2_598, VEX_Vphsubsw_ymm_ymm_ymmm256 = 2_599, Psignb_mm_mmm64 = 2_600, Psignb_xmm_xmmm128 = 2_601, VEX_Vpsignb_xmm_xmm_xmmm128 = 2_602, VEX_Vpsignb_ymm_ymm_ymmm256 = 2_603, Psignw_mm_mmm64 = 2_604, Psignw_xmm_xmmm128 = 2_605, VEX_Vpsignw_xmm_xmm_xmmm128 = 2_606, VEX_Vpsignw_ymm_ymm_ymmm256 = 2_607, Psignd_mm_mmm64 = 2_608, Psignd_xmm_xmmm128 = 2_609, VEX_Vpsignd_xmm_xmm_xmmm128 = 2_610, VEX_Vpsignd_ymm_ymm_ymmm256 = 2_611, Pmulhrsw_mm_mmm64 = 2_612, Pmulhrsw_xmm_xmmm128 = 2_613, VEX_Vpmulhrsw_xmm_xmm_xmmm128 = 2_614, VEX_Vpmulhrsw_ymm_ymm_ymmm256 = 2_615, EVEX_Vpmulhrsw_xmm_k1z_xmm_xmmm128 = 2_616, EVEX_Vpmulhrsw_ymm_k1z_ymm_ymmm256 = 2_617, EVEX_Vpmulhrsw_zmm_k1z_zmm_zmmm512 = 2_618, VEX_Vpermilps_xmm_xmm_xmmm128 = 2_619, VEX_Vpermilps_ymm_ymm_ymmm256 = 2_620, EVEX_Vpermilps_xmm_k1z_xmm_xmmm128b32 = 2_621, EVEX_Vpermilps_ymm_k1z_ymm_ymmm256b32 = 2_622, EVEX_Vpermilps_zmm_k1z_zmm_zmmm512b32 = 2_623, VEX_Vpermilpd_xmm_xmm_xmmm128 = 2_624, VEX_Vpermilpd_ymm_ymm_ymmm256 = 2_625, EVEX_Vpermilpd_xmm_k1z_xmm_xmmm128b64 = 2_626, EVEX_Vpermilpd_ymm_k1z_ymm_ymmm256b64 = 2_627, EVEX_Vpermilpd_zmm_k1z_zmm_zmmm512b64 = 2_628, VEX_Vtestps_xmm_xmmm128 = 2_629, VEX_Vtestps_ymm_ymmm256 = 2_630, VEX_Vtestpd_xmm_xmmm128 = 2_631, VEX_Vtestpd_ymm_ymmm256 = 2_632, Pblendvb_xmm_xmmm128 = 2_633, EVEX_Vpsrlvw_xmm_k1z_xmm_xmmm128 = 2_634, EVEX_Vpsrlvw_ymm_k1z_ymm_ymmm256 = 2_635, EVEX_Vpsrlvw_zmm_k1z_zmm_zmmm512 = 2_636, EVEX_Vpmovuswb_xmmm64_k1z_xmm = 2_637, EVEX_Vpmovuswb_xmmm128_k1z_ymm = 2_638, EVEX_Vpmovuswb_ymmm256_k1z_zmm = 2_639, EVEX_Vpsravw_xmm_k1z_xmm_xmmm128 = 2_640, EVEX_Vpsravw_ymm_k1z_ymm_ymmm256 = 2_641, EVEX_Vpsravw_zmm_k1z_zmm_zmmm512 = 2_642, EVEX_Vpmovusdb_xmmm32_k1z_xmm = 2_643, EVEX_Vpmovusdb_xmmm64_k1z_ymm = 2_644, EVEX_Vpmovusdb_xmmm128_k1z_zmm = 2_645, EVEX_Vpsllvw_xmm_k1z_xmm_xmmm128 = 2_646, EVEX_Vpsllvw_ymm_k1z_ymm_ymmm256 = 2_647, EVEX_Vpsllvw_zmm_k1z_zmm_zmmm512 = 2_648, EVEX_Vpmovusqb_xmmm16_k1z_xmm = 2_649, EVEX_Vpmovusqb_xmmm32_k1z_ymm = 2_650, EVEX_Vpmovusqb_xmmm64_k1z_zmm = 2_651, VEX_Vcvtph2ps_xmm_xmmm64 = 2_652, VEX_Vcvtph2ps_ymm_xmmm128 = 2_653, 
EVEX_Vcvtph2ps_xmm_k1z_xmmm64 = 2_654, EVEX_Vcvtph2ps_ymm_k1z_xmmm128 = 2_655, EVEX_Vcvtph2ps_zmm_k1z_ymmm256_sae = 2_656, EVEX_Vpmovusdw_xmmm64_k1z_xmm = 2_657, EVEX_Vpmovusdw_xmmm128_k1z_ymm = 2_658, EVEX_Vpmovusdw_ymmm256_k1z_zmm = 2_659, Blendvps_xmm_xmmm128 = 2_660, EVEX_Vprorvd_xmm_k1z_xmm_xmmm128b32 = 2_661, EVEX_Vprorvd_ymm_k1z_ymm_ymmm256b32 = 2_662, EVEX_Vprorvd_zmm_k1z_zmm_zmmm512b32 = 2_663, EVEX_Vprorvq_xmm_k1z_xmm_xmmm128b64 = 2_664, EVEX_Vprorvq_ymm_k1z_ymm_ymmm256b64 = 2_665, EVEX_Vprorvq_zmm_k1z_zmm_zmmm512b64 = 2_666, EVEX_Vpmovusqw_xmmm32_k1z_xmm = 2_667, EVEX_Vpmovusqw_xmmm64_k1z_ymm = 2_668, EVEX_Vpmovusqw_xmmm128_k1z_zmm = 2_669, Blendvpd_xmm_xmmm128 = 2_670, EVEX_Vprolvd_xmm_k1z_xmm_xmmm128b32 = 2_671, EVEX_Vprolvd_ymm_k1z_ymm_ymmm256b32 = 2_672, EVEX_Vprolvd_zmm_k1z_zmm_zmmm512b32 = 2_673, EVEX_Vprolvq_xmm_k1z_xmm_xmmm128b64 = 2_674, EVEX_Vprolvq_ymm_k1z_ymm_ymmm256b64 = 2_675, EVEX_Vprolvq_zmm_k1z_zmm_zmmm512b64 = 2_676, EVEX_Vpmovusqd_xmmm64_k1z_xmm = 2_677, EVEX_Vpmovusqd_xmmm128_k1z_ymm = 2_678, EVEX_Vpmovusqd_ymmm256_k1z_zmm = 2_679, VEX_Vpermps_ymm_ymm_ymmm256 = 2_680, EVEX_Vpermps_ymm_k1z_ymm_ymmm256b32 = 2_681, EVEX_Vpermps_zmm_k1z_zmm_zmmm512b32 = 2_682, EVEX_Vpermpd_ymm_k1z_ymm_ymmm256b64 = 2_683, EVEX_Vpermpd_zmm_k1z_zmm_zmmm512b64 = 2_684, Ptest_xmm_xmmm128 = 2_685, VEX_Vptest_xmm_xmmm128 = 2_686, VEX_Vptest_ymm_ymmm256 = 2_687, VEX_Vbroadcastss_xmm_m32 = 2_688, VEX_Vbroadcastss_ymm_m32 = 2_689, EVEX_Vbroadcastss_xmm_k1z_xmmm32 = 2_690, EVEX_Vbroadcastss_ymm_k1z_xmmm32 = 2_691, EVEX_Vbroadcastss_zmm_k1z_xmmm32 = 2_692, VEX_Vbroadcastsd_ymm_m64 = 2_693, EVEX_Vbroadcastf32x2_ymm_k1z_xmmm64 = 2_694, EVEX_Vbroadcastf32x2_zmm_k1z_xmmm64 = 2_695, EVEX_Vbroadcastsd_ymm_k1z_xmmm64 = 2_696, EVEX_Vbroadcastsd_zmm_k1z_xmmm64 = 2_697, VEX_Vbroadcastf128_ymm_m128 = 2_698, EVEX_Vbroadcastf32x4_ymm_k1z_m128 = 2_699, EVEX_Vbroadcastf32x4_zmm_k1z_m128 = 2_700, EVEX_Vbroadcastf64x2_ymm_k1z_m128 = 2_701, EVEX_Vbroadcastf64x2_zmm_k1z_m128 = 2_702, EVEX_Vbroadcastf32x8_zmm_k1z_m256 = 2_703, EVEX_Vbroadcastf64x4_zmm_k1z_m256 = 2_704, Pabsb_mm_mmm64 = 2_705, Pabsb_xmm_xmmm128 = 2_706, VEX_Vpabsb_xmm_xmmm128 = 2_707, VEX_Vpabsb_ymm_ymmm256 = 2_708, EVEX_Vpabsb_xmm_k1z_xmmm128 = 2_709, EVEX_Vpabsb_ymm_k1z_ymmm256 = 2_710, EVEX_Vpabsb_zmm_k1z_zmmm512 = 2_711, Pabsw_mm_mmm64 = 2_712, Pabsw_xmm_xmmm128 = 2_713, VEX_Vpabsw_xmm_xmmm128 = 2_714, VEX_Vpabsw_ymm_ymmm256 = 2_715, EVEX_Vpabsw_xmm_k1z_xmmm128 = 2_716, EVEX_Vpabsw_ymm_k1z_ymmm256 = 2_717, EVEX_Vpabsw_zmm_k1z_zmmm512 = 2_718, Pabsd_mm_mmm64 = 2_719, Pabsd_xmm_xmmm128 = 2_720, VEX_Vpabsd_xmm_xmmm128 = 2_721, VEX_Vpabsd_ymm_ymmm256 = 2_722, EVEX_Vpabsd_xmm_k1z_xmmm128b32 = 2_723, EVEX_Vpabsd_ymm_k1z_ymmm256b32 = 2_724, EVEX_Vpabsd_zmm_k1z_zmmm512b32 = 2_725, EVEX_Vpabsq_xmm_k1z_xmmm128b64 = 2_726, EVEX_Vpabsq_ymm_k1z_ymmm256b64 = 2_727, EVEX_Vpabsq_zmm_k1z_zmmm512b64 = 2_728, Pmovsxbw_xmm_xmmm64 = 2_729, VEX_Vpmovsxbw_xmm_xmmm64 = 2_730, VEX_Vpmovsxbw_ymm_xmmm128 = 2_731, EVEX_Vpmovsxbw_xmm_k1z_xmmm64 = 2_732, EVEX_Vpmovsxbw_ymm_k1z_xmmm128 = 2_733, EVEX_Vpmovsxbw_zmm_k1z_ymmm256 = 2_734, EVEX_Vpmovswb_xmmm64_k1z_xmm = 2_735, EVEX_Vpmovswb_xmmm128_k1z_ymm = 2_736, EVEX_Vpmovswb_ymmm256_k1z_zmm = 2_737, Pmovsxbd_xmm_xmmm32 = 2_738, VEX_Vpmovsxbd_xmm_xmmm32 = 2_739, VEX_Vpmovsxbd_ymm_xmmm64 = 2_740, EVEX_Vpmovsxbd_xmm_k1z_xmmm32 = 2_741, EVEX_Vpmovsxbd_ymm_k1z_xmmm64 = 2_742, EVEX_Vpmovsxbd_zmm_k1z_xmmm128 = 2_743, EVEX_Vpmovsdb_xmmm32_k1z_xmm = 2_744, EVEX_Vpmovsdb_xmmm64_k1z_ymm = 2_745, EVEX_Vpmovsdb_xmmm128_k1z_zmm = 2_746, 
Pmovsxbq_xmm_xmmm16 = 2_747, VEX_Vpmovsxbq_xmm_xmmm16 = 2_748, VEX_Vpmovsxbq_ymm_xmmm32 = 2_749, EVEX_Vpmovsxbq_xmm_k1z_xmmm16 = 2_750, EVEX_Vpmovsxbq_ymm_k1z_xmmm32 = 2_751, EVEX_Vpmovsxbq_zmm_k1z_xmmm64 = 2_752, EVEX_Vpmovsqb_xmmm16_k1z_xmm = 2_753, EVEX_Vpmovsqb_xmmm32_k1z_ymm = 2_754, EVEX_Vpmovsqb_xmmm64_k1z_zmm = 2_755, Pmovsxwd_xmm_xmmm64 = 2_756, VEX_Vpmovsxwd_xmm_xmmm64 = 2_757, VEX_Vpmovsxwd_ymm_xmmm128 = 2_758, EVEX_Vpmovsxwd_xmm_k1z_xmmm64 = 2_759, EVEX_Vpmovsxwd_ymm_k1z_xmmm128 = 2_760, EVEX_Vpmovsxwd_zmm_k1z_ymmm256 = 2_761, EVEX_Vpmovsdw_xmmm64_k1z_xmm = 2_762, EVEX_Vpmovsdw_xmmm128_k1z_ymm = 2_763, EVEX_Vpmovsdw_ymmm256_k1z_zmm = 2_764, Pmovsxwq_xmm_xmmm32 = 2_765, VEX_Vpmovsxwq_xmm_xmmm32 = 2_766, VEX_Vpmovsxwq_ymm_xmmm64 = 2_767, EVEX_Vpmovsxwq_xmm_k1z_xmmm32 = 2_768, EVEX_Vpmovsxwq_ymm_k1z_xmmm64 = 2_769, EVEX_Vpmovsxwq_zmm_k1z_xmmm128 = 2_770, EVEX_Vpmovsqw_xmmm32_k1z_xmm = 2_771, EVEX_Vpmovsqw_xmmm64_k1z_ymm = 2_772, EVEX_Vpmovsqw_xmmm128_k1z_zmm = 2_773, Pmovsxdq_xmm_xmmm64 = 2_774, VEX_Vpmovsxdq_xmm_xmmm64 = 2_775, VEX_Vpmovsxdq_ymm_xmmm128 = 2_776, EVEX_Vpmovsxdq_xmm_k1z_xmmm64 = 2_777, EVEX_Vpmovsxdq_ymm_k1z_xmmm128 = 2_778, EVEX_Vpmovsxdq_zmm_k1z_ymmm256 = 2_779, EVEX_Vpmovsqd_xmmm64_k1z_xmm = 2_780, EVEX_Vpmovsqd_xmmm128_k1z_ymm = 2_781, EVEX_Vpmovsqd_ymmm256_k1z_zmm = 2_782, EVEX_Vptestmb_kr_k1_xmm_xmmm128 = 2_783, EVEX_Vptestmb_kr_k1_ymm_ymmm256 = 2_784, EVEX_Vptestmb_kr_k1_zmm_zmmm512 = 2_785, EVEX_Vptestmw_kr_k1_xmm_xmmm128 = 2_786, EVEX_Vptestmw_kr_k1_ymm_ymmm256 = 2_787, EVEX_Vptestmw_kr_k1_zmm_zmmm512 = 2_788, EVEX_Vptestnmb_kr_k1_xmm_xmmm128 = 2_789, EVEX_Vptestnmb_kr_k1_ymm_ymmm256 = 2_790, EVEX_Vptestnmb_kr_k1_zmm_zmmm512 = 2_791, EVEX_Vptestnmw_kr_k1_xmm_xmmm128 = 2_792, EVEX_Vptestnmw_kr_k1_ymm_ymmm256 = 2_793, EVEX_Vptestnmw_kr_k1_zmm_zmmm512 = 2_794, EVEX_Vptestmd_kr_k1_xmm_xmmm128b32 = 2_795, EVEX_Vptestmd_kr_k1_ymm_ymmm256b32 = 2_796, EVEX_Vptestmd_kr_k1_zmm_zmmm512b32 = 2_797, EVEX_Vptestmq_kr_k1_xmm_xmmm128b64 = 2_798, EVEX_Vptestmq_kr_k1_ymm_ymmm256b64 = 2_799, EVEX_Vptestmq_kr_k1_zmm_zmmm512b64 = 2_800, EVEX_Vptestnmd_kr_k1_xmm_xmmm128b32 = 2_801, EVEX_Vptestnmd_kr_k1_ymm_ymmm256b32 = 2_802, EVEX_Vptestnmd_kr_k1_zmm_zmmm512b32 = 2_803, EVEX_Vptestnmq_kr_k1_xmm_xmmm128b64 = 2_804, EVEX_Vptestnmq_kr_k1_ymm_ymmm256b64 = 2_805, EVEX_Vptestnmq_kr_k1_zmm_zmmm512b64 = 2_806, Pmuldq_xmm_xmmm128 = 2_807, VEX_Vpmuldq_xmm_xmm_xmmm128 = 2_808, VEX_Vpmuldq_ymm_ymm_ymmm256 = 2_809, EVEX_Vpmuldq_xmm_k1z_xmm_xmmm128b64 = 2_810, EVEX_Vpmuldq_ymm_k1z_ymm_ymmm256b64 = 2_811, EVEX_Vpmuldq_zmm_k1z_zmm_zmmm512b64 = 2_812, EVEX_Vpmovm2b_xmm_kr = 2_813, EVEX_Vpmovm2b_ymm_kr = 2_814, EVEX_Vpmovm2b_zmm_kr = 2_815, EVEX_Vpmovm2w_xmm_kr = 2_816, EVEX_Vpmovm2w_ymm_kr = 2_817, EVEX_Vpmovm2w_zmm_kr = 2_818, Pcmpeqq_xmm_xmmm128 = 2_819, VEX_Vpcmpeqq_xmm_xmm_xmmm128 = 2_820, VEX_Vpcmpeqq_ymm_ymm_ymmm256 = 2_821, EVEX_Vpcmpeqq_kr_k1_xmm_xmmm128b64 = 2_822, EVEX_Vpcmpeqq_kr_k1_ymm_ymmm256b64 = 2_823, EVEX_Vpcmpeqq_kr_k1_zmm_zmmm512b64 = 2_824, EVEX_Vpmovb2m_kr_xmm = 2_825, EVEX_Vpmovb2m_kr_ymm = 2_826, EVEX_Vpmovb2m_kr_zmm = 2_827, EVEX_Vpmovw2m_kr_xmm = 2_828, EVEX_Vpmovw2m_kr_ymm = 2_829, EVEX_Vpmovw2m_kr_zmm = 2_830, Movntdqa_xmm_m128 = 2_831, VEX_Vmovntdqa_xmm_m128 = 2_832, VEX_Vmovntdqa_ymm_m256 = 2_833, EVEX_Vmovntdqa_xmm_m128 = 2_834, EVEX_Vmovntdqa_ymm_m256 = 2_835, EVEX_Vmovntdqa_zmm_m512 = 2_836, EVEX_Vpbroadcastmb2q_xmm_kr = 2_837, EVEX_Vpbroadcastmb2q_ymm_kr = 2_838, EVEX_Vpbroadcastmb2q_zmm_kr = 2_839, Packusdw_xmm_xmmm128 = 2_840, VEX_Vpackusdw_xmm_xmm_xmmm128 = 
2_841, VEX_Vpackusdw_ymm_ymm_ymmm256 = 2_842, EVEX_Vpackusdw_xmm_k1z_xmm_xmmm128b32 = 2_843, EVEX_Vpackusdw_ymm_k1z_ymm_ymmm256b32 = 2_844, EVEX_Vpackusdw_zmm_k1z_zmm_zmmm512b32 = 2_845, VEX_Vmaskmovps_xmm_xmm_m128 = 2_846, VEX_Vmaskmovps_ymm_ymm_m256 = 2_847, EVEX_Vscalefps_xmm_k1z_xmm_xmmm128b32 = 2_848, EVEX_Vscalefps_ymm_k1z_ymm_ymmm256b32 = 2_849, EVEX_Vscalefps_zmm_k1z_zmm_zmmm512b32_er = 2_850, EVEX_Vscalefpd_xmm_k1z_xmm_xmmm128b64 = 2_851, EVEX_Vscalefpd_ymm_k1z_ymm_ymmm256b64 = 2_852, EVEX_Vscalefpd_zmm_k1z_zmm_zmmm512b64_er = 2_853, VEX_Vmaskmovpd_xmm_xmm_m128 = 2_854, VEX_Vmaskmovpd_ymm_ymm_m256 = 2_855, EVEX_Vscalefss_xmm_k1z_xmm_xmmm32_er = 2_856, EVEX_Vscalefsd_xmm_k1z_xmm_xmmm64_er = 2_857, VEX_Vmaskmovps_m128_xmm_xmm = 2_858, VEX_Vmaskmovps_m256_ymm_ymm = 2_859, VEX_Vmaskmovpd_m128_xmm_xmm = 2_860, VEX_Vmaskmovpd_m256_ymm_ymm = 2_861, Pmovzxbw_xmm_xmmm64 = 2_862, VEX_Vpmovzxbw_xmm_xmmm64 = 2_863, VEX_Vpmovzxbw_ymm_xmmm128 = 2_864, EVEX_Vpmovzxbw_xmm_k1z_xmmm64 = 2_865, EVEX_Vpmovzxbw_ymm_k1z_xmmm128 = 2_866, EVEX_Vpmovzxbw_zmm_k1z_ymmm256 = 2_867, EVEX_Vpmovwb_xmmm64_k1z_xmm = 2_868, EVEX_Vpmovwb_xmmm128_k1z_ymm = 2_869, EVEX_Vpmovwb_ymmm256_k1z_zmm = 2_870, Pmovzxbd_xmm_xmmm32 = 2_871, VEX_Vpmovzxbd_xmm_xmmm32 = 2_872, VEX_Vpmovzxbd_ymm_xmmm64 = 2_873, EVEX_Vpmovzxbd_xmm_k1z_xmmm32 = 2_874, EVEX_Vpmovzxbd_ymm_k1z_xmmm64 = 2_875, EVEX_Vpmovzxbd_zmm_k1z_xmmm128 = 2_876, EVEX_Vpmovdb_xmmm32_k1z_xmm = 2_877, EVEX_Vpmovdb_xmmm64_k1z_ymm = 2_878, EVEX_Vpmovdb_xmmm128_k1z_zmm = 2_879, Pmovzxbq_xmm_xmmm16 = 2_880, VEX_Vpmovzxbq_xmm_xmmm16 = 2_881, VEX_Vpmovzxbq_ymm_xmmm32 = 2_882, EVEX_Vpmovzxbq_xmm_k1z_xmmm16 = 2_883, EVEX_Vpmovzxbq_ymm_k1z_xmmm32 = 2_884, EVEX_Vpmovzxbq_zmm_k1z_xmmm64 = 2_885, EVEX_Vpmovqb_xmmm16_k1z_xmm = 2_886, EVEX_Vpmovqb_xmmm32_k1z_ymm = 2_887, EVEX_Vpmovqb_xmmm64_k1z_zmm = 2_888, Pmovzxwd_xmm_xmmm64 = 2_889, VEX_Vpmovzxwd_xmm_xmmm64 = 2_890, VEX_Vpmovzxwd_ymm_xmmm128 = 2_891, EVEX_Vpmovzxwd_xmm_k1z_xmmm64 = 2_892, EVEX_Vpmovzxwd_ymm_k1z_xmmm128 = 2_893, EVEX_Vpmovzxwd_zmm_k1z_ymmm256 = 2_894, EVEX_Vpmovdw_xmmm64_k1z_xmm = 2_895, EVEX_Vpmovdw_xmmm128_k1z_ymm = 2_896, EVEX_Vpmovdw_ymmm256_k1z_zmm = 2_897, Pmovzxwq_xmm_xmmm32 = 2_898, VEX_Vpmovzxwq_xmm_xmmm32 = 2_899, VEX_Vpmovzxwq_ymm_xmmm64 = 2_900, EVEX_Vpmovzxwq_xmm_k1z_xmmm32 = 2_901, EVEX_Vpmovzxwq_ymm_k1z_xmmm64 = 2_902, EVEX_Vpmovzxwq_zmm_k1z_xmmm128 = 2_903, EVEX_Vpmovqw_xmmm32_k1z_xmm = 2_904, EVEX_Vpmovqw_xmmm64_k1z_ymm = 2_905, EVEX_Vpmovqw_xmmm128_k1z_zmm = 2_906, Pmovzxdq_xmm_xmmm64 = 2_907, VEX_Vpmovzxdq_xmm_xmmm64 = 2_908, VEX_Vpmovzxdq_ymm_xmmm128 = 2_909, EVEX_Vpmovzxdq_xmm_k1z_xmmm64 = 2_910, EVEX_Vpmovzxdq_ymm_k1z_xmmm128 = 2_911, EVEX_Vpmovzxdq_zmm_k1z_ymmm256 = 2_912, EVEX_Vpmovqd_xmmm64_k1z_xmm = 2_913, EVEX_Vpmovqd_xmmm128_k1z_ymm = 2_914, EVEX_Vpmovqd_ymmm256_k1z_zmm = 2_915, VEX_Vpermd_ymm_ymm_ymmm256 = 2_916, EVEX_Vpermd_ymm_k1z_ymm_ymmm256b32 = 2_917, EVEX_Vpermd_zmm_k1z_zmm_zmmm512b32 = 2_918, EVEX_Vpermq_ymm_k1z_ymm_ymmm256b64 = 2_919, EVEX_Vpermq_zmm_k1z_zmm_zmmm512b64 = 2_920, Pcmpgtq_xmm_xmmm128 = 2_921, VEX_Vpcmpgtq_xmm_xmm_xmmm128 = 2_922, VEX_Vpcmpgtq_ymm_ymm_ymmm256 = 2_923, EVEX_Vpcmpgtq_kr_k1_xmm_xmmm128b64 = 2_924, EVEX_Vpcmpgtq_kr_k1_ymm_ymmm256b64 = 2_925, EVEX_Vpcmpgtq_kr_k1_zmm_zmmm512b64 = 2_926, Pminsb_xmm_xmmm128 = 2_927, VEX_Vpminsb_xmm_xmm_xmmm128 = 2_928, VEX_Vpminsb_ymm_ymm_ymmm256 = 2_929, EVEX_Vpminsb_xmm_k1z_xmm_xmmm128 = 2_930, EVEX_Vpminsb_ymm_k1z_ymm_ymmm256 = 2_931, EVEX_Vpminsb_zmm_k1z_zmm_zmmm512 = 2_932, EVEX_Vpmovm2d_xmm_kr = 2_933, 
EVEX_Vpmovm2d_ymm_kr = 2_934, EVEX_Vpmovm2d_zmm_kr = 2_935, EVEX_Vpmovm2q_xmm_kr = 2_936, EVEX_Vpmovm2q_ymm_kr = 2_937, EVEX_Vpmovm2q_zmm_kr = 2_938, Pminsd_xmm_xmmm128 = 2_939, VEX_Vpminsd_xmm_xmm_xmmm128 = 2_940, VEX_Vpminsd_ymm_ymm_ymmm256 = 2_941, EVEX_Vpminsd_xmm_k1z_xmm_xmmm128b32 = 2_942, EVEX_Vpminsd_ymm_k1z_ymm_ymmm256b32 = 2_943, EVEX_Vpminsd_zmm_k1z_zmm_zmmm512b32 = 2_944, EVEX_Vpminsq_xmm_k1z_xmm_xmmm128b64 = 2_945, EVEX_Vpminsq_ymm_k1z_ymm_ymmm256b64 = 2_946, EVEX_Vpminsq_zmm_k1z_zmm_zmmm512b64 = 2_947, EVEX_Vpmovd2m_kr_xmm = 2_948, EVEX_Vpmovd2m_kr_ymm = 2_949, EVEX_Vpmovd2m_kr_zmm = 2_950, EVEX_Vpmovq2m_kr_xmm = 2_951, EVEX_Vpmovq2m_kr_ymm = 2_952, EVEX_Vpmovq2m_kr_zmm = 2_953, Pminuw_xmm_xmmm128 = 2_954, VEX_Vpminuw_xmm_xmm_xmmm128 = 2_955, VEX_Vpminuw_ymm_ymm_ymmm256 = 2_956, EVEX_Vpminuw_xmm_k1z_xmm_xmmm128 = 2_957, EVEX_Vpminuw_ymm_k1z_ymm_ymmm256 = 2_958, EVEX_Vpminuw_zmm_k1z_zmm_zmmm512 = 2_959, EVEX_Vpbroadcastmw2d_xmm_kr = 2_960, EVEX_Vpbroadcastmw2d_ymm_kr = 2_961, EVEX_Vpbroadcastmw2d_zmm_kr = 2_962, Pminud_xmm_xmmm128 = 2_963, VEX_Vpminud_xmm_xmm_xmmm128 = 2_964, VEX_Vpminud_ymm_ymm_ymmm256 = 2_965, EVEX_Vpminud_xmm_k1z_xmm_xmmm128b32 = 2_966, EVEX_Vpminud_ymm_k1z_ymm_ymmm256b32 = 2_967, EVEX_Vpminud_zmm_k1z_zmm_zmmm512b32 = 2_968, EVEX_Vpminuq_xmm_k1z_xmm_xmmm128b64 = 2_969, EVEX_Vpminuq_ymm_k1z_ymm_ymmm256b64 = 2_970, EVEX_Vpminuq_zmm_k1z_zmm_zmmm512b64 = 2_971, Pmaxsb_xmm_xmmm128 = 2_972, VEX_Vpmaxsb_xmm_xmm_xmmm128 = 2_973, VEX_Vpmaxsb_ymm_ymm_ymmm256 = 2_974, EVEX_Vpmaxsb_xmm_k1z_xmm_xmmm128 = 2_975, EVEX_Vpmaxsb_ymm_k1z_ymm_ymmm256 = 2_976, EVEX_Vpmaxsb_zmm_k1z_zmm_zmmm512 = 2_977, Pmaxsd_xmm_xmmm128 = 2_978, VEX_Vpmaxsd_xmm_xmm_xmmm128 = 2_979, VEX_Vpmaxsd_ymm_ymm_ymmm256 = 2_980, EVEX_Vpmaxsd_xmm_k1z_xmm_xmmm128b32 = 2_981, EVEX_Vpmaxsd_ymm_k1z_ymm_ymmm256b32 = 2_982, EVEX_Vpmaxsd_zmm_k1z_zmm_zmmm512b32 = 2_983, EVEX_Vpmaxsq_xmm_k1z_xmm_xmmm128b64 = 2_984, EVEX_Vpmaxsq_ymm_k1z_ymm_ymmm256b64 = 2_985, EVEX_Vpmaxsq_zmm_k1z_zmm_zmmm512b64 = 2_986, Pmaxuw_xmm_xmmm128 = 2_987, VEX_Vpmaxuw_xmm_xmm_xmmm128 = 2_988, VEX_Vpmaxuw_ymm_ymm_ymmm256 = 2_989, EVEX_Vpmaxuw_xmm_k1z_xmm_xmmm128 = 2_990, EVEX_Vpmaxuw_ymm_k1z_ymm_ymmm256 = 2_991, EVEX_Vpmaxuw_zmm_k1z_zmm_zmmm512 = 2_992, Pmaxud_xmm_xmmm128 = 2_993, VEX_Vpmaxud_xmm_xmm_xmmm128 = 2_994, VEX_Vpmaxud_ymm_ymm_ymmm256 = 2_995, EVEX_Vpmaxud_xmm_k1z_xmm_xmmm128b32 = 2_996, EVEX_Vpmaxud_ymm_k1z_ymm_ymmm256b32 = 2_997, EVEX_Vpmaxud_zmm_k1z_zmm_zmmm512b32 = 2_998, EVEX_Vpmaxuq_xmm_k1z_xmm_xmmm128b64 = 2_999, EVEX_Vpmaxuq_ymm_k1z_ymm_ymmm256b64 = 3_000, EVEX_Vpmaxuq_zmm_k1z_zmm_zmmm512b64 = 3_001, Pmulld_xmm_xmmm128 = 3_002, VEX_Vpmulld_xmm_xmm_xmmm128 = 3_003, VEX_Vpmulld_ymm_ymm_ymmm256 = 3_004, EVEX_Vpmulld_xmm_k1z_xmm_xmmm128b32 = 3_005, EVEX_Vpmulld_ymm_k1z_ymm_ymmm256b32 = 3_006, EVEX_Vpmulld_zmm_k1z_zmm_zmmm512b32 = 3_007, EVEX_Vpmullq_xmm_k1z_xmm_xmmm128b64 = 3_008, EVEX_Vpmullq_ymm_k1z_ymm_ymmm256b64 = 3_009, EVEX_Vpmullq_zmm_k1z_zmm_zmmm512b64 = 3_010, Phminposuw_xmm_xmmm128 = 3_011, VEX_Vphminposuw_xmm_xmmm128 = 3_012, EVEX_Vgetexpps_xmm_k1z_xmmm128b32 = 3_013, EVEX_Vgetexpps_ymm_k1z_ymmm256b32 = 3_014, EVEX_Vgetexpps_zmm_k1z_zmmm512b32_sae = 3_015, EVEX_Vgetexppd_xmm_k1z_xmmm128b64 = 3_016, EVEX_Vgetexppd_ymm_k1z_ymmm256b64 = 3_017, EVEX_Vgetexppd_zmm_k1z_zmmm512b64_sae = 3_018, EVEX_Vgetexpss_xmm_k1z_xmm_xmmm32_sae = 3_019, EVEX_Vgetexpsd_xmm_k1z_xmm_xmmm64_sae = 3_020, EVEX_Vplzcntd_xmm_k1z_xmmm128b32 = 3_021, EVEX_Vplzcntd_ymm_k1z_ymmm256b32 = 3_022, EVEX_Vplzcntd_zmm_k1z_zmmm512b32 = 3_023, 
EVEX_Vplzcntq_xmm_k1z_xmmm128b64 = 3_024, EVEX_Vplzcntq_ymm_k1z_ymmm256b64 = 3_025, EVEX_Vplzcntq_zmm_k1z_zmmm512b64 = 3_026, VEX_Vpsrlvd_xmm_xmm_xmmm128 = 3_027, VEX_Vpsrlvd_ymm_ymm_ymmm256 = 3_028, VEX_Vpsrlvq_xmm_xmm_xmmm128 = 3_029, VEX_Vpsrlvq_ymm_ymm_ymmm256 = 3_030, EVEX_Vpsrlvd_xmm_k1z_xmm_xmmm128b32 = 3_031, EVEX_Vpsrlvd_ymm_k1z_ymm_ymmm256b32 = 3_032, EVEX_Vpsrlvd_zmm_k1z_zmm_zmmm512b32 = 3_033, EVEX_Vpsrlvq_xmm_k1z_xmm_xmmm128b64 = 3_034, EVEX_Vpsrlvq_ymm_k1z_ymm_ymmm256b64 = 3_035, EVEX_Vpsrlvq_zmm_k1z_zmm_zmmm512b64 = 3_036, VEX_Vpsravd_xmm_xmm_xmmm128 = 3_037, VEX_Vpsravd_ymm_ymm_ymmm256 = 3_038, EVEX_Vpsravd_xmm_k1z_xmm_xmmm128b32 = 3_039, EVEX_Vpsravd_ymm_k1z_ymm_ymmm256b32 = 3_040, EVEX_Vpsravd_zmm_k1z_zmm_zmmm512b32 = 3_041, EVEX_Vpsravq_xmm_k1z_xmm_xmmm128b64 = 3_042, EVEX_Vpsravq_ymm_k1z_ymm_ymmm256b64 = 3_043, EVEX_Vpsravq_zmm_k1z_zmm_zmmm512b64 = 3_044, VEX_Vpsllvd_xmm_xmm_xmmm128 = 3_045, VEX_Vpsllvd_ymm_ymm_ymmm256 = 3_046, VEX_Vpsllvq_xmm_xmm_xmmm128 = 3_047, VEX_Vpsllvq_ymm_ymm_ymmm256 = 3_048, EVEX_Vpsllvd_xmm_k1z_xmm_xmmm128b32 = 3_049, EVEX_Vpsllvd_ymm_k1z_ymm_ymmm256b32 = 3_050, EVEX_Vpsllvd_zmm_k1z_zmm_zmmm512b32 = 3_051, EVEX_Vpsllvq_xmm_k1z_xmm_xmmm128b64 = 3_052, EVEX_Vpsllvq_ymm_k1z_ymm_ymmm256b64 = 3_053, EVEX_Vpsllvq_zmm_k1z_zmm_zmmm512b64 = 3_054, EVEX_Vrcp14ps_xmm_k1z_xmmm128b32 = 3_055, EVEX_Vrcp14ps_ymm_k1z_ymmm256b32 = 3_056, EVEX_Vrcp14ps_zmm_k1z_zmmm512b32 = 3_057, EVEX_Vrcp14pd_xmm_k1z_xmmm128b64 = 3_058, EVEX_Vrcp14pd_ymm_k1z_ymmm256b64 = 3_059, EVEX_Vrcp14pd_zmm_k1z_zmmm512b64 = 3_060, EVEX_Vrcp14ss_xmm_k1z_xmm_xmmm32 = 3_061, EVEX_Vrcp14sd_xmm_k1z_xmm_xmmm64 = 3_062, EVEX_Vrsqrt14ps_xmm_k1z_xmmm128b32 = 3_063, EVEX_Vrsqrt14ps_ymm_k1z_ymmm256b32 = 3_064, EVEX_Vrsqrt14ps_zmm_k1z_zmmm512b32 = 3_065, EVEX_Vrsqrt14pd_xmm_k1z_xmmm128b64 = 3_066, EVEX_Vrsqrt14pd_ymm_k1z_ymmm256b64 = 3_067, EVEX_Vrsqrt14pd_zmm_k1z_zmmm512b64 = 3_068, EVEX_Vrsqrt14ss_xmm_k1z_xmm_xmmm32 = 3_069, EVEX_Vrsqrt14sd_xmm_k1z_xmm_xmmm64 = 3_070, EVEX_Vpdpbusd_xmm_k1z_xmm_xmmm128b32 = 3_071, EVEX_Vpdpbusd_ymm_k1z_ymm_ymmm256b32 = 3_072, EVEX_Vpdpbusd_zmm_k1z_zmm_zmmm512b32 = 3_073, EVEX_Vpdpbusds_xmm_k1z_xmm_xmmm128b32 = 3_074, EVEX_Vpdpbusds_ymm_k1z_ymm_ymmm256b32 = 3_075, EVEX_Vpdpbusds_zmm_k1z_zmm_zmmm512b32 = 3_076, EVEX_Vpdpwssd_xmm_k1z_xmm_xmmm128b32 = 3_077, EVEX_Vpdpwssd_ymm_k1z_ymm_ymmm256b32 = 3_078, EVEX_Vpdpwssd_zmm_k1z_zmm_zmmm512b32 = 3_079, EVEX_Vdpbf16ps_xmm_k1z_xmm_xmmm128b32 = 3_080, EVEX_Vdpbf16ps_ymm_k1z_ymm_ymmm256b32 = 3_081, EVEX_Vdpbf16ps_zmm_k1z_zmm_zmmm512b32 = 3_082, EVEX_Vp4dpwssd_zmm_k1z_zmmp3_m128 = 3_083, EVEX_Vpdpwssds_xmm_k1z_xmm_xmmm128b32 = 3_084, EVEX_Vpdpwssds_ymm_k1z_ymm_ymmm256b32 = 3_085, EVEX_Vpdpwssds_zmm_k1z_zmm_zmmm512b32 = 3_086, EVEX_Vp4dpwssds_zmm_k1z_zmmp3_m128 = 3_087, EVEX_Vpopcntb_xmm_k1z_xmmm128 = 3_088, EVEX_Vpopcntb_ymm_k1z_ymmm256 = 3_089, EVEX_Vpopcntb_zmm_k1z_zmmm512 = 3_090, EVEX_Vpopcntw_xmm_k1z_xmmm128 = 3_091, EVEX_Vpopcntw_ymm_k1z_ymmm256 = 3_092, EVEX_Vpopcntw_zmm_k1z_zmmm512 = 3_093, EVEX_Vpopcntd_xmm_k1z_xmmm128b32 = 3_094, EVEX_Vpopcntd_ymm_k1z_ymmm256b32 = 3_095, EVEX_Vpopcntd_zmm_k1z_zmmm512b32 = 3_096, EVEX_Vpopcntq_xmm_k1z_xmmm128b64 = 3_097, EVEX_Vpopcntq_ymm_k1z_ymmm256b64 = 3_098, EVEX_Vpopcntq_zmm_k1z_zmmm512b64 = 3_099, VEX_Vpbroadcastd_xmm_xmmm32 = 3_100, VEX_Vpbroadcastd_ymm_xmmm32 = 3_101, EVEX_Vpbroadcastd_xmm_k1z_xmmm32 = 3_102, EVEX_Vpbroadcastd_ymm_k1z_xmmm32 = 3_103, EVEX_Vpbroadcastd_zmm_k1z_xmmm32 = 3_104, VEX_Vpbroadcastq_xmm_xmmm64 = 3_105, VEX_Vpbroadcastq_ymm_xmmm64 = 3_106, 
EVEX_Vbroadcasti32x2_xmm_k1z_xmmm64 = 3_107, EVEX_Vbroadcasti32x2_ymm_k1z_xmmm64 = 3_108, EVEX_Vbroadcasti32x2_zmm_k1z_xmmm64 = 3_109, EVEX_Vpbroadcastq_xmm_k1z_xmmm64 = 3_110, EVEX_Vpbroadcastq_ymm_k1z_xmmm64 = 3_111, EVEX_Vpbroadcastq_zmm_k1z_xmmm64 = 3_112, VEX_Vbroadcasti128_ymm_m128 = 3_113, EVEX_Vbroadcasti32x4_ymm_k1z_m128 = 3_114, EVEX_Vbroadcasti32x4_zmm_k1z_m128 = 3_115, EVEX_Vbroadcasti64x2_ymm_k1z_m128 = 3_116, EVEX_Vbroadcasti64x2_zmm_k1z_m128 = 3_117, EVEX_Vbroadcasti32x8_zmm_k1z_m256 = 3_118, EVEX_Vbroadcasti64x4_zmm_k1z_m256 = 3_119, EVEX_Vpexpandb_xmm_k1z_xmmm128 = 3_120, EVEX_Vpexpandb_ymm_k1z_ymmm256 = 3_121, EVEX_Vpexpandb_zmm_k1z_zmmm512 = 3_122, EVEX_Vpexpandw_xmm_k1z_xmmm128 = 3_123, EVEX_Vpexpandw_ymm_k1z_ymmm256 = 3_124, EVEX_Vpexpandw_zmm_k1z_zmmm512 = 3_125, EVEX_Vpcompressb_xmmm128_k1z_xmm = 3_126, EVEX_Vpcompressb_ymmm256_k1z_ymm = 3_127, EVEX_Vpcompressb_zmmm512_k1z_zmm = 3_128, EVEX_Vpcompressw_xmmm128_k1z_xmm = 3_129, EVEX_Vpcompressw_ymmm256_k1z_ymm = 3_130, EVEX_Vpcompressw_zmmm512_k1z_zmm = 3_131, EVEX_Vpblendmd_xmm_k1z_xmm_xmmm128b32 = 3_132, EVEX_Vpblendmd_ymm_k1z_ymm_ymmm256b32 = 3_133, EVEX_Vpblendmd_zmm_k1z_zmm_zmmm512b32 = 3_134, EVEX_Vpblendmq_xmm_k1z_xmm_xmmm128b64 = 3_135, EVEX_Vpblendmq_ymm_k1z_ymm_ymmm256b64 = 3_136, EVEX_Vpblendmq_zmm_k1z_zmm_zmmm512b64 = 3_137, EVEX_Vblendmps_xmm_k1z_xmm_xmmm128b32 = 3_138, EVEX_Vblendmps_ymm_k1z_ymm_ymmm256b32 = 3_139, EVEX_Vblendmps_zmm_k1z_zmm_zmmm512b32 = 3_140, EVEX_Vblendmpd_xmm_k1z_xmm_xmmm128b64 = 3_141, EVEX_Vblendmpd_ymm_k1z_ymm_ymmm256b64 = 3_142, EVEX_Vblendmpd_zmm_k1z_zmm_zmmm512b64 = 3_143, EVEX_Vpblendmb_xmm_k1z_xmm_xmmm128 = 3_144, EVEX_Vpblendmb_ymm_k1z_ymm_ymmm256 = 3_145, EVEX_Vpblendmb_zmm_k1z_zmm_zmmm512 = 3_146, EVEX_Vpblendmw_xmm_k1z_xmm_xmmm128 = 3_147, EVEX_Vpblendmw_ymm_k1z_ymm_ymmm256 = 3_148, EVEX_Vpblendmw_zmm_k1z_zmm_zmmm512 = 3_149, EVEX_Vp2intersectd_kp1_xmm_xmmm128b32 = 3_150, EVEX_Vp2intersectd_kp1_ymm_ymmm256b32 = 3_151, EVEX_Vp2intersectd_kp1_zmm_zmmm512b32 = 3_152, EVEX_Vp2intersectq_kp1_xmm_xmmm128b64 = 3_153, EVEX_Vp2intersectq_kp1_ymm_ymmm256b64 = 3_154, EVEX_Vp2intersectq_kp1_zmm_zmmm512b64 = 3_155, EVEX_Vpshldvw_xmm_k1z_xmm_xmmm128 = 3_156, EVEX_Vpshldvw_ymm_k1z_ymm_ymmm256 = 3_157, EVEX_Vpshldvw_zmm_k1z_zmm_zmmm512 = 3_158, EVEX_Vpshldvd_xmm_k1z_xmm_xmmm128b32 = 3_159, EVEX_Vpshldvd_ymm_k1z_ymm_ymmm256b32 = 3_160, EVEX_Vpshldvd_zmm_k1z_zmm_zmmm512b32 = 3_161, EVEX_Vpshldvq_xmm_k1z_xmm_xmmm128b64 = 3_162, EVEX_Vpshldvq_ymm_k1z_ymm_ymmm256b64 = 3_163, EVEX_Vpshldvq_zmm_k1z_zmm_zmmm512b64 = 3_164, EVEX_Vpshrdvw_xmm_k1z_xmm_xmmm128 = 3_165, EVEX_Vpshrdvw_ymm_k1z_ymm_ymmm256 = 3_166, EVEX_Vpshrdvw_zmm_k1z_zmm_zmmm512 = 3_167, EVEX_Vcvtneps2bf16_xmm_k1z_xmmm128b32 = 3_168, EVEX_Vcvtneps2bf16_xmm_k1z_ymmm256b32 = 3_169, EVEX_Vcvtneps2bf16_ymm_k1z_zmmm512b32 = 3_170, EVEX_Vcvtne2ps2bf16_xmm_k1z_xmm_xmmm128b32 = 3_171, EVEX_Vcvtne2ps2bf16_ymm_k1z_ymm_ymmm256b32 = 3_172, EVEX_Vcvtne2ps2bf16_zmm_k1z_zmm_zmmm512b32 = 3_173, EVEX_Vpshrdvd_xmm_k1z_xmm_xmmm128b32 = 3_174, EVEX_Vpshrdvd_ymm_k1z_ymm_ymmm256b32 = 3_175, EVEX_Vpshrdvd_zmm_k1z_zmm_zmmm512b32 = 3_176, EVEX_Vpshrdvq_xmm_k1z_xmm_xmmm128b64 = 3_177, EVEX_Vpshrdvq_ymm_k1z_ymm_ymmm256b64 = 3_178, EVEX_Vpshrdvq_zmm_k1z_zmm_zmmm512b64 = 3_179, EVEX_Vpermi2b_xmm_k1z_xmm_xmmm128 = 3_180, EVEX_Vpermi2b_ymm_k1z_ymm_ymmm256 = 3_181, EVEX_Vpermi2b_zmm_k1z_zmm_zmmm512 = 3_182, EVEX_Vpermi2w_xmm_k1z_xmm_xmmm128 = 3_183, EVEX_Vpermi2w_ymm_k1z_ymm_ymmm256 = 3_184, EVEX_Vpermi2w_zmm_k1z_zmm_zmmm512 = 3_185, 
EVEX_Vpermi2d_xmm_k1z_xmm_xmmm128b32 = 3_186, EVEX_Vpermi2d_ymm_k1z_ymm_ymmm256b32 = 3_187, EVEX_Vpermi2d_zmm_k1z_zmm_zmmm512b32 = 3_188, EVEX_Vpermi2q_xmm_k1z_xmm_xmmm128b64 = 3_189, EVEX_Vpermi2q_ymm_k1z_ymm_ymmm256b64 = 3_190, EVEX_Vpermi2q_zmm_k1z_zmm_zmmm512b64 = 3_191, EVEX_Vpermi2ps_xmm_k1z_xmm_xmmm128b32 = 3_192, EVEX_Vpermi2ps_ymm_k1z_ymm_ymmm256b32 = 3_193, EVEX_Vpermi2ps_zmm_k1z_zmm_zmmm512b32 = 3_194, EVEX_Vpermi2pd_xmm_k1z_xmm_xmmm128b64 = 3_195, EVEX_Vpermi2pd_ymm_k1z_ymm_ymmm256b64 = 3_196, EVEX_Vpermi2pd_zmm_k1z_zmm_zmmm512b64 = 3_197, VEX_Vpbroadcastb_xmm_xmmm8 = 3_198, VEX_Vpbroadcastb_ymm_xmmm8 = 3_199, EVEX_Vpbroadcastb_xmm_k1z_xmmm8 = 3_200, EVEX_Vpbroadcastb_ymm_k1z_xmmm8 = 3_201, EVEX_Vpbroadcastb_zmm_k1z_xmmm8 = 3_202, VEX_Vpbroadcastw_xmm_xmmm16 = 3_203, VEX_Vpbroadcastw_ymm_xmmm16 = 3_204, EVEX_Vpbroadcastw_xmm_k1z_xmmm16 = 3_205, EVEX_Vpbroadcastw_ymm_k1z_xmmm16 = 3_206, EVEX_Vpbroadcastw_zmm_k1z_xmmm16 = 3_207, EVEX_Vpbroadcastb_xmm_k1z_r32 = 3_208, EVEX_Vpbroadcastb_ymm_k1z_r32 = 3_209, EVEX_Vpbroadcastb_zmm_k1z_r32 = 3_210, EVEX_Vpbroadcastw_xmm_k1z_r32 = 3_211, EVEX_Vpbroadcastw_ymm_k1z_r32 = 3_212, EVEX_Vpbroadcastw_zmm_k1z_r32 = 3_213, EVEX_Vpbroadcastd_xmm_k1z_r32 = 3_214, EVEX_Vpbroadcastd_ymm_k1z_r32 = 3_215, EVEX_Vpbroadcastd_zmm_k1z_r32 = 3_216, EVEX_Vpbroadcastq_xmm_k1z_r64 = 3_217, EVEX_Vpbroadcastq_ymm_k1z_r64 = 3_218, EVEX_Vpbroadcastq_zmm_k1z_r64 = 3_219, EVEX_Vpermt2b_xmm_k1z_xmm_xmmm128 = 3_220, EVEX_Vpermt2b_ymm_k1z_ymm_ymmm256 = 3_221, EVEX_Vpermt2b_zmm_k1z_zmm_zmmm512 = 3_222, EVEX_Vpermt2w_xmm_k1z_xmm_xmmm128 = 3_223, EVEX_Vpermt2w_ymm_k1z_ymm_ymmm256 = 3_224, EVEX_Vpermt2w_zmm_k1z_zmm_zmmm512 = 3_225, EVEX_Vpermt2d_xmm_k1z_xmm_xmmm128b32 = 3_226, EVEX_Vpermt2d_ymm_k1z_ymm_ymmm256b32 = 3_227, EVEX_Vpermt2d_zmm_k1z_zmm_zmmm512b32 = 3_228, EVEX_Vpermt2q_xmm_k1z_xmm_xmmm128b64 = 3_229, EVEX_Vpermt2q_ymm_k1z_ymm_ymmm256b64 = 3_230, EVEX_Vpermt2q_zmm_k1z_zmm_zmmm512b64 = 3_231, EVEX_Vpermt2ps_xmm_k1z_xmm_xmmm128b32 = 3_232, EVEX_Vpermt2ps_ymm_k1z_ymm_ymmm256b32 = 3_233, EVEX_Vpermt2ps_zmm_k1z_zmm_zmmm512b32 = 3_234, EVEX_Vpermt2pd_xmm_k1z_xmm_xmmm128b64 = 3_235, EVEX_Vpermt2pd_ymm_k1z_ymm_ymmm256b64 = 3_236, EVEX_Vpermt2pd_zmm_k1z_zmm_zmmm512b64 = 3_237, Invept_r32_m128 = 3_238, Invept_r64_m128 = 3_239, Invvpid_r32_m128 = 3_240, Invvpid_r64_m128 = 3_241, Invpcid_r32_m128 = 3_242, Invpcid_r64_m128 = 3_243, EVEX_Vpmultishiftqb_xmm_k1z_xmm_xmmm128b64 = 3_244, EVEX_Vpmultishiftqb_ymm_k1z_ymm_ymmm256b64 = 3_245, EVEX_Vpmultishiftqb_zmm_k1z_zmm_zmmm512b64 = 3_246, EVEX_Vexpandps_xmm_k1z_xmmm128 = 3_247, EVEX_Vexpandps_ymm_k1z_ymmm256 = 3_248, EVEX_Vexpandps_zmm_k1z_zmmm512 = 3_249, EVEX_Vexpandpd_xmm_k1z_xmmm128 = 3_250, EVEX_Vexpandpd_ymm_k1z_ymmm256 = 3_251, EVEX_Vexpandpd_zmm_k1z_zmmm512 = 3_252, EVEX_Vpexpandd_xmm_k1z_xmmm128 = 3_253, EVEX_Vpexpandd_ymm_k1z_ymmm256 = 3_254, EVEX_Vpexpandd_zmm_k1z_zmmm512 = 3_255, EVEX_Vpexpandq_xmm_k1z_xmmm128 = 3_256, EVEX_Vpexpandq_ymm_k1z_ymmm256 = 3_257, EVEX_Vpexpandq_zmm_k1z_zmmm512 = 3_258, EVEX_Vcompressps_xmmm128_k1z_xmm = 3_259, EVEX_Vcompressps_ymmm256_k1z_ymm = 3_260, EVEX_Vcompressps_zmmm512_k1z_zmm = 3_261, EVEX_Vcompresspd_xmmm128_k1z_xmm = 3_262, EVEX_Vcompresspd_ymmm256_k1z_ymm = 3_263, EVEX_Vcompresspd_zmmm512_k1z_zmm = 3_264, EVEX_Vpcompressd_xmmm128_k1z_xmm = 3_265, EVEX_Vpcompressd_ymmm256_k1z_ymm = 3_266, EVEX_Vpcompressd_zmmm512_k1z_zmm = 3_267, EVEX_Vpcompressq_xmmm128_k1z_xmm = 3_268, EVEX_Vpcompressq_ymmm256_k1z_ymm = 3_269, EVEX_Vpcompressq_zmmm512_k1z_zmm = 3_270, 
VEX_Vpmaskmovd_xmm_xmm_m128 = 3_271, VEX_Vpmaskmovd_ymm_ymm_m256 = 3_272, VEX_Vpmaskmovq_xmm_xmm_m128 = 3_273, VEX_Vpmaskmovq_ymm_ymm_m256 = 3_274, EVEX_Vpermb_xmm_k1z_xmm_xmmm128 = 3_275, EVEX_Vpermb_ymm_k1z_ymm_ymmm256 = 3_276, EVEX_Vpermb_zmm_k1z_zmm_zmmm512 = 3_277, EVEX_Vpermw_xmm_k1z_xmm_xmmm128 = 3_278, EVEX_Vpermw_ymm_k1z_ymm_ymmm256 = 3_279, EVEX_Vpermw_zmm_k1z_zmm_zmmm512 = 3_280, VEX_Vpmaskmovd_m128_xmm_xmm = 3_281, VEX_Vpmaskmovd_m256_ymm_ymm = 3_282, VEX_Vpmaskmovq_m128_xmm_xmm = 3_283, VEX_Vpmaskmovq_m256_ymm_ymm = 3_284, EVEX_Vpshufbitqmb_kr_k1_xmm_xmmm128 = 3_285, EVEX_Vpshufbitqmb_kr_k1_ymm_ymmm256 = 3_286, EVEX_Vpshufbitqmb_kr_k1_zmm_zmmm512 = 3_287, VEX_Vpgatherdd_xmm_vm32x_xmm = 3_288, VEX_Vpgatherdd_ymm_vm32y_ymm = 3_289, VEX_Vpgatherdq_xmm_vm32x_xmm = 3_290, VEX_Vpgatherdq_ymm_vm32x_ymm = 3_291, EVEX_Vpgatherdd_xmm_k1_vm32x = 3_292, EVEX_Vpgatherdd_ymm_k1_vm32y = 3_293, EVEX_Vpgatherdd_zmm_k1_vm32z = 3_294, EVEX_Vpgatherdq_xmm_k1_vm32x = 3_295, EVEX_Vpgatherdq_ymm_k1_vm32x = 3_296, EVEX_Vpgatherdq_zmm_k1_vm32y = 3_297, VEX_Vpgatherqd_xmm_vm64x_xmm = 3_298, VEX_Vpgatherqd_xmm_vm64y_xmm = 3_299, VEX_Vpgatherqq_xmm_vm64x_xmm = 3_300, VEX_Vpgatherqq_ymm_vm64y_ymm = 3_301, EVEX_Vpgatherqd_xmm_k1_vm64x = 3_302, EVEX_Vpgatherqd_xmm_k1_vm64y = 3_303, EVEX_Vpgatherqd_ymm_k1_vm64z = 3_304, EVEX_Vpgatherqq_xmm_k1_vm64x = 3_305, EVEX_Vpgatherqq_ymm_k1_vm64y = 3_306, EVEX_Vpgatherqq_zmm_k1_vm64z = 3_307, VEX_Vgatherdps_xmm_vm32x_xmm = 3_308, VEX_Vgatherdps_ymm_vm32y_ymm = 3_309, VEX_Vgatherdpd_xmm_vm32x_xmm = 3_310, VEX_Vgatherdpd_ymm_vm32x_ymm = 3_311, EVEX_Vgatherdps_xmm_k1_vm32x = 3_312, EVEX_Vgatherdps_ymm_k1_vm32y = 3_313, EVEX_Vgatherdps_zmm_k1_vm32z = 3_314, EVEX_Vgatherdpd_xmm_k1_vm32x = 3_315, EVEX_Vgatherdpd_ymm_k1_vm32x = 3_316, EVEX_Vgatherdpd_zmm_k1_vm32y = 3_317, VEX_Vgatherqps_xmm_vm64x_xmm = 3_318, VEX_Vgatherqps_xmm_vm64y_xmm = 3_319, VEX_Vgatherqpd_xmm_vm64x_xmm = 3_320, VEX_Vgatherqpd_ymm_vm64y_ymm = 3_321, EVEX_Vgatherqps_xmm_k1_vm64x = 3_322, EVEX_Vgatherqps_xmm_k1_vm64y = 3_323, EVEX_Vgatherqps_ymm_k1_vm64z = 3_324, EVEX_Vgatherqpd_xmm_k1_vm64x = 3_325, EVEX_Vgatherqpd_ymm_k1_vm64y = 3_326, EVEX_Vgatherqpd_zmm_k1_vm64z = 3_327, VEX_Vfmaddsub132ps_xmm_xmm_xmmm128 = 3_328, VEX_Vfmaddsub132ps_ymm_ymm_ymmm256 = 3_329, VEX_Vfmaddsub132pd_xmm_xmm_xmmm128 = 3_330, VEX_Vfmaddsub132pd_ymm_ymm_ymmm256 = 3_331, EVEX_Vfmaddsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_332, EVEX_Vfmaddsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_333, EVEX_Vfmaddsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_334, EVEX_Vfmaddsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_335, EVEX_Vfmaddsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_336, EVEX_Vfmaddsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_337, VEX_Vfmsubadd132ps_xmm_xmm_xmmm128 = 3_338, VEX_Vfmsubadd132ps_ymm_ymm_ymmm256 = 3_339, VEX_Vfmsubadd132pd_xmm_xmm_xmmm128 = 3_340, VEX_Vfmsubadd132pd_ymm_ymm_ymmm256 = 3_341, EVEX_Vfmsubadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_342, EVEX_Vfmsubadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_343, EVEX_Vfmsubadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_344, EVEX_Vfmsubadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_345, EVEX_Vfmsubadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_346, EVEX_Vfmsubadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_347, VEX_Vfmadd132ps_xmm_xmm_xmmm128 = 3_348, VEX_Vfmadd132ps_ymm_ymm_ymmm256 = 3_349, VEX_Vfmadd132pd_xmm_xmm_xmmm128 = 3_350, VEX_Vfmadd132pd_ymm_ymm_ymmm256 = 3_351, EVEX_Vfmadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_352, EVEX_Vfmadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_353, EVEX_Vfmadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_354, EVEX_Vfmadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_355, 
EVEX_Vfmadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_356, EVEX_Vfmadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_357, VEX_Vfmadd132ss_xmm_xmm_xmmm32 = 3_358, VEX_Vfmadd132sd_xmm_xmm_xmmm64 = 3_359, EVEX_Vfmadd132ss_xmm_k1z_xmm_xmmm32_er = 3_360, EVEX_Vfmadd132sd_xmm_k1z_xmm_xmmm64_er = 3_361, VEX_Vfmsub132ps_xmm_xmm_xmmm128 = 3_362, VEX_Vfmsub132ps_ymm_ymm_ymmm256 = 3_363, VEX_Vfmsub132pd_xmm_xmm_xmmm128 = 3_364, VEX_Vfmsub132pd_ymm_ymm_ymmm256 = 3_365, EVEX_Vfmsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_366, EVEX_Vfmsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_367, EVEX_Vfmsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_368, EVEX_Vfmsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_369, EVEX_Vfmsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_370, EVEX_Vfmsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_371, EVEX_V4fmaddps_zmm_k1z_zmmp3_m128 = 3_372, VEX_Vfmsub132ss_xmm_xmm_xmmm32 = 3_373, VEX_Vfmsub132sd_xmm_xmm_xmmm64 = 3_374, EVEX_Vfmsub132ss_xmm_k1z_xmm_xmmm32_er = 3_375, EVEX_Vfmsub132sd_xmm_k1z_xmm_xmmm64_er = 3_376, EVEX_V4fmaddss_xmm_k1z_xmmp3_m128 = 3_377, VEX_Vfnmadd132ps_xmm_xmm_xmmm128 = 3_378, VEX_Vfnmadd132ps_ymm_ymm_ymmm256 = 3_379, VEX_Vfnmadd132pd_xmm_xmm_xmmm128 = 3_380, VEX_Vfnmadd132pd_ymm_ymm_ymmm256 = 3_381, EVEX_Vfnmadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_382, EVEX_Vfnmadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_383, EVEX_Vfnmadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_384, EVEX_Vfnmadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_385, EVEX_Vfnmadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_386, EVEX_Vfnmadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_387, VEX_Vfnmadd132ss_xmm_xmm_xmmm32 = 3_388, VEX_Vfnmadd132sd_xmm_xmm_xmmm64 = 3_389, EVEX_Vfnmadd132ss_xmm_k1z_xmm_xmmm32_er = 3_390, EVEX_Vfnmadd132sd_xmm_k1z_xmm_xmmm64_er = 3_391, VEX_Vfnmsub132ps_xmm_xmm_xmmm128 = 3_392, VEX_Vfnmsub132ps_ymm_ymm_ymmm256 = 3_393, VEX_Vfnmsub132pd_xmm_xmm_xmmm128 = 3_394, VEX_Vfnmsub132pd_ymm_ymm_ymmm256 = 3_395, EVEX_Vfnmsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_396, EVEX_Vfnmsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_397, EVEX_Vfnmsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_398, EVEX_Vfnmsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_399, EVEX_Vfnmsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_400, EVEX_Vfnmsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_401, VEX_Vfnmsub132ss_xmm_xmm_xmmm32 = 3_402, VEX_Vfnmsub132sd_xmm_xmm_xmmm64 = 3_403, EVEX_Vfnmsub132ss_xmm_k1z_xmm_xmmm32_er = 3_404, EVEX_Vfnmsub132sd_xmm_k1z_xmm_xmmm64_er = 3_405, EVEX_Vpscatterdd_vm32x_k1_xmm = 3_406, EVEX_Vpscatterdd_vm32y_k1_ymm = 3_407, EVEX_Vpscatterdd_vm32z_k1_zmm = 3_408, EVEX_Vpscatterdq_vm32x_k1_xmm = 3_409, EVEX_Vpscatterdq_vm32x_k1_ymm = 3_410, EVEX_Vpscatterdq_vm32y_k1_zmm = 3_411, EVEX_Vpscatterqd_vm64x_k1_xmm = 3_412, EVEX_Vpscatterqd_vm64y_k1_xmm = 3_413, EVEX_Vpscatterqd_vm64z_k1_ymm = 3_414, EVEX_Vpscatterqq_vm64x_k1_xmm = 3_415, EVEX_Vpscatterqq_vm64y_k1_ymm = 3_416, EVEX_Vpscatterqq_vm64z_k1_zmm = 3_417, EVEX_Vscatterdps_vm32x_k1_xmm = 3_418, EVEX_Vscatterdps_vm32y_k1_ymm = 3_419, EVEX_Vscatterdps_vm32z_k1_zmm = 3_420, EVEX_Vscatterdpd_vm32x_k1_xmm = 3_421, EVEX_Vscatterdpd_vm32x_k1_ymm = 3_422, EVEX_Vscatterdpd_vm32y_k1_zmm = 3_423, EVEX_Vscatterqps_vm64x_k1_xmm = 3_424, EVEX_Vscatterqps_vm64y_k1_xmm = 3_425, EVEX_Vscatterqps_vm64z_k1_ymm = 3_426, EVEX_Vscatterqpd_vm64x_k1_xmm = 3_427, EVEX_Vscatterqpd_vm64y_k1_ymm = 3_428, EVEX_Vscatterqpd_vm64z_k1_zmm = 3_429, VEX_Vfmaddsub213ps_xmm_xmm_xmmm128 = 3_430, VEX_Vfmaddsub213ps_ymm_ymm_ymmm256 = 3_431, VEX_Vfmaddsub213pd_xmm_xmm_xmmm128 = 3_432, VEX_Vfmaddsub213pd_ymm_ymm_ymmm256 = 3_433, EVEX_Vfmaddsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_434, EVEX_Vfmaddsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_435, 
EVEX_Vfmaddsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_436, EVEX_Vfmaddsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_437, EVEX_Vfmaddsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_438, EVEX_Vfmaddsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_439, VEX_Vfmsubadd213ps_xmm_xmm_xmmm128 = 3_440, VEX_Vfmsubadd213ps_ymm_ymm_ymmm256 = 3_441, VEX_Vfmsubadd213pd_xmm_xmm_xmmm128 = 3_442, VEX_Vfmsubadd213pd_ymm_ymm_ymmm256 = 3_443, EVEX_Vfmsubadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_444, EVEX_Vfmsubadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_445, EVEX_Vfmsubadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_446, EVEX_Vfmsubadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_447, EVEX_Vfmsubadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_448, EVEX_Vfmsubadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_449, VEX_Vfmadd213ps_xmm_xmm_xmmm128 = 3_450, VEX_Vfmadd213ps_ymm_ymm_ymmm256 = 3_451, VEX_Vfmadd213pd_xmm_xmm_xmmm128 = 3_452, VEX_Vfmadd213pd_ymm_ymm_ymmm256 = 3_453, EVEX_Vfmadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_454, EVEX_Vfmadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_455, EVEX_Vfmadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_456, EVEX_Vfmadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_457, EVEX_Vfmadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_458, EVEX_Vfmadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_459, VEX_Vfmadd213ss_xmm_xmm_xmmm32 = 3_460, VEX_Vfmadd213sd_xmm_xmm_xmmm64 = 3_461, EVEX_Vfmadd213ss_xmm_k1z_xmm_xmmm32_er = 3_462, EVEX_Vfmadd213sd_xmm_k1z_xmm_xmmm64_er = 3_463, VEX_Vfmsub213ps_xmm_xmm_xmmm128 = 3_464, VEX_Vfmsub213ps_ymm_ymm_ymmm256 = 3_465, VEX_Vfmsub213pd_xmm_xmm_xmmm128 = 3_466, VEX_Vfmsub213pd_ymm_ymm_ymmm256 = 3_467, EVEX_Vfmsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_468, EVEX_Vfmsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_469, EVEX_Vfmsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_470, EVEX_Vfmsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_471, EVEX_Vfmsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_472, EVEX_Vfmsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_473, EVEX_V4fnmaddps_zmm_k1z_zmmp3_m128 = 3_474, VEX_Vfmsub213ss_xmm_xmm_xmmm32 = 3_475, VEX_Vfmsub213sd_xmm_xmm_xmmm64 = 3_476, EVEX_Vfmsub213ss_xmm_k1z_xmm_xmmm32_er = 3_477, EVEX_Vfmsub213sd_xmm_k1z_xmm_xmmm64_er = 3_478, EVEX_V4fnmaddss_xmm_k1z_xmmp3_m128 = 3_479, VEX_Vfnmadd213ps_xmm_xmm_xmmm128 = 3_480, VEX_Vfnmadd213ps_ymm_ymm_ymmm256 = 3_481, VEX_Vfnmadd213pd_xmm_xmm_xmmm128 = 3_482, VEX_Vfnmadd213pd_ymm_ymm_ymmm256 = 3_483, EVEX_Vfnmadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_484, EVEX_Vfnmadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_485, EVEX_Vfnmadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_486, EVEX_Vfnmadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_487, EVEX_Vfnmadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_488, EVEX_Vfnmadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_489, VEX_Vfnmadd213ss_xmm_xmm_xmmm32 = 3_490, VEX_Vfnmadd213sd_xmm_xmm_xmmm64 = 3_491, EVEX_Vfnmadd213ss_xmm_k1z_xmm_xmmm32_er = 3_492, EVEX_Vfnmadd213sd_xmm_k1z_xmm_xmmm64_er = 3_493, VEX_Vfnmsub213ps_xmm_xmm_xmmm128 = 3_494, VEX_Vfnmsub213ps_ymm_ymm_ymmm256 = 3_495, VEX_Vfnmsub213pd_xmm_xmm_xmmm128 = 3_496, VEX_Vfnmsub213pd_ymm_ymm_ymmm256 = 3_497, EVEX_Vfnmsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_498, EVEX_Vfnmsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_499, EVEX_Vfnmsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_500, EVEX_Vfnmsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_501, EVEX_Vfnmsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_502, EVEX_Vfnmsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_503, VEX_Vfnmsub213ss_xmm_xmm_xmmm32 = 3_504, VEX_Vfnmsub213sd_xmm_xmm_xmmm64 = 3_505, EVEX_Vfnmsub213ss_xmm_k1z_xmm_xmmm32_er = 3_506, EVEX_Vfnmsub213sd_xmm_k1z_xmm_xmmm64_er = 3_507, EVEX_Vpmadd52luq_xmm_k1z_xmm_xmmm128b64 = 3_508, EVEX_Vpmadd52luq_ymm_k1z_ymm_ymmm256b64 = 3_509, EVEX_Vpmadd52luq_zmm_k1z_zmm_zmmm512b64 = 3_510, 
EVEX_Vpmadd52huq_xmm_k1z_xmm_xmmm128b64 = 3_511, EVEX_Vpmadd52huq_ymm_k1z_ymm_ymmm256b64 = 3_512, EVEX_Vpmadd52huq_zmm_k1z_zmm_zmmm512b64 = 3_513, VEX_Vfmaddsub231ps_xmm_xmm_xmmm128 = 3_514, VEX_Vfmaddsub231ps_ymm_ymm_ymmm256 = 3_515, VEX_Vfmaddsub231pd_xmm_xmm_xmmm128 = 3_516, VEX_Vfmaddsub231pd_ymm_ymm_ymmm256 = 3_517, EVEX_Vfmaddsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_518, EVEX_Vfmaddsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_519, EVEX_Vfmaddsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_520, EVEX_Vfmaddsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_521, EVEX_Vfmaddsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_522, EVEX_Vfmaddsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_523, VEX_Vfmsubadd231ps_xmm_xmm_xmmm128 = 3_524, VEX_Vfmsubadd231ps_ymm_ymm_ymmm256 = 3_525, VEX_Vfmsubadd231pd_xmm_xmm_xmmm128 = 3_526, VEX_Vfmsubadd231pd_ymm_ymm_ymmm256 = 3_527, EVEX_Vfmsubadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_528, EVEX_Vfmsubadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_529, EVEX_Vfmsubadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_530, EVEX_Vfmsubadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_531, EVEX_Vfmsubadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_532, EVEX_Vfmsubadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_533, VEX_Vfmadd231ps_xmm_xmm_xmmm128 = 3_534, VEX_Vfmadd231ps_ymm_ymm_ymmm256 = 3_535, VEX_Vfmadd231pd_xmm_xmm_xmmm128 = 3_536, VEX_Vfmadd231pd_ymm_ymm_ymmm256 = 3_537, EVEX_Vfmadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_538, EVEX_Vfmadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_539, EVEX_Vfmadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_540, EVEX_Vfmadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_541, EVEX_Vfmadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_542, EVEX_Vfmadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_543, VEX_Vfmadd231ss_xmm_xmm_xmmm32 = 3_544, VEX_Vfmadd231sd_xmm_xmm_xmmm64 = 3_545, EVEX_Vfmadd231ss_xmm_k1z_xmm_xmmm32_er = 3_546, EVEX_Vfmadd231sd_xmm_k1z_xmm_xmmm64_er = 3_547, VEX_Vfmsub231ps_xmm_xmm_xmmm128 = 3_548, VEX_Vfmsub231ps_ymm_ymm_ymmm256 = 3_549, VEX_Vfmsub231pd_xmm_xmm_xmmm128 = 3_550, VEX_Vfmsub231pd_ymm_ymm_ymmm256 = 3_551, EVEX_Vfmsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_552, EVEX_Vfmsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_553, EVEX_Vfmsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_554, EVEX_Vfmsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_555, EVEX_Vfmsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_556, EVEX_Vfmsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_557, VEX_Vfmsub231ss_xmm_xmm_xmmm32 = 3_558, VEX_Vfmsub231sd_xmm_xmm_xmmm64 = 3_559, EVEX_Vfmsub231ss_xmm_k1z_xmm_xmmm32_er = 3_560, EVEX_Vfmsub231sd_xmm_k1z_xmm_xmmm64_er = 3_561, VEX_Vfnmadd231ps_xmm_xmm_xmmm128 = 3_562, VEX_Vfnmadd231ps_ymm_ymm_ymmm256 = 3_563, VEX_Vfnmadd231pd_xmm_xmm_xmmm128 = 3_564, VEX_Vfnmadd231pd_ymm_ymm_ymmm256 = 3_565, EVEX_Vfnmadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_566, EVEX_Vfnmadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_567, EVEX_Vfnmadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_568, EVEX_Vfnmadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_569, EVEX_Vfnmadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_570, EVEX_Vfnmadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_571, VEX_Vfnmadd231ss_xmm_xmm_xmmm32 = 3_572, VEX_Vfnmadd231sd_xmm_xmm_xmmm64 = 3_573, EVEX_Vfnmadd231ss_xmm_k1z_xmm_xmmm32_er = 3_574, EVEX_Vfnmadd231sd_xmm_k1z_xmm_xmmm64_er = 3_575, VEX_Vfnmsub231ps_xmm_xmm_xmmm128 = 3_576, VEX_Vfnmsub231ps_ymm_ymm_ymmm256 = 3_577, VEX_Vfnmsub231pd_xmm_xmm_xmmm128 = 3_578, VEX_Vfnmsub231pd_ymm_ymm_ymmm256 = 3_579, EVEX_Vfnmsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_580, EVEX_Vfnmsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_581, EVEX_Vfnmsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_582, EVEX_Vfnmsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_583, EVEX_Vfnmsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_584, EVEX_Vfnmsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_585, 
VEX_Vfnmsub231ss_xmm_xmm_xmmm32 = 3_586, VEX_Vfnmsub231sd_xmm_xmm_xmmm64 = 3_587, EVEX_Vfnmsub231ss_xmm_k1z_xmm_xmmm32_er = 3_588, EVEX_Vfnmsub231sd_xmm_k1z_xmm_xmmm64_er = 3_589, EVEX_Vpconflictd_xmm_k1z_xmmm128b32 = 3_590, EVEX_Vpconflictd_ymm_k1z_ymmm256b32 = 3_591, EVEX_Vpconflictd_zmm_k1z_zmmm512b32 = 3_592, EVEX_Vpconflictq_xmm_k1z_xmmm128b64 = 3_593, EVEX_Vpconflictq_ymm_k1z_ymmm256b64 = 3_594, EVEX_Vpconflictq_zmm_k1z_zmmm512b64 = 3_595, EVEX_Vgatherpf0dps_vm32z_k1 = 3_596, EVEX_Vgatherpf0dpd_vm32y_k1 = 3_597, EVEX_Vgatherpf1dps_vm32z_k1 = 3_598, EVEX_Vgatherpf1dpd_vm32y_k1 = 3_599, EVEX_Vscatterpf0dps_vm32z_k1 = 3_600, EVEX_Vscatterpf0dpd_vm32y_k1 = 3_601, EVEX_Vscatterpf1dps_vm32z_k1 = 3_602, EVEX_Vscatterpf1dpd_vm32y_k1 = 3_603, EVEX_Vgatherpf0qps_vm64z_k1 = 3_604, EVEX_Vgatherpf0qpd_vm64z_k1 = 3_605, EVEX_Vgatherpf1qps_vm64z_k1 = 3_606, EVEX_Vgatherpf1qpd_vm64z_k1 = 3_607, EVEX_Vscatterpf0qps_vm64z_k1 = 3_608, EVEX_Vscatterpf0qpd_vm64z_k1 = 3_609, EVEX_Vscatterpf1qps_vm64z_k1 = 3_610, EVEX_Vscatterpf1qpd_vm64z_k1 = 3_611, Sha1nexte_xmm_xmmm128 = 3_612, EVEX_Vexp2ps_zmm_k1z_zmmm512b32_sae = 3_613, EVEX_Vexp2pd_zmm_k1z_zmmm512b64_sae = 3_614, Sha1msg1_xmm_xmmm128 = 3_615, Sha1msg2_xmm_xmmm128 = 3_616, EVEX_Vrcp28ps_zmm_k1z_zmmm512b32_sae = 3_617, EVEX_Vrcp28pd_zmm_k1z_zmmm512b64_sae = 3_618, Sha256rnds2_xmm_xmmm128 = 3_619, EVEX_Vrcp28ss_xmm_k1z_xmm_xmmm32_sae = 3_620, EVEX_Vrcp28sd_xmm_k1z_xmm_xmmm64_sae = 3_621, Sha256msg1_xmm_xmmm128 = 3_622, EVEX_Vrsqrt28ps_zmm_k1z_zmmm512b32_sae = 3_623, EVEX_Vrsqrt28pd_zmm_k1z_zmmm512b64_sae = 3_624, Sha256msg2_xmm_xmmm128 = 3_625, EVEX_Vrsqrt28ss_xmm_k1z_xmm_xmmm32_sae = 3_626, EVEX_Vrsqrt28sd_xmm_k1z_xmm_xmmm64_sae = 3_627, Gf2p8mulb_xmm_xmmm128 = 3_628, VEX_Vgf2p8mulb_xmm_xmm_xmmm128 = 3_629, VEX_Vgf2p8mulb_ymm_ymm_ymmm256 = 3_630, EVEX_Vgf2p8mulb_xmm_k1z_xmm_xmmm128 = 3_631, EVEX_Vgf2p8mulb_ymm_k1z_ymm_ymmm256 = 3_632, EVEX_Vgf2p8mulb_zmm_k1z_zmm_zmmm512 = 3_633, Aesimc_xmm_xmmm128 = 3_634, VEX_Vaesimc_xmm_xmmm128 = 3_635, Aesenc_xmm_xmmm128 = 3_636, VEX_Vaesenc_xmm_xmm_xmmm128 = 3_637, VEX_Vaesenc_ymm_ymm_ymmm256 = 3_638, EVEX_Vaesenc_xmm_xmm_xmmm128 = 3_639, EVEX_Vaesenc_ymm_ymm_ymmm256 = 3_640, EVEX_Vaesenc_zmm_zmm_zmmm512 = 3_641, Aesenclast_xmm_xmmm128 = 3_642, VEX_Vaesenclast_xmm_xmm_xmmm128 = 3_643, VEX_Vaesenclast_ymm_ymm_ymmm256 = 3_644, EVEX_Vaesenclast_xmm_xmm_xmmm128 = 3_645, EVEX_Vaesenclast_ymm_ymm_ymmm256 = 3_646, EVEX_Vaesenclast_zmm_zmm_zmmm512 = 3_647, Aesdec_xmm_xmmm128 = 3_648, VEX_Vaesdec_xmm_xmm_xmmm128 = 3_649, VEX_Vaesdec_ymm_ymm_ymmm256 = 3_650, EVEX_Vaesdec_xmm_xmm_xmmm128 = 3_651, EVEX_Vaesdec_ymm_ymm_ymmm256 = 3_652, EVEX_Vaesdec_zmm_zmm_zmmm512 = 3_653, Aesdeclast_xmm_xmmm128 = 3_654, VEX_Vaesdeclast_xmm_xmm_xmmm128 = 3_655, VEX_Vaesdeclast_ymm_ymm_ymmm256 = 3_656, EVEX_Vaesdeclast_xmm_xmm_xmmm128 = 3_657, EVEX_Vaesdeclast_ymm_ymm_ymmm256 = 3_658, EVEX_Vaesdeclast_zmm_zmm_zmmm512 = 3_659, Movbe_r16_m16 = 3_660, Movbe_r32_m32 = 3_661, Movbe_r64_m64 = 3_662, Crc32_r32_rm8 = 3_663, Crc32_r64_rm8 = 3_664, Movbe_m16_r16 = 3_665, Movbe_m32_r32 = 3_666, Movbe_m64_r64 = 3_667, Crc32_r32_rm16 = 3_668, Crc32_r32_rm32 = 3_669, Crc32_r64_rm64 = 3_670, VEX_Andn_r32_r32_rm32 = 3_671, VEX_Andn_r64_r64_rm64 = 3_672, VEX_Blsr_r32_rm32 = 3_673, VEX_Blsr_r64_rm64 = 3_674, VEX_Blsmsk_r32_rm32 = 3_675, VEX_Blsmsk_r64_rm64 = 3_676, VEX_Blsi_r32_rm32 = 3_677, VEX_Blsi_r64_rm64 = 3_678, VEX_Bzhi_r32_rm32_r32 = 3_679, VEX_Bzhi_r64_rm64_r64 = 3_680, Wrussd_m32_r32 = 3_681, Wrussq_m64_r64 = 3_682, VEX_Pext_r32_r32_rm32 = 3_683, 
VEX_Pext_r64_r64_rm64 = 3_684, VEX_Pdep_r32_r32_rm32 = 3_685, VEX_Pdep_r64_r64_rm64 = 3_686, Wrssd_m32_r32 = 3_687, Wrssq_m64_r64 = 3_688, Adcx_r32_rm32 = 3_689, Adcx_r64_rm64 = 3_690, Adox_r32_rm32 = 3_691, Adox_r64_rm64 = 3_692, VEX_Mulx_r32_r32_rm32 = 3_693, VEX_Mulx_r64_r64_rm64 = 3_694, VEX_Bextr_r32_rm32_r32 = 3_695, VEX_Bextr_r64_rm64_r64 = 3_696, VEX_Shlx_r32_rm32_r32 = 3_697, VEX_Shlx_r64_rm64_r64 = 3_698, VEX_Sarx_r32_rm32_r32 = 3_699, VEX_Sarx_r64_rm64_r64 = 3_700, VEX_Shrx_r32_rm32_r32 = 3_701, VEX_Shrx_r64_rm64_r64 = 3_702, Movdir64b_r16_m512 = 3_703, Movdir64b_r32_m512 = 3_704, Movdir64b_r64_m512 = 3_705, Enqcmds_r16_m512 = 3_706, Enqcmds_r32_m512 = 3_707, Enqcmds_r64_m512 = 3_708, Enqcmd_r16_m512 = 3_709, Enqcmd_r32_m512 = 3_710, Enqcmd_r64_m512 = 3_711, Movdiri_m32_r32 = 3_712, Movdiri_m64_r64 = 3_713, VEX_Vpermq_ymm_ymmm256_imm8 = 3_714, EVEX_Vpermq_ymm_k1z_ymmm256b64_imm8 = 3_715, EVEX_Vpermq_zmm_k1z_zmmm512b64_imm8 = 3_716, VEX_Vpermpd_ymm_ymmm256_imm8 = 3_717, EVEX_Vpermpd_ymm_k1z_ymmm256b64_imm8 = 3_718, EVEX_Vpermpd_zmm_k1z_zmmm512b64_imm8 = 3_719, VEX_Vpblendd_xmm_xmm_xmmm128_imm8 = 3_720, VEX_Vpblendd_ymm_ymm_ymmm256_imm8 = 3_721, EVEX_Valignd_xmm_k1z_xmm_xmmm128b32_imm8 = 3_722, EVEX_Valignd_ymm_k1z_ymm_ymmm256b32_imm8 = 3_723, EVEX_Valignd_zmm_k1z_zmm_zmmm512b32_imm8 = 3_724, EVEX_Valignq_xmm_k1z_xmm_xmmm128b64_imm8 = 3_725, EVEX_Valignq_ymm_k1z_ymm_ymmm256b64_imm8 = 3_726, EVEX_Valignq_zmm_k1z_zmm_zmmm512b64_imm8 = 3_727, VEX_Vpermilps_xmm_xmmm128_imm8 = 3_728, VEX_Vpermilps_ymm_ymmm256_imm8 = 3_729, EVEX_Vpermilps_xmm_k1z_xmmm128b32_imm8 = 3_730, EVEX_Vpermilps_ymm_k1z_ymmm256b32_imm8 = 3_731, EVEX_Vpermilps_zmm_k1z_zmmm512b32_imm8 = 3_732, VEX_Vpermilpd_xmm_xmmm128_imm8 = 3_733, VEX_Vpermilpd_ymm_ymmm256_imm8 = 3_734, EVEX_Vpermilpd_xmm_k1z_xmmm128b64_imm8 = 3_735, EVEX_Vpermilpd_ymm_k1z_ymmm256b64_imm8 = 3_736, EVEX_Vpermilpd_zmm_k1z_zmmm512b64_imm8 = 3_737, VEX_Vperm2f128_ymm_ymm_ymmm256_imm8 = 3_738, Roundps_xmm_xmmm128_imm8 = 3_739, VEX_Vroundps_xmm_xmmm128_imm8 = 3_740, VEX_Vroundps_ymm_ymmm256_imm8 = 3_741, EVEX_Vrndscaleps_xmm_k1z_xmmm128b32_imm8 = 3_742, EVEX_Vrndscaleps_ymm_k1z_ymmm256b32_imm8 = 3_743, EVEX_Vrndscaleps_zmm_k1z_zmmm512b32_imm8_sae = 3_744, Roundpd_xmm_xmmm128_imm8 = 3_745, VEX_Vroundpd_xmm_xmmm128_imm8 = 3_746, VEX_Vroundpd_ymm_ymmm256_imm8 = 3_747, EVEX_Vrndscalepd_xmm_k1z_xmmm128b64_imm8 = 3_748, EVEX_Vrndscalepd_ymm_k1z_ymmm256b64_imm8 = 3_749, EVEX_Vrndscalepd_zmm_k1z_zmmm512b64_imm8_sae = 3_750, Roundss_xmm_xmmm32_imm8 = 3_751, VEX_Vroundss_xmm_xmm_xmmm32_imm8 = 3_752, EVEX_Vrndscaless_xmm_k1z_xmm_xmmm32_imm8_sae = 3_753, Roundsd_xmm_xmmm64_imm8 = 3_754, VEX_Vroundsd_xmm_xmm_xmmm64_imm8 = 3_755, EVEX_Vrndscalesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_756, Blendps_xmm_xmmm128_imm8 = 3_757, VEX_Vblendps_xmm_xmm_xmmm128_imm8 = 3_758, VEX_Vblendps_ymm_ymm_ymmm256_imm8 = 3_759, Blendpd_xmm_xmmm128_imm8 = 3_760, VEX_Vblendpd_xmm_xmm_xmmm128_imm8 = 3_761, VEX_Vblendpd_ymm_ymm_ymmm256_imm8 = 3_762, Pblendw_xmm_xmmm128_imm8 = 3_763, VEX_Vpblendw_xmm_xmm_xmmm128_imm8 = 3_764, VEX_Vpblendw_ymm_ymm_ymmm256_imm8 = 3_765, Palignr_mm_mmm64_imm8 = 3_766, Palignr_xmm_xmmm128_imm8 = 3_767, VEX_Vpalignr_xmm_xmm_xmmm128_imm8 = 3_768, VEX_Vpalignr_ymm_ymm_ymmm256_imm8 = 3_769, EVEX_Vpalignr_xmm_k1z_xmm_xmmm128_imm8 = 3_770, EVEX_Vpalignr_ymm_k1z_ymm_ymmm256_imm8 = 3_771, EVEX_Vpalignr_zmm_k1z_zmm_zmmm512_imm8 = 3_772, Pextrb_r32m8_xmm_imm8 = 3_773, Pextrb_r64m8_xmm_imm8 = 3_774, VEX_Vpextrb_r32m8_xmm_imm8 = 3_775, VEX_Vpextrb_r64m8_xmm_imm8 = 3_776, 
EVEX_Vpextrb_r32m8_xmm_imm8 = 3_777, EVEX_Vpextrb_r64m8_xmm_imm8 = 3_778, Pextrw_r32m16_xmm_imm8 = 3_779, Pextrw_r64m16_xmm_imm8 = 3_780, VEX_Vpextrw_r32m16_xmm_imm8 = 3_781, VEX_Vpextrw_r64m16_xmm_imm8 = 3_782, EVEX_Vpextrw_r32m16_xmm_imm8 = 3_783, EVEX_Vpextrw_r64m16_xmm_imm8 = 3_784, Pextrd_rm32_xmm_imm8 = 3_785, Pextrq_rm64_xmm_imm8 = 3_786, VEX_Vpextrd_rm32_xmm_imm8 = 3_787, VEX_Vpextrq_rm64_xmm_imm8 = 3_788, EVEX_Vpextrd_rm32_xmm_imm8 = 3_789, EVEX_Vpextrq_rm64_xmm_imm8 = 3_790, Extractps_rm32_xmm_imm8 = 3_791, Extractps_r64m32_xmm_imm8 = 3_792, VEX_Vextractps_rm32_xmm_imm8 = 3_793, VEX_Vextractps_r64m32_xmm_imm8 = 3_794, EVEX_Vextractps_rm32_xmm_imm8 = 3_795, EVEX_Vextractps_r64m32_xmm_imm8 = 3_796, VEX_Vinsertf128_ymm_ymm_xmmm128_imm8 = 3_797, EVEX_Vinsertf32x4_ymm_k1z_ymm_xmmm128_imm8 = 3_798, EVEX_Vinsertf32x4_zmm_k1z_zmm_xmmm128_imm8 = 3_799, EVEX_Vinsertf64x2_ymm_k1z_ymm_xmmm128_imm8 = 3_800, EVEX_Vinsertf64x2_zmm_k1z_zmm_xmmm128_imm8 = 3_801, VEX_Vextractf128_xmmm128_ymm_imm8 = 3_802, EVEX_Vextractf32x4_xmmm128_k1z_ymm_imm8 = 3_803, EVEX_Vextractf32x4_xmmm128_k1z_zmm_imm8 = 3_804, EVEX_Vextractf64x2_xmmm128_k1z_ymm_imm8 = 3_805, EVEX_Vextractf64x2_xmmm128_k1z_zmm_imm8 = 3_806, EVEX_Vinsertf32x8_zmm_k1z_zmm_ymmm256_imm8 = 3_807, EVEX_Vinsertf64x4_zmm_k1z_zmm_ymmm256_imm8 = 3_808, EVEX_Vextractf32x8_ymmm256_k1z_zmm_imm8 = 3_809, EVEX_Vextractf64x4_ymmm256_k1z_zmm_imm8 = 3_810, VEX_Vcvtps2ph_xmmm64_xmm_imm8 = 3_811, VEX_Vcvtps2ph_xmmm128_ymm_imm8 = 3_812, EVEX_Vcvtps2ph_xmmm64_k1z_xmm_imm8 = 3_813, EVEX_Vcvtps2ph_xmmm128_k1z_ymm_imm8 = 3_814, EVEX_Vcvtps2ph_ymmm256_k1z_zmm_imm8_sae = 3_815, EVEX_Vpcmpud_kr_k1_xmm_xmmm128b32_imm8 = 3_816, EVEX_Vpcmpud_kr_k1_ymm_ymmm256b32_imm8 = 3_817, EVEX_Vpcmpud_kr_k1_zmm_zmmm512b32_imm8 = 3_818, EVEX_Vpcmpuq_kr_k1_xmm_xmmm128b64_imm8 = 3_819, EVEX_Vpcmpuq_kr_k1_ymm_ymmm256b64_imm8 = 3_820, EVEX_Vpcmpuq_kr_k1_zmm_zmmm512b64_imm8 = 3_821, EVEX_Vpcmpd_kr_k1_xmm_xmmm128b32_imm8 = 3_822, EVEX_Vpcmpd_kr_k1_ymm_ymmm256b32_imm8 = 3_823, EVEX_Vpcmpd_kr_k1_zmm_zmmm512b32_imm8 = 3_824, EVEX_Vpcmpq_kr_k1_xmm_xmmm128b64_imm8 = 3_825, EVEX_Vpcmpq_kr_k1_ymm_ymmm256b64_imm8 = 3_826, EVEX_Vpcmpq_kr_k1_zmm_zmmm512b64_imm8 = 3_827, Pinsrb_xmm_r32m8_imm8 = 3_828, Pinsrb_xmm_r64m8_imm8 = 3_829, VEX_Vpinsrb_xmm_xmm_r32m8_imm8 = 3_830, VEX_Vpinsrb_xmm_xmm_r64m8_imm8 = 3_831, EVEX_Vpinsrb_xmm_xmm_r32m8_imm8 = 3_832, EVEX_Vpinsrb_xmm_xmm_r64m8_imm8 = 3_833, Insertps_xmm_xmmm32_imm8 = 3_834, VEX_Vinsertps_xmm_xmm_xmmm32_imm8 = 3_835, EVEX_Vinsertps_xmm_xmm_xmmm32_imm8 = 3_836, Pinsrd_xmm_rm32_imm8 = 3_837, Pinsrq_xmm_rm64_imm8 = 3_838, VEX_Vpinsrd_xmm_xmm_rm32_imm8 = 3_839, VEX_Vpinsrq_xmm_xmm_rm64_imm8 = 3_840, EVEX_Vpinsrd_xmm_xmm_rm32_imm8 = 3_841, EVEX_Vpinsrq_xmm_xmm_rm64_imm8 = 3_842, EVEX_Vshuff32x4_ymm_k1z_ymm_ymmm256b32_imm8 = 3_843, EVEX_Vshuff32x4_zmm_k1z_zmm_zmmm512b32_imm8 = 3_844, EVEX_Vshuff64x2_ymm_k1z_ymm_ymmm256b64_imm8 = 3_845, EVEX_Vshuff64x2_zmm_k1z_zmm_zmmm512b64_imm8 = 3_846, EVEX_Vpternlogd_xmm_k1z_xmm_xmmm128b32_imm8 = 3_847, EVEX_Vpternlogd_ymm_k1z_ymm_ymmm256b32_imm8 = 3_848, EVEX_Vpternlogd_zmm_k1z_zmm_zmmm512b32_imm8 = 3_849, EVEX_Vpternlogq_xmm_k1z_xmm_xmmm128b64_imm8 = 3_850, EVEX_Vpternlogq_ymm_k1z_ymm_ymmm256b64_imm8 = 3_851, EVEX_Vpternlogq_zmm_k1z_zmm_zmmm512b64_imm8 = 3_852, EVEX_Vgetmantps_xmm_k1z_xmmm128b32_imm8 = 3_853, EVEX_Vgetmantps_ymm_k1z_ymmm256b32_imm8 = 3_854, EVEX_Vgetmantps_zmm_k1z_zmmm512b32_imm8_sae = 3_855, EVEX_Vgetmantpd_xmm_k1z_xmmm128b64_imm8 = 3_856, EVEX_Vgetmantpd_ymm_k1z_ymmm256b64_imm8 = 3_857, 
EVEX_Vgetmantpd_zmm_k1z_zmmm512b64_imm8_sae = 3_858, EVEX_Vgetmantss_xmm_k1z_xmm_xmmm32_imm8_sae = 3_859, EVEX_Vgetmantsd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_860, VEX_Kshiftrb_kr_kr_imm8 = 3_861, VEX_Kshiftrw_kr_kr_imm8 = 3_862, VEX_Kshiftrd_kr_kr_imm8 = 3_863, VEX_Kshiftrq_kr_kr_imm8 = 3_864, VEX_Kshiftlb_kr_kr_imm8 = 3_865, VEX_Kshiftlw_kr_kr_imm8 = 3_866, VEX_Kshiftld_kr_kr_imm8 = 3_867, VEX_Kshiftlq_kr_kr_imm8 = 3_868, VEX_Vinserti128_ymm_ymm_xmmm128_imm8 = 3_869, EVEX_Vinserti32x4_ymm_k1z_ymm_xmmm128_imm8 = 3_870, EVEX_Vinserti32x4_zmm_k1z_zmm_xmmm128_imm8 = 3_871, EVEX_Vinserti64x2_ymm_k1z_ymm_xmmm128_imm8 = 3_872, EVEX_Vinserti64x2_zmm_k1z_zmm_xmmm128_imm8 = 3_873, VEX_Vextracti128_xmmm128_ymm_imm8 = 3_874, EVEX_Vextracti32x4_xmmm128_k1z_ymm_imm8 = 3_875, EVEX_Vextracti32x4_xmmm128_k1z_zmm_imm8 = 3_876, EVEX_Vextracti64x2_xmmm128_k1z_ymm_imm8 = 3_877, EVEX_Vextracti64x2_xmmm128_k1z_zmm_imm8 = 3_878, EVEX_Vinserti32x8_zmm_k1z_zmm_ymmm256_imm8 = 3_879, EVEX_Vinserti64x4_zmm_k1z_zmm_ymmm256_imm8 = 3_880, EVEX_Vextracti32x8_ymmm256_k1z_zmm_imm8 = 3_881, EVEX_Vextracti64x4_ymmm256_k1z_zmm_imm8 = 3_882, EVEX_Vpcmpub_kr_k1_xmm_xmmm128_imm8 = 3_883, EVEX_Vpcmpub_kr_k1_ymm_ymmm256_imm8 = 3_884, EVEX_Vpcmpub_kr_k1_zmm_zmmm512_imm8 = 3_885, EVEX_Vpcmpuw_kr_k1_xmm_xmmm128_imm8 = 3_886, EVEX_Vpcmpuw_kr_k1_ymm_ymmm256_imm8 = 3_887, EVEX_Vpcmpuw_kr_k1_zmm_zmmm512_imm8 = 3_888, EVEX_Vpcmpb_kr_k1_xmm_xmmm128_imm8 = 3_889, EVEX_Vpcmpb_kr_k1_ymm_ymmm256_imm8 = 3_890, EVEX_Vpcmpb_kr_k1_zmm_zmmm512_imm8 = 3_891, EVEX_Vpcmpw_kr_k1_xmm_xmmm128_imm8 = 3_892, EVEX_Vpcmpw_kr_k1_ymm_ymmm256_imm8 = 3_893, EVEX_Vpcmpw_kr_k1_zmm_zmmm512_imm8 = 3_894, Dpps_xmm_xmmm128_imm8 = 3_895, VEX_Vdpps_xmm_xmm_xmmm128_imm8 = 3_896, VEX_Vdpps_ymm_ymm_ymmm256_imm8 = 3_897, Dppd_xmm_xmmm128_imm8 = 3_898, VEX_Vdppd_xmm_xmm_xmmm128_imm8 = 3_899, Mpsadbw_xmm_xmmm128_imm8 = 3_900, VEX_Vmpsadbw_xmm_xmm_xmmm128_imm8 = 3_901, VEX_Vmpsadbw_ymm_ymm_ymmm256_imm8 = 3_902, EVEX_Vdbpsadbw_xmm_k1z_xmm_xmmm128_imm8 = 3_903, EVEX_Vdbpsadbw_ymm_k1z_ymm_ymmm256_imm8 = 3_904, EVEX_Vdbpsadbw_zmm_k1z_zmm_zmmm512_imm8 = 3_905, EVEX_Vshufi32x4_ymm_k1z_ymm_ymmm256b32_imm8 = 3_906, EVEX_Vshufi32x4_zmm_k1z_zmm_zmmm512b32_imm8 = 3_907, EVEX_Vshufi64x2_ymm_k1z_ymm_ymmm256b64_imm8 = 3_908, EVEX_Vshufi64x2_zmm_k1z_zmm_zmmm512b64_imm8 = 3_909, Pclmulqdq_xmm_xmmm128_imm8 = 3_910, VEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8 = 3_911, VEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8 = 3_912, EVEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8 = 3_913, EVEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8 = 3_914, EVEX_Vpclmulqdq_zmm_zmm_zmmm512_imm8 = 3_915, VEX_Vperm2i128_ymm_ymm_ymmm256_imm8 = 3_916, VEX_Vpermil2ps_xmm_xmm_xmmm128_xmm_imm4 = 3_917, VEX_Vpermil2ps_ymm_ymm_ymmm256_ymm_imm4 = 3_918, VEX_Vpermil2ps_xmm_xmm_xmm_xmmm128_imm4 = 3_919, VEX_Vpermil2ps_ymm_ymm_ymm_ymmm256_imm4 = 3_920, VEX_Vpermil2pd_xmm_xmm_xmmm128_xmm_imm4 = 3_921, VEX_Vpermil2pd_ymm_ymm_ymmm256_ymm_imm4 = 3_922, VEX_Vpermil2pd_xmm_xmm_xmm_xmmm128_imm4 = 3_923, VEX_Vpermil2pd_ymm_ymm_ymm_ymmm256_imm4 = 3_924, VEX_Vblendvps_xmm_xmm_xmmm128_xmm = 3_925, VEX_Vblendvps_ymm_ymm_ymmm256_ymm = 3_926, VEX_Vblendvpd_xmm_xmm_xmmm128_xmm = 3_927, VEX_Vblendvpd_ymm_ymm_ymmm256_ymm = 3_928, VEX_Vpblendvb_xmm_xmm_xmmm128_xmm = 3_929, VEX_Vpblendvb_ymm_ymm_ymmm256_ymm = 3_930, EVEX_Vrangeps_xmm_k1z_xmm_xmmm128b32_imm8 = 3_931, EVEX_Vrangeps_ymm_k1z_ymm_ymmm256b32_imm8 = 3_932, EVEX_Vrangeps_zmm_k1z_zmm_zmmm512b32_imm8_sae = 3_933, EVEX_Vrangepd_xmm_k1z_xmm_xmmm128b64_imm8 = 3_934, EVEX_Vrangepd_ymm_k1z_ymm_ymmm256b64_imm8 = 3_935, 
EVEX_Vrangepd_zmm_k1z_zmm_zmmm512b64_imm8_sae = 3_936, EVEX_Vrangess_xmm_k1z_xmm_xmmm32_imm8_sae = 3_937, EVEX_Vrangesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_938, EVEX_Vfixupimmps_xmm_k1z_xmm_xmmm128b32_imm8 = 3_939, EVEX_Vfixupimmps_ymm_k1z_ymm_ymmm256b32_imm8 = 3_940, EVEX_Vfixupimmps_zmm_k1z_zmm_zmmm512b32_imm8_sae = 3_941, EVEX_Vfixupimmpd_xmm_k1z_xmm_xmmm128b64_imm8 = 3_942, EVEX_Vfixupimmpd_ymm_k1z_ymm_ymmm256b64_imm8 = 3_943, EVEX_Vfixupimmpd_zmm_k1z_zmm_zmmm512b64_imm8_sae = 3_944, EVEX_Vfixupimmss_xmm_k1z_xmm_xmmm32_imm8_sae = 3_945, EVEX_Vfixupimmsd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_946, EVEX_Vreduceps_xmm_k1z_xmmm128b32_imm8 = 3_947, EVEX_Vreduceps_ymm_k1z_ymmm256b32_imm8 = 3_948, EVEX_Vreduceps_zmm_k1z_zmmm512b32_imm8_sae = 3_949, EVEX_Vreducepd_xmm_k1z_xmmm128b64_imm8 = 3_950, EVEX_Vreducepd_ymm_k1z_ymmm256b64_imm8 = 3_951, EVEX_Vreducepd_zmm_k1z_zmmm512b64_imm8_sae = 3_952, EVEX_Vreducess_xmm_k1z_xmm_xmmm32_imm8_sae = 3_953, EVEX_Vreducesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_954, VEX_Vfmaddsubps_xmm_xmm_xmmm128_xmm = 3_955, VEX_Vfmaddsubps_ymm_ymm_ymmm256_ymm = 3_956, VEX_Vfmaddsubps_xmm_xmm_xmm_xmmm128 = 3_957, VEX_Vfmaddsubps_ymm_ymm_ymm_ymmm256 = 3_958, VEX_Vfmaddsubpd_xmm_xmm_xmmm128_xmm = 3_959, VEX_Vfmaddsubpd_ymm_ymm_ymmm256_ymm = 3_960, VEX_Vfmaddsubpd_xmm_xmm_xmm_xmmm128 = 3_961, VEX_Vfmaddsubpd_ymm_ymm_ymm_ymmm256 = 3_962, VEX_Vfmsubaddps_xmm_xmm_xmmm128_xmm = 3_963, VEX_Vfmsubaddps_ymm_ymm_ymmm256_ymm = 3_964, VEX_Vfmsubaddps_xmm_xmm_xmm_xmmm128 = 3_965, VEX_Vfmsubaddps_ymm_ymm_ymm_ymmm256 = 3_966, VEX_Vfmsubaddpd_xmm_xmm_xmmm128_xmm = 3_967, VEX_Vfmsubaddpd_ymm_ymm_ymmm256_ymm = 3_968, VEX_Vfmsubaddpd_xmm_xmm_xmm_xmmm128 = 3_969, VEX_Vfmsubaddpd_ymm_ymm_ymm_ymmm256 = 3_970, Pcmpestrm_xmm_xmmm128_imm8 = 3_971, Pcmpestrm64_xmm_xmmm128_imm8 = 3_972, VEX_Vpcmpestrm_xmm_xmmm128_imm8 = 3_973, VEX_Vpcmpestrm64_xmm_xmmm128_imm8 = 3_974, Pcmpestri_xmm_xmmm128_imm8 = 3_975, Pcmpestri64_xmm_xmmm128_imm8 = 3_976, VEX_Vpcmpestri_xmm_xmmm128_imm8 = 3_977, VEX_Vpcmpestri64_xmm_xmmm128_imm8 = 3_978, Pcmpistrm_xmm_xmmm128_imm8 = 3_979, VEX_Vpcmpistrm_xmm_xmmm128_imm8 = 3_980, Pcmpistri_xmm_xmmm128_imm8 = 3_981, VEX_Vpcmpistri_xmm_xmmm128_imm8 = 3_982, EVEX_Vfpclassps_kr_k1_xmmm128b32_imm8 = 3_983, EVEX_Vfpclassps_kr_k1_ymmm256b32_imm8 = 3_984, EVEX_Vfpclassps_kr_k1_zmmm512b32_imm8 = 3_985, EVEX_Vfpclasspd_kr_k1_xmmm128b64_imm8 = 3_986, EVEX_Vfpclasspd_kr_k1_ymmm256b64_imm8 = 3_987, EVEX_Vfpclasspd_kr_k1_zmmm512b64_imm8 = 3_988, EVEX_Vfpclassss_kr_k1_xmmm32_imm8 = 3_989, EVEX_Vfpclasssd_kr_k1_xmmm64_imm8 = 3_990, VEX_Vfmaddps_xmm_xmm_xmmm128_xmm = 3_991, VEX_Vfmaddps_ymm_ymm_ymmm256_ymm = 3_992, VEX_Vfmaddps_xmm_xmm_xmm_xmmm128 = 3_993, VEX_Vfmaddps_ymm_ymm_ymm_ymmm256 = 3_994, VEX_Vfmaddpd_xmm_xmm_xmmm128_xmm = 3_995, VEX_Vfmaddpd_ymm_ymm_ymmm256_ymm = 3_996, VEX_Vfmaddpd_xmm_xmm_xmm_xmmm128 = 3_997, VEX_Vfmaddpd_ymm_ymm_ymm_ymmm256 = 3_998, VEX_Vfmaddss_xmm_xmm_xmmm32_xmm = 3_999, VEX_Vfmaddss_xmm_xmm_xmm_xmmm32 = 4_000, VEX_Vfmaddsd_xmm_xmm_xmmm64_xmm = 4_001, VEX_Vfmaddsd_xmm_xmm_xmm_xmmm64 = 4_002, VEX_Vfmsubps_xmm_xmm_xmmm128_xmm = 4_003, VEX_Vfmsubps_ymm_ymm_ymmm256_ymm = 4_004, VEX_Vfmsubps_xmm_xmm_xmm_xmmm128 = 4_005, VEX_Vfmsubps_ymm_ymm_ymm_ymmm256 = 4_006, VEX_Vfmsubpd_xmm_xmm_xmmm128_xmm = 4_007, VEX_Vfmsubpd_ymm_ymm_ymmm256_ymm = 4_008, VEX_Vfmsubpd_xmm_xmm_xmm_xmmm128 = 4_009, VEX_Vfmsubpd_ymm_ymm_ymm_ymmm256 = 4_010, VEX_Vfmsubss_xmm_xmm_xmmm32_xmm = 4_011, VEX_Vfmsubss_xmm_xmm_xmm_xmmm32 = 4_012, VEX_Vfmsubsd_xmm_xmm_xmmm64_xmm = 4_013, VEX_Vfmsubsd_xmm_xmm_xmm_xmmm64 = 4_014, 
EVEX_Vpshldw_xmm_k1z_xmm_xmmm128_imm8 = 4_015, EVEX_Vpshldw_ymm_k1z_ymm_ymmm256_imm8 = 4_016, EVEX_Vpshldw_zmm_k1z_zmm_zmmm512_imm8 = 4_017, EVEX_Vpshldd_xmm_k1z_xmm_xmmm128b32_imm8 = 4_018, EVEX_Vpshldd_ymm_k1z_ymm_ymmm256b32_imm8 = 4_019, EVEX_Vpshldd_zmm_k1z_zmm_zmmm512b32_imm8 = 4_020, EVEX_Vpshldq_xmm_k1z_xmm_xmmm128b64_imm8 = 4_021, EVEX_Vpshldq_ymm_k1z_ymm_ymmm256b64_imm8 = 4_022, EVEX_Vpshldq_zmm_k1z_zmm_zmmm512b64_imm8 = 4_023, EVEX_Vpshrdw_xmm_k1z_xmm_xmmm128_imm8 = 4_024, EVEX_Vpshrdw_ymm_k1z_ymm_ymmm256_imm8 = 4_025, EVEX_Vpshrdw_zmm_k1z_zmm_zmmm512_imm8 = 4_026, EVEX_Vpshrdd_xmm_k1z_xmm_xmmm128b32_imm8 = 4_027, EVEX_Vpshrdd_ymm_k1z_ymm_ymmm256b32_imm8 = 4_028, EVEX_Vpshrdd_zmm_k1z_zmm_zmmm512b32_imm8 = 4_029, EVEX_Vpshrdq_xmm_k1z_xmm_xmmm128b64_imm8 = 4_030, EVEX_Vpshrdq_ymm_k1z_ymm_ymmm256b64_imm8 = 4_031, EVEX_Vpshrdq_zmm_k1z_zmm_zmmm512b64_imm8 = 4_032, VEX_Vfnmaddps_xmm_xmm_xmmm128_xmm = 4_033, VEX_Vfnmaddps_ymm_ymm_ymmm256_ymm = 4_034, VEX_Vfnmaddps_xmm_xmm_xmm_xmmm128 = 4_035, VEX_Vfnmaddps_ymm_ymm_ymm_ymmm256 = 4_036, VEX_Vfnmaddpd_xmm_xmm_xmmm128_xmm = 4_037, VEX_Vfnmaddpd_ymm_ymm_ymmm256_ymm = 4_038, VEX_Vfnmaddpd_xmm_xmm_xmm_xmmm128 = 4_039, VEX_Vfnmaddpd_ymm_ymm_ymm_ymmm256 = 4_040, VEX_Vfnmaddss_xmm_xmm_xmmm32_xmm = 4_041, VEX_Vfnmaddss_xmm_xmm_xmm_xmmm32 = 4_042, VEX_Vfnmaddsd_xmm_xmm_xmmm64_xmm = 4_043, VEX_Vfnmaddsd_xmm_xmm_xmm_xmmm64 = 4_044, VEX_Vfnmsubps_xmm_xmm_xmmm128_xmm = 4_045, VEX_Vfnmsubps_ymm_ymm_ymmm256_ymm = 4_046, VEX_Vfnmsubps_xmm_xmm_xmm_xmmm128 = 4_047, VEX_Vfnmsubps_ymm_ymm_ymm_ymmm256 = 4_048, VEX_Vfnmsubpd_xmm_xmm_xmmm128_xmm = 4_049, VEX_Vfnmsubpd_ymm_ymm_ymmm256_ymm = 4_050, VEX_Vfnmsubpd_xmm_xmm_xmm_xmmm128 = 4_051, VEX_Vfnmsubpd_ymm_ymm_ymm_ymmm256 = 4_052, VEX_Vfnmsubss_xmm_xmm_xmmm32_xmm = 4_053, VEX_Vfnmsubss_xmm_xmm_xmm_xmmm32 = 4_054, VEX_Vfnmsubsd_xmm_xmm_xmmm64_xmm = 4_055, VEX_Vfnmsubsd_xmm_xmm_xmm_xmmm64 = 4_056, Sha1rnds4_xmm_xmmm128_imm8 = 4_057, Gf2p8affineqb_xmm_xmmm128_imm8 = 4_058, VEX_Vgf2p8affineqb_xmm_xmm_xmmm128_imm8 = 4_059, VEX_Vgf2p8affineqb_ymm_ymm_ymmm256_imm8 = 4_060, EVEX_Vgf2p8affineqb_xmm_k1z_xmm_xmmm128b64_imm8 = 4_061, EVEX_Vgf2p8affineqb_ymm_k1z_ymm_ymmm256b64_imm8 = 4_062, EVEX_Vgf2p8affineqb_zmm_k1z_zmm_zmmm512b64_imm8 = 4_063, Gf2p8affineinvqb_xmm_xmmm128_imm8 = 4_064, VEX_Vgf2p8affineinvqb_xmm_xmm_xmmm128_imm8 = 4_065, VEX_Vgf2p8affineinvqb_ymm_ymm_ymmm256_imm8 = 4_066, EVEX_Vgf2p8affineinvqb_xmm_k1z_xmm_xmmm128b64_imm8 = 4_067, EVEX_Vgf2p8affineinvqb_ymm_k1z_ymm_ymmm256b64_imm8 = 4_068, EVEX_Vgf2p8affineinvqb_zmm_k1z_zmm_zmmm512b64_imm8 = 4_069, Aeskeygenassist_xmm_xmmm128_imm8 = 4_070, VEX_Vaeskeygenassist_xmm_xmmm128_imm8 = 4_071, VEX_Rorx_r32_rm32_imm8 = 4_072, VEX_Rorx_r64_rm64_imm8 = 4_073, XOP_Vpmacssww_xmm_xmm_xmmm128_xmm = 4_074, XOP_Vpmacsswd_xmm_xmm_xmmm128_xmm = 4_075, XOP_Vpmacssdql_xmm_xmm_xmmm128_xmm = 4_076, XOP_Vpmacssdd_xmm_xmm_xmmm128_xmm = 4_077, XOP_Vpmacssdqh_xmm_xmm_xmmm128_xmm = 4_078, XOP_Vpmacsww_xmm_xmm_xmmm128_xmm = 4_079, XOP_Vpmacswd_xmm_xmm_xmmm128_xmm = 4_080, XOP_Vpmacsdql_xmm_xmm_xmmm128_xmm = 4_081, XOP_Vpmacsdd_xmm_xmm_xmmm128_xmm = 4_082, XOP_Vpmacsdqh_xmm_xmm_xmmm128_xmm = 4_083, XOP_Vpcmov_xmm_xmm_xmmm128_xmm = 4_084, XOP_Vpcmov_ymm_ymm_ymmm256_ymm = 4_085, XOP_Vpcmov_xmm_xmm_xmm_xmmm128 = 4_086, XOP_Vpcmov_ymm_ymm_ymm_ymmm256 = 4_087, XOP_Vpperm_xmm_xmm_xmmm128_xmm = 4_088, XOP_Vpperm_xmm_xmm_xmm_xmmm128 = 4_089, XOP_Vpmadcsswd_xmm_xmm_xmmm128_xmm = 4_090, XOP_Vpmadcswd_xmm_xmm_xmmm128_xmm = 4_091, XOP_Vprotb_xmm_xmmm128_imm8 = 4_092, XOP_Vprotw_xmm_xmmm128_imm8 = 
4_093, XOP_Vprotd_xmm_xmmm128_imm8 = 4_094, XOP_Vprotq_xmm_xmmm128_imm8 = 4_095, XOP_Vpcomb_xmm_xmm_xmmm128_imm8 = 4_096, XOP_Vpcomw_xmm_xmm_xmmm128_imm8 = 4_097, XOP_Vpcomd_xmm_xmm_xmmm128_imm8 = 4_098, XOP_Vpcomq_xmm_xmm_xmmm128_imm8 = 4_099, XOP_Vpcomub_xmm_xmm_xmmm128_imm8 = 4_100, XOP_Vpcomuw_xmm_xmm_xmmm128_imm8 = 4_101, XOP_Vpcomud_xmm_xmm_xmmm128_imm8 = 4_102, XOP_Vpcomuq_xmm_xmm_xmmm128_imm8 = 4_103, XOP_Blcfill_r32_rm32 = 4_104, XOP_Blcfill_r64_rm64 = 4_105, XOP_Blsfill_r32_rm32 = 4_106, XOP_Blsfill_r64_rm64 = 4_107, XOP_Blcs_r32_rm32 = 4_108, XOP_Blcs_r64_rm64 = 4_109, XOP_Tzmsk_r32_rm32 = 4_110, XOP_Tzmsk_r64_rm64 = 4_111, XOP_Blcic_r32_rm32 = 4_112, XOP_Blcic_r64_rm64 = 4_113, XOP_Blsic_r32_rm32 = 4_114, XOP_Blsic_r64_rm64 = 4_115, XOP_T1mskc_r32_rm32 = 4_116, XOP_T1mskc_r64_rm64 = 4_117, XOP_Blcmsk_r32_rm32 = 4_118, XOP_Blcmsk_r64_rm64 = 4_119, XOP_Blci_r32_rm32 = 4_120, XOP_Blci_r64_rm64 = 4_121, XOP_Llwpcb_r32 = 4_122, XOP_Llwpcb_r64 = 4_123, XOP_Slwpcb_r32 = 4_124, XOP_Slwpcb_r64 = 4_125, XOP_Vfrczps_xmm_xmmm128 = 4_126, XOP_Vfrczps_ymm_ymmm256 = 4_127, XOP_Vfrczpd_xmm_xmmm128 = 4_128, XOP_Vfrczpd_ymm_ymmm256 = 4_129, XOP_Vfrczss_xmm_xmmm32 = 4_130, XOP_Vfrczsd_xmm_xmmm64 = 4_131, XOP_Vprotb_xmm_xmmm128_xmm = 4_132, XOP_Vprotb_xmm_xmm_xmmm128 = 4_133, XOP_Vprotw_xmm_xmmm128_xmm = 4_134, XOP_Vprotw_xmm_xmm_xmmm128 = 4_135, XOP_Vprotd_xmm_xmmm128_xmm = 4_136, XOP_Vprotd_xmm_xmm_xmmm128 = 4_137, XOP_Vprotq_xmm_xmmm128_xmm = 4_138, XOP_Vprotq_xmm_xmm_xmmm128 = 4_139, XOP_Vpshlb_xmm_xmmm128_xmm = 4_140, XOP_Vpshlb_xmm_xmm_xmmm128 = 4_141, XOP_Vpshlw_xmm_xmmm128_xmm = 4_142, XOP_Vpshlw_xmm_xmm_xmmm128 = 4_143, XOP_Vpshld_xmm_xmmm128_xmm = 4_144, XOP_Vpshld_xmm_xmm_xmmm128 = 4_145, XOP_Vpshlq_xmm_xmmm128_xmm = 4_146, XOP_Vpshlq_xmm_xmm_xmmm128 = 4_147, XOP_Vpshab_xmm_xmmm128_xmm = 4_148, XOP_Vpshab_xmm_xmm_xmmm128 = 4_149, XOP_Vpshaw_xmm_xmmm128_xmm = 4_150, XOP_Vpshaw_xmm_xmm_xmmm128 = 4_151, XOP_Vpshad_xmm_xmmm128_xmm = 4_152, XOP_Vpshad_xmm_xmm_xmmm128 = 4_153, XOP_Vpshaq_xmm_xmmm128_xmm = 4_154, XOP_Vpshaq_xmm_xmm_xmmm128 = 4_155, XOP_Vphaddbw_xmm_xmmm128 = 4_156, XOP_Vphaddbd_xmm_xmmm128 = 4_157, XOP_Vphaddbq_xmm_xmmm128 = 4_158, XOP_Vphaddwd_xmm_xmmm128 = 4_159, XOP_Vphaddwq_xmm_xmmm128 = 4_160, XOP_Vphadddq_xmm_xmmm128 = 4_161, XOP_Vphaddubw_xmm_xmmm128 = 4_162, XOP_Vphaddubd_xmm_xmmm128 = 4_163, XOP_Vphaddubq_xmm_xmmm128 = 4_164, XOP_Vphadduwd_xmm_xmmm128 = 4_165, XOP_Vphadduwq_xmm_xmmm128 = 4_166, XOP_Vphaddudq_xmm_xmmm128 = 4_167, XOP_Vphsubbw_xmm_xmmm128 = 4_168, XOP_Vphsubwd_xmm_xmmm128 = 4_169, XOP_Vphsubdq_xmm_xmmm128 = 4_170, XOP_Bextr_r32_rm32_imm32 = 4_171, XOP_Bextr_r64_rm64_imm32 = 4_172, XOP_Lwpins_r32_rm32_imm32 = 4_173, XOP_Lwpins_r64_rm32_imm32 = 4_174, XOP_Lwpval_r32_rm32_imm32 = 4_175, XOP_Lwpval_r64_rm32_imm32 = 4_176, D3NOW_Pi2fw_mm_mmm64 = 4_177, D3NOW_Pi2fd_mm_mmm64 = 4_178, D3NOW_Pf2iw_mm_mmm64 = 4_179, D3NOW_Pf2id_mm_mmm64 = 4_180, D3NOW_Pfrcpv_mm_mmm64 = 4_181, D3NOW_Pfrsqrtv_mm_mmm64 = 4_182, D3NOW_Pfnacc_mm_mmm64 = 4_183, D3NOW_Pfpnacc_mm_mmm64 = 4_184, D3NOW_Pfcmpge_mm_mmm64 = 4_185, D3NOW_Pfmin_mm_mmm64 = 4_186, D3NOW_Pfrcp_mm_mmm64 = 4_187, D3NOW_Pfrsqrt_mm_mmm64 = 4_188, D3NOW_Pfsub_mm_mmm64 = 4_189, D3NOW_Pfadd_mm_mmm64 = 4_190, D3NOW_Pfcmpgt_mm_mmm64 = 4_191, D3NOW_Pfmax_mm_mmm64 = 4_192, D3NOW_Pfrcpit1_mm_mmm64 = 4_193, D3NOW_Pfrsqit1_mm_mmm64 = 4_194, D3NOW_Pfsubr_mm_mmm64 = 4_195, D3NOW_Pfacc_mm_mmm64 = 4_196, D3NOW_Pfcmpeq_mm_mmm64 = 4_197, D3NOW_Pfmul_mm_mmm64 = 4_198, D3NOW_Pfrcpit2_mm_mmm64 = 4_199, D3NOW_Pmulhrw_mm_mmm64 = 4_200, 
D3NOW_Pswapd_mm_mmm64 = 4_201, D3NOW_Pavgusb_mm_mmm64 = 4_202, Rmpadjust = 4_203, Rmpupdate = 4_204, Psmash = 4_205, Pvalidatew = 4_206, Pvalidated = 4_207, Pvalidateq = 4_208, Serialize = 4_209, Xsusldtrk = 4_210, Xresldtrk = 4_211, Invlpgbw = 4_212, Invlpgbd = 4_213, Invlpgbq = 4_214, Tlbsync = 4_215, Prefetchreserved3_m8 = 4_216, Prefetchreserved4_m8 = 4_217, Prefetchreserved5_m8 = 4_218, Prefetchreserved6_m8 = 4_219, Prefetchreserved7_m8 = 4_220, Ud0 = 4_221, Vmgexit = 4_222, Getsecq = 4_223, VEX_Ldtilecfg_m512 = 4_224, VEX_Tilerelease = 4_225, VEX_Sttilecfg_m512 = 4_226, VEX_Tilezero_tmm = 4_227, VEX_Tileloaddt1_tmm_sibmem = 4_228, VEX_Tilestored_sibmem_tmm = 4_229, VEX_Tileloadd_tmm_sibmem = 4_230, VEX_Tdpbf16ps_tmm_tmm_tmm = 4_231, VEX_Tdpbuud_tmm_tmm_tmm = 4_232, VEX_Tdpbusd_tmm_tmm_tmm = 4_233, VEX_Tdpbsud_tmm_tmm_tmm = 4_234, VEX_Tdpbssd_tmm_tmm_tmm = 4_235, Fnstdw_AX = 4_236, Fnstsg_AX = 4_237, Rdshr_rm32 = 4_238, Wrshr_rm32 = 4_239, Smint = 4_240, Dmint = 4_241, Rdm = 4_242, Svdc_m80_Sreg = 4_243, Rsdc_Sreg_m80 = 4_244, Svldt_m80 = 4_245, Rsldt_m80 = 4_246, Svts_m80 = 4_247, Rsts_m80 = 4_248, Smint_0F7E = 4_249, Bb0_reset = 4_250, Bb1_reset = 4_251, Cpu_write = 4_252, Cpu_read = 4_253, Altinst = 4_254, Paveb_mm_mmm64 = 4_255, Paddsiw_mm_mmm64 = 4_256, Pmagw_mm_mmm64 = 4_257, Pdistib_mm_m64 = 4_258, Psubsiw_mm_mmm64 = 4_259, Pmvzb_mm_m64 = 4_260, Pmulhrw_mm_mmm64 = 4_261, Pmvnzb_mm_m64 = 4_262, Pmvlzb_mm_m64 = 4_263, Pmvgezb_mm_m64 = 4_264, Pmulhriw_mm_mmm64 = 4_265, Pmachriw_mm_m64 = 4_266, Cyrix_D9D7 = 4_267, Cyrix_D9E2 = 4_268, Ftstp = 4_269, Cyrix_D9E7 = 4_270, Frint2 = 4_271, Frichop = 4_272, Cyrix_DED8 = 4_273, Cyrix_DEDA = 4_274, Cyrix_DEDC = 4_275, Cyrix_DEDD = 4_276, Cyrix_DEDE = 4_277, Frinear = 4_278, Tdcall = 4_279, Seamret = 4_280, Seamops = 4_281, Seamcall = 4_282, Aesencwide128kl_m384 = 4_283, Aesdecwide128kl_m384 = 4_284, Aesencwide256kl_m512 = 4_285, Aesdecwide256kl_m512 = 4_286, Loadiwkey_xmm_xmm = 4_287, Aesenc128kl_xmm_m384 = 4_288, Aesdec128kl_xmm_m384 = 4_289, Aesenc256kl_xmm_m512 = 4_290, Aesdec256kl_xmm_m512 = 4_291, Encodekey128_r32_r32 = 4_292, Encodekey256_r32_r32 = 4_293, VEX_Vbroadcastss_xmm_xmm = 4_294, VEX_Vbroadcastss_ymm_xmm = 4_295, VEX_Vbroadcastsd_ymm_xmm = 4_296, Vmgexit_F2 = 4_297, Uiret = 4_298, Testui = 4_299, Clui = 4_300, Stui = 4_301, Senduipi_r64 = 4_302, Hreset_imm8 = 4_303, VEX_Vpdpbusd_xmm_xmm_xmmm128 = 4_304, VEX_Vpdpbusd_ymm_ymm_ymmm256 = 4_305, VEX_Vpdpbusds_xmm_xmm_xmmm128 = 4_306, VEX_Vpdpbusds_ymm_ymm_ymmm256 = 4_307, VEX_Vpdpwssd_xmm_xmm_xmmm128 = 4_308, VEX_Vpdpwssd_ymm_ymm_ymmm256 = 4_309, VEX_Vpdpwssds_xmm_xmm_xmmm128 = 4_310, VEX_Vpdpwssds_ymm_ymm_ymmm256 = 4_311, Ccs_hash_16 = 4_312, Ccs_hash_32 = 4_313, Ccs_hash_64 = 4_314, Ccs_encrypt_16 = 4_315, Ccs_encrypt_32 = 4_316, Ccs_encrypt_64 = 4_317, Lkgs_rm16 = 4_318, Lkgs_r32m16 = 4_319, Lkgs_r64m16 = 4_320, Eretu = 4_321, Erets = 4_322, EVEX_Vaddph_xmm_k1z_xmm_xmmm128b16 = 4_323, EVEX_Vaddph_ymm_k1z_ymm_ymmm256b16 = 4_324, EVEX_Vaddph_zmm_k1z_zmm_zmmm512b16_er = 4_325, EVEX_Vaddsh_xmm_k1z_xmm_xmmm16_er = 4_326, EVEX_Vcmpph_kr_k1_xmm_xmmm128b16_imm8 = 4_327, EVEX_Vcmpph_kr_k1_ymm_ymmm256b16_imm8 = 4_328, EVEX_Vcmpph_kr_k1_zmm_zmmm512b16_imm8_sae = 4_329, EVEX_Vcmpsh_kr_k1_xmm_xmmm16_imm8_sae = 4_330, EVEX_Vcomish_xmm_xmmm16_sae = 4_331, EVEX_Vcvtdq2ph_xmm_k1z_xmmm128b32 = 4_332, EVEX_Vcvtdq2ph_xmm_k1z_ymmm256b32 = 4_333, EVEX_Vcvtdq2ph_ymm_k1z_zmmm512b32_er = 4_334, EVEX_Vcvtpd2ph_xmm_k1z_xmmm128b64 = 4_335, EVEX_Vcvtpd2ph_xmm_k1z_ymmm256b64 = 4_336, 
EVEX_Vcvtpd2ph_xmm_k1z_zmmm512b64_er = 4_337, EVEX_Vcvtph2dq_xmm_k1z_xmmm64b16 = 4_338, EVEX_Vcvtph2dq_ymm_k1z_xmmm128b16 = 4_339, EVEX_Vcvtph2dq_zmm_k1z_ymmm256b16_er = 4_340, EVEX_Vcvtph2pd_xmm_k1z_xmmm32b16 = 4_341, EVEX_Vcvtph2pd_ymm_k1z_xmmm64b16 = 4_342, EVEX_Vcvtph2pd_zmm_k1z_xmmm128b16_sae = 4_343, EVEX_Vcvtph2psx_xmm_k1z_xmmm64b16 = 4_344, EVEX_Vcvtph2psx_ymm_k1z_xmmm128b16 = 4_345, EVEX_Vcvtph2psx_zmm_k1z_ymmm256b16_sae = 4_346, EVEX_Vcvtph2qq_xmm_k1z_xmmm32b16 = 4_347, EVEX_Vcvtph2qq_ymm_k1z_xmmm64b16 = 4_348, EVEX_Vcvtph2qq_zmm_k1z_xmmm128b16_er = 4_349, EVEX_Vcvtph2udq_xmm_k1z_xmmm64b16 = 4_350, EVEX_Vcvtph2udq_ymm_k1z_xmmm128b16 = 4_351, EVEX_Vcvtph2udq_zmm_k1z_ymmm256b16_er = 4_352, EVEX_Vcvtph2uqq_xmm_k1z_xmmm32b16 = 4_353, EVEX_Vcvtph2uqq_ymm_k1z_xmmm64b16 = 4_354, EVEX_Vcvtph2uqq_zmm_k1z_xmmm128b16_er = 4_355, EVEX_Vcvtph2uw_xmm_k1z_xmmm128b16 = 4_356, EVEX_Vcvtph2uw_ymm_k1z_ymmm256b16 = 4_357, EVEX_Vcvtph2uw_zmm_k1z_zmmm512b16_er = 4_358, EVEX_Vcvtph2w_xmm_k1z_xmmm128b16 = 4_359, EVEX_Vcvtph2w_ymm_k1z_ymmm256b16 = 4_360, EVEX_Vcvtph2w_zmm_k1z_zmmm512b16_er = 4_361, EVEX_Vcvtps2phx_xmm_k1z_xmmm128b32 = 4_362, EVEX_Vcvtps2phx_xmm_k1z_ymmm256b32 = 4_363, EVEX_Vcvtps2phx_ymm_k1z_zmmm512b32_er = 4_364, EVEX_Vcvtqq2ph_xmm_k1z_xmmm128b64 = 4_365, EVEX_Vcvtqq2ph_xmm_k1z_ymmm256b64 = 4_366, EVEX_Vcvtqq2ph_xmm_k1z_zmmm512b64_er = 4_367, EVEX_Vcvtsd2sh_xmm_k1z_xmm_xmmm64_er = 4_368, EVEX_Vcvtsh2sd_xmm_k1z_xmm_xmmm16_sae = 4_369, EVEX_Vcvtsh2si_r32_xmmm16_er = 4_370, EVEX_Vcvtsh2si_r64_xmmm16_er = 4_371, EVEX_Vcvtsh2ss_xmm_k1z_xmm_xmmm16_sae = 4_372, EVEX_Vcvtsh2usi_r32_xmmm16_er = 4_373, EVEX_Vcvtsh2usi_r64_xmmm16_er = 4_374, EVEX_Vcvtsi2sh_xmm_xmm_rm32_er = 4_375, EVEX_Vcvtsi2sh_xmm_xmm_rm64_er = 4_376, EVEX_Vcvtss2sh_xmm_k1z_xmm_xmmm32_er = 4_377, EVEX_Vcvttph2dq_xmm_k1z_xmmm64b16 = 4_378, EVEX_Vcvttph2dq_ymm_k1z_xmmm128b16 = 4_379, EVEX_Vcvttph2dq_zmm_k1z_ymmm256b16_sae = 4_380, EVEX_Vcvttph2qq_xmm_k1z_xmmm32b16 = 4_381, EVEX_Vcvttph2qq_ymm_k1z_xmmm64b16 = 4_382, EVEX_Vcvttph2qq_zmm_k1z_xmmm128b16_sae = 4_383, EVEX_Vcvttph2udq_xmm_k1z_xmmm64b16 = 4_384, EVEX_Vcvttph2udq_ymm_k1z_xmmm128b16 = 4_385, EVEX_Vcvttph2udq_zmm_k1z_ymmm256b16_sae = 4_386, EVEX_Vcvttph2uqq_xmm_k1z_xmmm32b16 = 4_387, EVEX_Vcvttph2uqq_ymm_k1z_xmmm64b16 = 4_388, EVEX_Vcvttph2uqq_zmm_k1z_xmmm128b16_sae = 4_389, EVEX_Vcvttph2uw_xmm_k1z_xmmm128b16 = 4_390, EVEX_Vcvttph2uw_ymm_k1z_ymmm256b16 = 4_391, EVEX_Vcvttph2uw_zmm_k1z_zmmm512b16_sae = 4_392, EVEX_Vcvttph2w_xmm_k1z_xmmm128b16 = 4_393, EVEX_Vcvttph2w_ymm_k1z_ymmm256b16 = 4_394, EVEX_Vcvttph2w_zmm_k1z_zmmm512b16_sae = 4_395, EVEX_Vcvttsh2si_r32_xmmm16_sae = 4_396, EVEX_Vcvttsh2si_r64_xmmm16_sae = 4_397, EVEX_Vcvttsh2usi_r32_xmmm16_sae = 4_398, EVEX_Vcvttsh2usi_r64_xmmm16_sae = 4_399, EVEX_Vcvtudq2ph_xmm_k1z_xmmm128b32 = 4_400, EVEX_Vcvtudq2ph_xmm_k1z_ymmm256b32 = 4_401, EVEX_Vcvtudq2ph_ymm_k1z_zmmm512b32_er = 4_402, EVEX_Vcvtuqq2ph_xmm_k1z_xmmm128b64 = 4_403, EVEX_Vcvtuqq2ph_xmm_k1z_ymmm256b64 = 4_404, EVEX_Vcvtuqq2ph_xmm_k1z_zmmm512b64_er = 4_405, EVEX_Vcvtusi2sh_xmm_xmm_rm32_er = 4_406, EVEX_Vcvtusi2sh_xmm_xmm_rm64_er = 4_407, EVEX_Vcvtuw2ph_xmm_k1z_xmmm128b16 = 4_408, EVEX_Vcvtuw2ph_ymm_k1z_ymmm256b16 = 4_409, EVEX_Vcvtuw2ph_zmm_k1z_zmmm512b16_er = 4_410, EVEX_Vcvtw2ph_xmm_k1z_xmmm128b16 = 4_411, EVEX_Vcvtw2ph_ymm_k1z_ymmm256b16 = 4_412, EVEX_Vcvtw2ph_zmm_k1z_zmmm512b16_er = 4_413, EVEX_Vdivph_xmm_k1z_xmm_xmmm128b16 = 4_414, EVEX_Vdivph_ymm_k1z_ymm_ymmm256b16 = 4_415, EVEX_Vdivph_zmm_k1z_zmm_zmmm512b16_er = 4_416, EVEX_Vdivsh_xmm_k1z_xmm_xmmm16_er = 4_417, 
EVEX_Vfcmaddcph_xmm_k1z_xmm_xmmm128b32 = 4_418, EVEX_Vfcmaddcph_ymm_k1z_ymm_ymmm256b32 = 4_419, EVEX_Vfcmaddcph_zmm_k1z_zmm_zmmm512b32_er = 4_420, EVEX_Vfmaddcph_xmm_k1z_xmm_xmmm128b32 = 4_421, EVEX_Vfmaddcph_ymm_k1z_ymm_ymmm256b32 = 4_422, EVEX_Vfmaddcph_zmm_k1z_zmm_zmmm512b32_er = 4_423, EVEX_Vfcmaddcsh_xmm_k1z_xmm_xmmm32_er = 4_424, EVEX_Vfmaddcsh_xmm_k1z_xmm_xmmm32_er = 4_425, EVEX_Vfcmulcph_xmm_k1z_xmm_xmmm128b32 = 4_426, EVEX_Vfcmulcph_ymm_k1z_ymm_ymmm256b32 = 4_427, EVEX_Vfcmulcph_zmm_k1z_zmm_zmmm512b32_er = 4_428, EVEX_Vfmulcph_xmm_k1z_xmm_xmmm128b32 = 4_429, EVEX_Vfmulcph_ymm_k1z_ymm_ymmm256b32 = 4_430, EVEX_Vfmulcph_zmm_k1z_zmm_zmmm512b32_er = 4_431, EVEX_Vfcmulcsh_xmm_k1z_xmm_xmmm32_er = 4_432, EVEX_Vfmulcsh_xmm_k1z_xmm_xmmm32_er = 4_433, EVEX_Vfmaddsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_434, EVEX_Vfmaddsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_435, EVEX_Vfmaddsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_436, EVEX_Vfmaddsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_437, EVEX_Vfmaddsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_438, EVEX_Vfmaddsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_439, EVEX_Vfmaddsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_440, EVEX_Vfmaddsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_441, EVEX_Vfmaddsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_442, EVEX_Vfmsubadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_443, EVEX_Vfmsubadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_444, EVEX_Vfmsubadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_445, EVEX_Vfmsubadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_446, EVEX_Vfmsubadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_447, EVEX_Vfmsubadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_448, EVEX_Vfmsubadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_449, EVEX_Vfmsubadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_450, EVEX_Vfmsubadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_451, EVEX_Vfmadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_452, EVEX_Vfmadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_453, EVEX_Vfmadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_454, EVEX_Vfmadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_455, EVEX_Vfmadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_456, EVEX_Vfmadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_457, EVEX_Vfmadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_458, EVEX_Vfmadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_459, EVEX_Vfmadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_460, EVEX_Vfnmadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_461, EVEX_Vfnmadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_462, EVEX_Vfnmadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_463, EVEX_Vfnmadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_464, EVEX_Vfnmadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_465, EVEX_Vfnmadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_466, EVEX_Vfnmadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_467, EVEX_Vfnmadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_468, EVEX_Vfnmadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_469, EVEX_Vfmadd132sh_xmm_k1z_xmm_xmmm16_er = 4_470, EVEX_Vfmadd213sh_xmm_k1z_xmm_xmmm16_er = 4_471, EVEX_Vfmadd231sh_xmm_k1z_xmm_xmmm16_er = 4_472, EVEX_Vfnmadd132sh_xmm_k1z_xmm_xmmm16_er = 4_473, EVEX_Vfnmadd213sh_xmm_k1z_xmm_xmmm16_er = 4_474, EVEX_Vfnmadd231sh_xmm_k1z_xmm_xmmm16_er = 4_475, EVEX_Vfmsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_476, EVEX_Vfmsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_477, EVEX_Vfmsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_478, EVEX_Vfmsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_479, EVEX_Vfmsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_480, EVEX_Vfmsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_481, EVEX_Vfmsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_482, EVEX_Vfmsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_483, EVEX_Vfmsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_484, EVEX_Vfnmsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_485, EVEX_Vfnmsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_486, EVEX_Vfnmsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_487, 
EVEX_Vfnmsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_488, EVEX_Vfnmsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_489, EVEX_Vfnmsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_490, EVEX_Vfnmsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_491, EVEX_Vfnmsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_492, EVEX_Vfnmsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_493, EVEX_Vfmsub132sh_xmm_k1z_xmm_xmmm16_er = 4_494, EVEX_Vfmsub213sh_xmm_k1z_xmm_xmmm16_er = 4_495, EVEX_Vfmsub231sh_xmm_k1z_xmm_xmmm16_er = 4_496, EVEX_Vfnmsub132sh_xmm_k1z_xmm_xmmm16_er = 4_497, EVEX_Vfnmsub213sh_xmm_k1z_xmm_xmmm16_er = 4_498, EVEX_Vfnmsub231sh_xmm_k1z_xmm_xmmm16_er = 4_499, EVEX_Vfpclassph_kr_k1_xmmm128b16_imm8 = 4_500, EVEX_Vfpclassph_kr_k1_ymmm256b16_imm8 = 4_501, EVEX_Vfpclassph_kr_k1_zmmm512b16_imm8 = 4_502, EVEX_Vfpclasssh_kr_k1_xmmm16_imm8 = 4_503, EVEX_Vgetexpph_xmm_k1z_xmmm128b16 = 4_504, EVEX_Vgetexpph_ymm_k1z_ymmm256b16 = 4_505, EVEX_Vgetexpph_zmm_k1z_zmmm512b16_sae = 4_506, EVEX_Vgetexpsh_xmm_k1z_xmm_xmmm16_sae = 4_507, EVEX_Vgetmantph_xmm_k1z_xmmm128b16_imm8 = 4_508, EVEX_Vgetmantph_ymm_k1z_ymmm256b16_imm8 = 4_509, EVEX_Vgetmantph_zmm_k1z_zmmm512b16_imm8_sae = 4_510, EVEX_Vgetmantsh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_511, EVEX_Vmaxph_xmm_k1z_xmm_xmmm128b16 = 4_512, EVEX_Vmaxph_ymm_k1z_ymm_ymmm256b16 = 4_513, EVEX_Vmaxph_zmm_k1z_zmm_zmmm512b16_sae = 4_514, EVEX_Vmaxsh_xmm_k1z_xmm_xmmm16_sae = 4_515, EVEX_Vminph_xmm_k1z_xmm_xmmm128b16 = 4_516, EVEX_Vminph_ymm_k1z_ymm_ymmm256b16 = 4_517, EVEX_Vminph_zmm_k1z_zmm_zmmm512b16_sae = 4_518, EVEX_Vminsh_xmm_k1z_xmm_xmmm16_sae = 4_519, EVEX_Vmovsh_xmm_k1z_m16 = 4_520, EVEX_Vmovsh_m16_k1_xmm = 4_521, EVEX_Vmovsh_xmm_k1z_xmm_xmm = 4_522, EVEX_Vmovsh_xmm_k1z_xmm_xmm_MAP5_11 = 4_523, EVEX_Vmovw_xmm_r32m16 = 4_524, EVEX_Vmovw_xmm_r64m16 = 4_525, EVEX_Vmovw_r32m16_xmm = 4_526, EVEX_Vmovw_r64m16_xmm = 4_527, EVEX_Vmulph_xmm_k1z_xmm_xmmm128b16 = 4_528, EVEX_Vmulph_ymm_k1z_ymm_ymmm256b16 = 4_529, EVEX_Vmulph_zmm_k1z_zmm_zmmm512b16_er = 4_530, EVEX_Vmulsh_xmm_k1z_xmm_xmmm16_er = 4_531, EVEX_Vrcpph_xmm_k1z_xmmm128b16 = 4_532, EVEX_Vrcpph_ymm_k1z_ymmm256b16 = 4_533, EVEX_Vrcpph_zmm_k1z_zmmm512b16 = 4_534, EVEX_Vrcpsh_xmm_k1z_xmm_xmmm16 = 4_535, EVEX_Vreduceph_xmm_k1z_xmmm128b16_imm8 = 4_536, EVEX_Vreduceph_ymm_k1z_ymmm256b16_imm8 = 4_537, EVEX_Vreduceph_zmm_k1z_zmmm512b16_imm8_sae = 4_538, EVEX_Vreducesh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_539, EVEX_Vrndscaleph_xmm_k1z_xmmm128b16_imm8 = 4_540, EVEX_Vrndscaleph_ymm_k1z_ymmm256b16_imm8 = 4_541, EVEX_Vrndscaleph_zmm_k1z_zmmm512b16_imm8_sae = 4_542, EVEX_Vrndscalesh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_543, EVEX_Vrsqrtph_xmm_k1z_xmmm128b16 = 4_544, EVEX_Vrsqrtph_ymm_k1z_ymmm256b16 = 4_545, EVEX_Vrsqrtph_zmm_k1z_zmmm512b16 = 4_546, EVEX_Vrsqrtsh_xmm_k1z_xmm_xmmm16 = 4_547, EVEX_Vscalefph_xmm_k1z_xmm_xmmm128b16 = 4_548, EVEX_Vscalefph_ymm_k1z_ymm_ymmm256b16 = 4_549, EVEX_Vscalefph_zmm_k1z_zmm_zmmm512b16_er = 4_550, EVEX_Vscalefsh_xmm_k1z_xmm_xmmm16_er = 4_551, EVEX_Vsqrtph_xmm_k1z_xmmm128b16 = 4_552, EVEX_Vsqrtph_ymm_k1z_ymmm256b16 = 4_553, EVEX_Vsqrtph_zmm_k1z_zmmm512b16_er = 4_554, EVEX_Vsqrtsh_xmm_k1z_xmm_xmmm16_er = 4_555, EVEX_Vsubph_xmm_k1z_xmm_xmmm128b16 = 4_556, EVEX_Vsubph_ymm_k1z_ymm_ymmm256b16 = 4_557, EVEX_Vsubph_zmm_k1z_zmm_zmmm512b16_er = 4_558, EVEX_Vsubsh_xmm_k1z_xmm_xmmm16_er = 4_559, EVEX_Vucomish_xmm_xmmm16_sae = 4_560, Rdudbg = 4_561, Wrudbg = 4_562, VEX_KNC_Jkzd_kr_rel8_64 = 4_563, VEX_KNC_Jknzd_kr_rel8_64 = 4_564, VEX_KNC_Vprefetchnta_m8 = 4_565, VEX_KNC_Vprefetch0_m8 = 4_566, VEX_KNC_Vprefetch1_m8 = 4_567, VEX_KNC_Vprefetch2_m8 = 4_568, VEX_KNC_Vprefetchenta_m8 = 4_569, 
VEX_KNC_Vprefetche0_m8 = 4_570, VEX_KNC_Vprefetche1_m8 = 4_571, VEX_KNC_Vprefetche2_m8 = 4_572, VEX_KNC_Kand_kr_kr = 4_573, VEX_KNC_Kandn_kr_kr = 4_574, VEX_KNC_Kandnr_kr_kr = 4_575, VEX_KNC_Knot_kr_kr = 4_576, VEX_KNC_Kor_kr_kr = 4_577, VEX_KNC_Kxnor_kr_kr = 4_578, VEX_KNC_Kxor_kr_kr = 4_579, VEX_KNC_Kmerge2l1h_kr_kr = 4_580, VEX_KNC_Kmerge2l1l_kr_kr = 4_581, VEX_KNC_Jkzd_kr_rel32_64 = 4_582, VEX_KNC_Jknzd_kr_rel32_64 = 4_583, VEX_KNC_Kmov_kr_kr = 4_584, VEX_KNC_Kmov_kr_r32 = 4_585, VEX_KNC_Kmov_r32_kr = 4_586, VEX_KNC_Kconcath_r64_kr_kr = 4_587, VEX_KNC_Kconcatl_r64_kr_kr = 4_588, VEX_KNC_Kortest_kr_kr = 4_589, VEX_KNC_Delay_r32 = 4_590, VEX_KNC_Delay_r64 = 4_591, VEX_KNC_Spflt_r32 = 4_592, VEX_KNC_Spflt_r64 = 4_593, VEX_KNC_Clevict1_m8 = 4_594, VEX_KNC_Clevict0_m8 = 4_595, VEX_KNC_Popcnt_r32_r32 = 4_596, VEX_KNC_Popcnt_r64_r64 = 4_597, VEX_KNC_Tzcnt_r32_r32 = 4_598, VEX_KNC_Tzcnt_r64_r64 = 4_599, VEX_KNC_Tzcnti_r32_r32 = 4_600, VEX_KNC_Tzcnti_r64_r64 = 4_601, VEX_KNC_Lzcnt_r32_r32 = 4_602, VEX_KNC_Lzcnt_r64_r64 = 4_603, VEX_KNC_Undoc_r32_rm32_128_F3_0F38_W0_F0 = 4_604, VEX_KNC_Undoc_r64_rm64_128_F3_0F38_W1_F0 = 4_605, VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F0 = 4_606, VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F0 = 4_607, VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F1 = 4_608, VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F1 = 4_609, VEX_KNC_Kextract_kr_r64_imm8 = 4_610, MVEX_Vprefetchnta_m = 4_611, MVEX_Vprefetch0_m = 4_612, MVEX_Vprefetch1_m = 4_613, MVEX_Vprefetch2_m = 4_614, MVEX_Vprefetchenta_m = 4_615, MVEX_Vprefetche0_m = 4_616, MVEX_Vprefetche1_m = 4_617, MVEX_Vprefetche2_m = 4_618, MVEX_Vmovaps_zmm_k1_zmmmt = 4_619, MVEX_Vmovapd_zmm_k1_zmmmt = 4_620, MVEX_Vmovaps_mt_k1_zmm = 4_621, MVEX_Vmovapd_mt_k1_zmm = 4_622, MVEX_Vmovnrapd_m_k1_zmm = 4_623, MVEX_Vmovnrngoapd_m_k1_zmm = 4_624, MVEX_Vmovnraps_m_k1_zmm = 4_625, MVEX_Vmovnrngoaps_m_k1_zmm = 4_626, MVEX_Vaddps_zmm_k1_zmm_zmmmt = 4_627, MVEX_Vaddpd_zmm_k1_zmm_zmmmt = 4_628, MVEX_Vmulps_zmm_k1_zmm_zmmmt = 4_629, MVEX_Vmulpd_zmm_k1_zmm_zmmmt = 4_630, MVEX_Vcvtps2pd_zmm_k1_zmmmt = 4_631, MVEX_Vcvtpd2ps_zmm_k1_zmmmt = 4_632, MVEX_Vsubps_zmm_k1_zmm_zmmmt = 4_633, MVEX_Vsubpd_zmm_k1_zmm_zmmmt = 4_634, MVEX_Vpcmpgtd_kr_k1_zmm_zmmmt = 4_635, MVEX_Vmovdqa32_zmm_k1_zmmmt = 4_636, MVEX_Vmovdqa64_zmm_k1_zmmmt = 4_637, MVEX_Vpshufd_zmm_k1_zmmmt_imm8 = 4_638, MVEX_Vpsrld_zmm_k1_zmmmt_imm8 = 4_639, MVEX_Vpsrad_zmm_k1_zmmmt_imm8 = 4_640, MVEX_Vpslld_zmm_k1_zmmmt_imm8 = 4_641, MVEX_Vpcmpeqd_kr_k1_zmm_zmmmt = 4_642, MVEX_Vcvtudq2pd_zmm_k1_zmmmt = 4_643, MVEX_Vmovdqa32_mt_k1_zmm = 4_644, MVEX_Vmovdqa64_mt_k1_zmm = 4_645, MVEX_Clevict1_m = 4_646, MVEX_Clevict0_m = 4_647, MVEX_Vcmpps_kr_k1_zmm_zmmmt_imm8 = 4_648, MVEX_Vcmppd_kr_k1_zmm_zmmmt_imm8 = 4_649, MVEX_Vpandd_zmm_k1_zmm_zmmmt = 4_650, MVEX_Vpandq_zmm_k1_zmm_zmmmt = 4_651, MVEX_Vpandnd_zmm_k1_zmm_zmmmt = 4_652, MVEX_Vpandnq_zmm_k1_zmm_zmmmt = 4_653, MVEX_Vcvtdq2pd_zmm_k1_zmmmt = 4_654, MVEX_Vpord_zmm_k1_zmm_zmmmt = 4_655, MVEX_Vporq_zmm_k1_zmm_zmmmt = 4_656, MVEX_Vpxord_zmm_k1_zmm_zmmmt = 4_657, MVEX_Vpxorq_zmm_k1_zmm_zmmmt = 4_658, MVEX_Vpsubd_zmm_k1_zmm_zmmmt = 4_659, MVEX_Vpaddd_zmm_k1_zmm_zmmmt = 4_660, MVEX_Vbroadcastss_zmm_k1_mt = 4_661, MVEX_Vbroadcastsd_zmm_k1_mt = 4_662, MVEX_Vbroadcastf32x4_zmm_k1_mt = 4_663, MVEX_Vbroadcastf64x4_zmm_k1_mt = 4_664, MVEX_Vptestmd_kr_k1_zmm_zmmmt = 4_665, MVEX_Vpermd_zmm_k1_zmm_zmmmt = 4_666, MVEX_Vpminsd_zmm_k1_zmm_zmmmt = 4_667, MVEX_Vpminud_zmm_k1_zmm_zmmmt = 4_668, MVEX_Vpmaxsd_zmm_k1_zmm_zmmmt = 4_669, MVEX_Vpmaxud_zmm_k1_zmm_zmmmt = 4_670, 
MVEX_Vpmulld_zmm_k1_zmm_zmmmt = 4_671, MVEX_Vgetexpps_zmm_k1_zmmmt = 4_672, MVEX_Vgetexppd_zmm_k1_zmmmt = 4_673, MVEX_Vpsrlvd_zmm_k1_zmm_zmmmt = 4_674, MVEX_Vpsravd_zmm_k1_zmm_zmmmt = 4_675, MVEX_Vpsllvd_zmm_k1_zmm_zmmmt = 4_676, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_48 = 4_677, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_49 = 4_678, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4A = 4_679, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4B = 4_680, MVEX_Vaddnps_zmm_k1_zmm_zmmmt = 4_681, MVEX_Vaddnpd_zmm_k1_zmm_zmmmt = 4_682, MVEX_Vgmaxabsps_zmm_k1_zmm_zmmmt = 4_683, MVEX_Vgminps_zmm_k1_zmm_zmmmt = 4_684, MVEX_Vgminpd_zmm_k1_zmm_zmmmt = 4_685, MVEX_Vgmaxps_zmm_k1_zmm_zmmmt = 4_686, MVEX_Vgmaxpd_zmm_k1_zmm_zmmmt = 4_687, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_54 = 4_688, MVEX_Vfixupnanps_zmm_k1_zmm_zmmmt = 4_689, MVEX_Vfixupnanpd_zmm_k1_zmm_zmmmt = 4_690, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_56 = 4_691, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_57 = 4_692, MVEX_Vpbroadcastd_zmm_k1_mt = 4_693, MVEX_Vpbroadcastq_zmm_k1_mt = 4_694, MVEX_Vbroadcasti32x4_zmm_k1_mt = 4_695, MVEX_Vbroadcasti64x4_zmm_k1_mt = 4_696, MVEX_Vpadcd_zmm_k1_kr_zmmmt = 4_697, MVEX_Vpaddsetcd_zmm_k1_kr_zmmmt = 4_698, MVEX_Vpsbbd_zmm_k1_kr_zmmmt = 4_699, MVEX_Vpsubsetbd_zmm_k1_kr_zmmmt = 4_700, MVEX_Vpblendmd_zmm_k1_zmm_zmmmt = 4_701, MVEX_Vpblendmq_zmm_k1_zmm_zmmmt = 4_702, MVEX_Vblendmps_zmm_k1_zmm_zmmmt = 4_703, MVEX_Vblendmpd_zmm_k1_zmm_zmmmt = 4_704, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_67 = 4_705, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_68 = 4_706, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_69 = 4_707, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6A = 4_708, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6B = 4_709, MVEX_Vpsubrd_zmm_k1_zmm_zmmmt = 4_710, MVEX_Vsubrps_zmm_k1_zmm_zmmmt = 4_711, MVEX_Vsubrpd_zmm_k1_zmm_zmmmt = 4_712, MVEX_Vpsbbrd_zmm_k1_kr_zmmmt = 4_713, MVEX_Vpsubrsetbd_zmm_k1_kr_zmmmt = 4_714, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_70 = 4_715, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_71 = 4_716, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_72 = 4_717, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_73 = 4_718, MVEX_Vpcmpltd_kr_k1_zmm_zmmmt = 4_719, MVEX_Vscaleps_zmm_k1_zmm_zmmmt = 4_720, MVEX_Vpmulhud_zmm_k1_zmm_zmmmt = 4_721, MVEX_Vpmulhd_zmm_k1_zmm_zmmmt = 4_722, MVEX_Vpgatherdd_zmm_k1_mvt = 4_723, MVEX_Vpgatherdq_zmm_k1_mvt = 4_724, MVEX_Vgatherdps_zmm_k1_mvt = 4_725, MVEX_Vgatherdpd_zmm_k1_mvt = 4_726, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_94 = 4_727, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_94 = 4_728, MVEX_Vfmadd132ps_zmm_k1_zmm_zmmmt = 4_729, MVEX_Vfmadd132pd_zmm_k1_zmm_zmmmt = 4_730, MVEX_Vfmsub132ps_zmm_k1_zmm_zmmmt = 4_731, MVEX_Vfmsub132pd_zmm_k1_zmm_zmmmt = 4_732, MVEX_Vfnmadd132ps_zmm_k1_zmm_zmmmt = 4_733, MVEX_Vfnmadd132pd_zmm_k1_zmm_zmmmt = 4_734, MVEX_Vfnmsub132ps_zmm_k1_zmm_zmmmt = 4_735, MVEX_Vfnmsub132pd_zmm_k1_zmm_zmmmt = 4_736, MVEX_Vpscatterdd_mvt_k1_zmm = 4_737, MVEX_Vpscatterdq_mvt_k1_zmm = 4_738, MVEX_Vscatterdps_mvt_k1_zmm = 4_739, MVEX_Vscatterdpd_mvt_k1_zmm = 4_740, MVEX_Vfmadd233ps_zmm_k1_zmm_zmmmt = 4_741, MVEX_Vfmadd213ps_zmm_k1_zmm_zmmmt = 4_742, MVEX_Vfmadd213pd_zmm_k1_zmm_zmmmt = 4_743, MVEX_Vfmsub213ps_zmm_k1_zmm_zmmmt = 4_744, MVEX_Vfmsub213pd_zmm_k1_zmm_zmmmt = 4_745, MVEX_Vfnmadd213ps_zmm_k1_zmm_zmmmt = 4_746, MVEX_Vfnmadd213pd_zmm_k1_zmm_zmmmt = 4_747, MVEX_Vfnmsub213ps_zmm_k1_zmm_zmmmt = 4_748, MVEX_Vfnmsub213pd_zmm_k1_zmm_zmmmt = 4_749, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B0 = 4_750, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B2 = 4_751, MVEX_Vpmadd233d_zmm_k1_zmm_zmmmt = 4_752, 
MVEX_Vpmadd231d_zmm_k1_zmm_zmmmt = 4_753, MVEX_Vfmadd231ps_zmm_k1_zmm_zmmmt = 4_754, MVEX_Vfmadd231pd_zmm_k1_zmm_zmmmt = 4_755, MVEX_Vfmsub231ps_zmm_k1_zmm_zmmmt = 4_756, MVEX_Vfmsub231pd_zmm_k1_zmm_zmmmt = 4_757, MVEX_Vfnmadd231ps_zmm_k1_zmm_zmmmt = 4_758, MVEX_Vfnmadd231pd_zmm_k1_zmm_zmmmt = 4_759, MVEX_Vfnmsub231ps_zmm_k1_zmm_zmmmt = 4_760, MVEX_Vfnmsub231pd_zmm_k1_zmm_zmmmt = 4_761, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_C0 = 4_762, MVEX_Vgatherpf0hintdps_mvt_k1 = 4_763, MVEX_Vgatherpf0hintdpd_mvt_k1 = 4_764, MVEX_Vgatherpf0dps_mvt_k1 = 4_765, MVEX_Vgatherpf1dps_mvt_k1 = 4_766, MVEX_Vscatterpf0hintdps_mvt_k1 = 4_767, MVEX_Vscatterpf0hintdpd_mvt_k1 = 4_768, MVEX_Vscatterpf0dps_mvt_k1 = 4_769, MVEX_Vscatterpf1dps_mvt_k1 = 4_770, MVEX_Vexp223ps_zmm_k1_zmmmt = 4_771, MVEX_Vlog2ps_zmm_k1_zmmmt = 4_772, MVEX_Vrcp23ps_zmm_k1_zmmmt = 4_773, MVEX_Vrsqrt23ps_zmm_k1_zmmmt = 4_774, MVEX_Vaddsetsps_zmm_k1_zmm_zmmmt = 4_775, MVEX_Vpaddsetsd_zmm_k1_zmm_zmmmt = 4_776, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CE = 4_777, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_CE = 4_778, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CF = 4_779, MVEX_Vloadunpackld_zmm_k1_mt = 4_780, MVEX_Vloadunpacklq_zmm_k1_mt = 4_781, MVEX_Vpackstoreld_mt_k1_zmm = 4_782, MVEX_Vpackstorelq_mt_k1_zmm = 4_783, MVEX_Vloadunpacklps_zmm_k1_mt = 4_784, MVEX_Vloadunpacklpd_zmm_k1_mt = 4_785, MVEX_Vpackstorelps_mt_k1_zmm = 4_786, MVEX_Vpackstorelpd_mt_k1_zmm = 4_787, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D2 = 4_788, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D2 = 4_789, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D3 = 4_790, MVEX_Vloadunpackhd_zmm_k1_mt = 4_791, MVEX_Vloadunpackhq_zmm_k1_mt = 4_792, MVEX_Vpackstorehd_mt_k1_zmm = 4_793, MVEX_Vpackstorehq_mt_k1_zmm = 4_794, MVEX_Vloadunpackhps_zmm_k1_mt = 4_795, MVEX_Vloadunpackhpd_zmm_k1_mt = 4_796, MVEX_Vpackstorehps_mt_k1_zmm = 4_797, MVEX_Vpackstorehpd_mt_k1_zmm = 4_798, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D6 = 4_799, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D6 = 4_800, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D7 = 4_801, MVEX_Valignd_zmm_k1_zmm_zmmmt_imm8 = 4_802, MVEX_Vpermf32x4_zmm_k1_zmmmt_imm8 = 4_803, MVEX_Vpcmpud_kr_k1_zmm_zmmmt_imm8 = 4_804, MVEX_Vpcmpd_kr_k1_zmm_zmmmt_imm8 = 4_805, MVEX_Vgetmantps_zmm_k1_zmmmt_imm8 = 4_806, MVEX_Vgetmantpd_zmm_k1_zmmmt_imm8 = 4_807, MVEX_Vrndfxpntps_zmm_k1_zmmmt_imm8 = 4_808, MVEX_Vrndfxpntpd_zmm_k1_zmmmt_imm8 = 4_809, MVEX_Vcvtfxpntudq2ps_zmm_k1_zmmmt_imm8 = 4_810, MVEX_Vcvtfxpntps2udq_zmm_k1_zmmmt_imm8 = 4_811, MVEX_Vcvtfxpntpd2udq_zmm_k1_zmmmt_imm8 = 4_812, MVEX_Vcvtfxpntdq2ps_zmm_k1_zmmmt_imm8 = 4_813, MVEX_Vcvtfxpntps2dq_zmm_k1_zmmmt_imm8 = 4_814, MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D0 = 4_815, MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D1 = 4_816, MVEX_Vcvtfxpntpd2dq_zmm_k1_zmmmt_imm8 = 4_817, Via_undoc_F30FA6F0_16 = 4_818, Via_undoc_F30FA6F0_32 = 4_819, Via_undoc_F30FA6F0_64 = 4_820, Via_undoc_F30FA6F8_16 = 4_821, Via_undoc_F30FA6F8_32 = 4_822, Via_undoc_F30FA6F8_64 = 4_823, Xsha512_16 = 4_824, Xsha512_32 = 4_825, Xsha512_64 = 4_826, Xstore_alt_16 = 4_827, Xstore_alt_32 = 4_828, Xstore_alt_64 = 4_829, Xsha512_alt_16 = 4_830, Xsha512_alt_32 = 4_831, Xsha512_alt_64 = 4_832, Zero_bytes = 4_833, Wrmsrns = 4_834, Wrmsrlist = 4_835, Rdmsrlist = 4_836, Rmpquery = 4_837, Prefetchit1_m8 = 4_838, Prefetchit0_m8 = 4_839, Aadd_m32_r32 = 4_840, Aadd_m64_r64 = 4_841, Aand_m32_r32 = 4_842, Aand_m64_r64 = 4_843, Axor_m32_r32 = 4_844, Axor_m64_r64 = 4_845, Aor_m32_r32 = 4_846, Aor_m64_r64 = 4_847, VEX_Vpdpbuud_xmm_xmm_xmmm128 = 4_848, VEX_Vpdpbuud_ymm_ymm_ymmm256 = 
4_849, VEX_Vpdpbsud_xmm_xmm_xmmm128 = 4_850, VEX_Vpdpbsud_ymm_ymm_ymmm256 = 4_851, VEX_Vpdpbssd_xmm_xmm_xmmm128 = 4_852, VEX_Vpdpbssd_ymm_ymm_ymmm256 = 4_853, VEX_Vpdpbuuds_xmm_xmm_xmmm128 = 4_854, VEX_Vpdpbuuds_ymm_ymm_ymmm256 = 4_855, VEX_Vpdpbsuds_xmm_xmm_xmmm128 = 4_856, VEX_Vpdpbsuds_ymm_ymm_ymmm256 = 4_857, VEX_Vpdpbssds_xmm_xmm_xmmm128 = 4_858, VEX_Vpdpbssds_ymm_ymm_ymmm256 = 4_859, VEX_Tdpfp16ps_tmm_tmm_tmm = 4_860, VEX_Vcvtneps2bf16_xmm_xmmm128 = 4_861, VEX_Vcvtneps2bf16_xmm_ymmm256 = 4_862, VEX_Vcvtneoph2ps_xmm_m128 = 4_863, VEX_Vcvtneoph2ps_ymm_m256 = 4_864, VEX_Vcvtneeph2ps_xmm_m128 = 4_865, VEX_Vcvtneeph2ps_ymm_m256 = 4_866, VEX_Vcvtneebf162ps_xmm_m128 = 4_867, VEX_Vcvtneebf162ps_ymm_m256 = 4_868, VEX_Vcvtneobf162ps_xmm_m128 = 4_869, VEX_Vcvtneobf162ps_ymm_m256 = 4_870, VEX_Vbcstnesh2ps_xmm_m16 = 4_871, VEX_Vbcstnesh2ps_ymm_m16 = 4_872, VEX_Vbcstnebf162ps_xmm_m16 = 4_873, VEX_Vbcstnebf162ps_ymm_m16 = 4_874, VEX_Vpmadd52luq_xmm_xmm_xmmm128 = 4_875, VEX_Vpmadd52luq_ymm_ymm_ymmm256 = 4_876, VEX_Vpmadd52huq_xmm_xmm_xmmm128 = 4_877, VEX_Vpmadd52huq_ymm_ymm_ymmm256 = 4_878, VEX_Cmpoxadd_m32_r32_r32 = 4_879, VEX_Cmpoxadd_m64_r64_r64 = 4_880, VEX_Cmpnoxadd_m32_r32_r32 = 4_881, VEX_Cmpnoxadd_m64_r64_r64 = 4_882, VEX_Cmpbxadd_m32_r32_r32 = 4_883, VEX_Cmpbxadd_m64_r64_r64 = 4_884, VEX_Cmpnbxadd_m32_r32_r32 = 4_885, VEX_Cmpnbxadd_m64_r64_r64 = 4_886, VEX_Cmpzxadd_m32_r32_r32 = 4_887, VEX_Cmpzxadd_m64_r64_r64 = 4_888, VEX_Cmpnzxadd_m32_r32_r32 = 4_889, VEX_Cmpnzxadd_m64_r64_r64 = 4_890, VEX_Cmpbexadd_m32_r32_r32 = 4_891, VEX_Cmpbexadd_m64_r64_r64 = 4_892, VEX_Cmpnbexadd_m32_r32_r32 = 4_893, VEX_Cmpnbexadd_m64_r64_r64 = 4_894, VEX_Cmpsxadd_m32_r32_r32 = 4_895, VEX_Cmpsxadd_m64_r64_r64 = 4_896, VEX_Cmpnsxadd_m32_r32_r32 = 4_897, VEX_Cmpnsxadd_m64_r64_r64 = 4_898, VEX_Cmppxadd_m32_r32_r32 = 4_899, VEX_Cmppxadd_m64_r64_r64 = 4_900, VEX_Cmpnpxadd_m32_r32_r32 = 4_901, VEX_Cmpnpxadd_m64_r64_r64 = 4_902, VEX_Cmplxadd_m32_r32_r32 = 4_903, VEX_Cmplxadd_m64_r64_r64 = 4_904, VEX_Cmpnlxadd_m32_r32_r32 = 4_905, VEX_Cmpnlxadd_m64_r64_r64 = 4_906, VEX_Cmplexadd_m32_r32_r32 = 4_907, VEX_Cmplexadd_m64_r64_r64 = 4_908, VEX_Cmpnlexadd_m32_r32_r32 = 4_909, VEX_Cmpnlexadd_m64_r64_r64 = 4_910, VEX_Tcmmrlfp16ps_tmm_tmm_tmm = 4_911, VEX_Tcmmimfp16ps_tmm_tmm_tmm = 4_912, Pbndkb = 4_913, VEX_Vsha512rnds2_ymm_ymm_xmm = 4_914, VEX_Vsha512msg1_ymm_xmm = 4_915, VEX_Vsha512msg2_ymm_ymm = 4_916, VEX_Vpdpwuud_xmm_xmm_xmmm128 = 4_917, VEX_Vpdpwuud_ymm_ymm_ymmm256 = 4_918, VEX_Vpdpwusd_xmm_xmm_xmmm128 = 4_919, VEX_Vpdpwusd_ymm_ymm_ymmm256 = 4_920, VEX_Vpdpwsud_xmm_xmm_xmmm128 = 4_921, VEX_Vpdpwsud_ymm_ymm_ymmm256 = 4_922, VEX_Vpdpwuuds_xmm_xmm_xmmm128 = 4_923, VEX_Vpdpwuuds_ymm_ymm_ymmm256 = 4_924, VEX_Vpdpwusds_xmm_xmm_xmmm128 = 4_925, VEX_Vpdpwusds_ymm_ymm_ymmm256 = 4_926, VEX_Vpdpwsuds_xmm_xmm_xmmm128 = 4_927, VEX_Vpdpwsuds_ymm_ymm_ymmm256 = 4_928, VEX_Vsm3msg1_xmm_xmm_xmmm128 = 4_929, VEX_Vsm3msg2_xmm_xmm_xmmm128 = 4_930, VEX_Vsm4key4_xmm_xmm_xmmm128 = 4_931, VEX_Vsm4key4_ymm_ymm_ymmm256 = 4_932, VEX_Vsm4rnds4_xmm_xmm_xmmm128 = 4_933, VEX_Vsm4rnds4_ymm_ymm_ymmm256 = 4_934, VEX_Vsm3rnds2_xmm_xmm_xmmm128_imm8 = 4_935,
}

x86 instruction code

Variants (Non-exhaustive)

This enum is marked as non-exhaustive
Non-exhaustive enums could have additional variants added in the future. Therefore, when matching against variants of non-exhaustive enums, an extra wildcard arm must be added to account for any future variants.
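
A minimal sketch of what that wildcard arm looks like in practice; the variant names come from this enum, while the function name and the returned strings are only illustrative:

use iced_x86::Code;

fn describe(code: Code) -> &'static str {
    match code {
        Code::INVALID => "could not be decoded",
        Code::Add_rm8_r8 | Code::Add_r8_rm8 => "8-bit ADD",
        // Code is #[non_exhaustive], so a wildcard arm is required even if
        // every current variant were listed above.
        _ => "some other instruction",
    }
}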
§

INVALID = 0

It’s an invalid instruction, e.g. a new unknown instruction, garbage, or there aren’t enough bytes left to decode the instruction.
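
A minimal sketch of how INVALID shows up when decoding, assuming the crate’s Decoder API with default features; the byte value is only an illustration of a truncated instruction:

use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 0x00 starts an ADD r/m8, r8 instruction, but the ModRM byte is missing,
    // so there aren't enough bytes to finish decoding it.
    let bytes = [0x00u8];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::INVALID);
}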

§

DeclareByte = 1

A db/.byte asm directive that can store 1-16 bytes

§

DeclareWord = 2

A dw/.word asm directive that can store 1-8 words

§

DeclareDword = 3

A dd/.int asm directive that can store 1-4 dwords

§

DeclareQword = 4

A dq/.quad asm directive that can store 1-2 qwords

§

Add_rm8_r8 = 5

ADD r/m8, r8

00 /r

8086+

16/32/64-bit
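
Each variant entry lists the instruction form, its opcode encoding, the oldest CPU that supports it, and the bitness modes it’s valid in. A minimal sketch of reading the same information at runtime, assuming the crate’s default op_code_info feature; the strings in the comments are illustrative:

use iced_x86::Code;

fn main() {
    let code = Code::Add_rm8_r8;
    let op = code.op_code();
    // Instruction form and encoding, e.g. "ADD r/m8, r8" and "00 /r"
    println!("{}", op.instruction_string());
    println!("{}", op.op_code_string());
    // The mnemonic shared by all ADD forms
    println!("{:?}", code.mnemonic());
}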

§

Add_rm16_r16 = 6

ADD r/m16, r16

o16 01 /r

8086+

16/32/64-bit

§

Add_rm32_r32 = 7

ADD r/m32, r32

o32 01 /r

386+

16/32/64-bit

§

Add_rm64_r64 = 8

ADD r/m64, r64

o64 01 /r

X64

64-bit

§

Add_r8_rm8 = 9

ADD r8, r/m8

02 /r

8086+

16/32/64-bit

§

Add_r16_rm16 = 10

ADD r16, r/m16

o16 03 /r

8086+

16/32/64-bit

§

Add_r32_rm32 = 11

ADD r32, r/m32

o32 03 /r

386+

16/32/64-bit

§

Add_r64_rm64 = 12

ADD r64, r/m64

o64 03 /r

X64

64-bit

§

Add_AL_imm8 = 13

ADD AL, imm8

04 ib

8086+

16/32/64-bit

§

Add_AX_imm16 = 14

ADD AX, imm16

o16 05 iw

8086+

16/32/64-bit

§

Add_EAX_imm32 = 15

ADD EAX, imm32

o32 05 id

386+

16/32/64-bit

§

Add_RAX_imm32 = 16

ADD RAX, imm32

o64 05 id

X64

64-bit

§

Pushw_ES = 17

PUSH ES

o16 06

8086+

16/32-bit

§

Pushd_ES = 18

PUSH ES

o32 06

386+

16/32-bit

§

Popw_ES = 19

POP ES

o16 07

8086+

16/32-bit

§

Popd_ES = 20

POP ES

o32 07

386+

16/32-bit

§

Or_rm8_r8 = 21

OR r/m8, r8

08 /r

8086+

16/32/64-bit

§

Or_rm16_r16 = 22

OR r/m16, r16

o16 09 /r

8086+

16/32/64-bit

§

Or_rm32_r32 = 23

OR r/m32, r32

o32 09 /r

386+

16/32/64-bit

§

Or_rm64_r64 = 24

OR r/m64, r64

o64 09 /r

X64

64-bit

§

Or_r8_rm8 = 25

OR r8, r/m8

0A /r

8086+

16/32/64-bit

§

Or_r16_rm16 = 26

OR r16, r/m16

o16 0B /r

8086+

16/32/64-bit

§

Or_r32_rm32 = 27

OR r32, r/m32

o32 0B /r

386+

16/32/64-bit

§

Or_r64_rm64 = 28

OR r64, r/m64

o64 0B /r

X64

64-bit

§

Or_AL_imm8 = 29

OR AL, imm8

0C ib

8086+

16/32/64-bit

§

Or_AX_imm16 = 30

OR AX, imm16

o16 0D iw

8086+

16/32/64-bit

§

Or_EAX_imm32 = 31

OR EAX, imm32

o32 0D id

386+

16/32/64-bit

§

Or_RAX_imm32 = 32

OR RAX, imm32

o64 0D id

X64

64-bit

§

Pushw_CS = 33

PUSH CS

o16 0E

8086+

16/32-bit

§

Pushd_CS = 34

PUSH CS

o32 0E

386+

16/32-bit

§

Popw_CS = 35

POP CS

o16 0F

8086

16-bit

§

Adc_rm8_r8 = 36

ADC r/m8, r8

10 /r

8086+

16/32/64-bit

§

Adc_rm16_r16 = 37

ADC r/m16, r16

o16 11 /r

8086+

16/32/64-bit

§

Adc_rm32_r32 = 38

ADC r/m32, r32

o32 11 /r

386+

16/32/64-bit

§

Adc_rm64_r64 = 39

ADC r/m64, r64

o64 11 /r

X64

64-bit

§

Adc_r8_rm8 = 40

ADC r8, r/m8

12 /r

8086+

16/32/64-bit

§

Adc_r16_rm16 = 41

ADC r16, r/m16

o16 13 /r

8086+

16/32/64-bit

§

Adc_r32_rm32 = 42

ADC r32, r/m32

o32 13 /r

386+

16/32/64-bit

§

Adc_r64_rm64 = 43

ADC r64, r/m64

o64 13 /r

X64

64-bit

§

Adc_AL_imm8 = 44

ADC AL, imm8

14 ib

8086+

16/32/64-bit

§

Adc_AX_imm16 = 45

ADC AX, imm16

o16 15 iw

8086+

16/32/64-bit

§

Adc_EAX_imm32 = 46

ADC EAX, imm32

o32 15 id

386+

16/32/64-bit

§

Adc_RAX_imm32 = 47

ADC RAX, imm32

o64 15 id

X64

64-bit

§

Pushw_SS = 48

PUSH SS

o16 16

8086+

16/32-bit

§

Pushd_SS = 49

PUSH SS

o32 16

386+

16/32-bit

§

Popw_SS = 50

POP SS

o16 17

8086+

16/32-bit

§

Popd_SS = 51

POP SS

o32 17

386+

16/32-bit

§

Sbb_rm8_r8 = 52

SBB r/m8, r8

18 /r

8086+

16/32/64-bit

§

Sbb_rm16_r16 = 53

SBB r/m16, r16

o16 19 /r

8086+

16/32/64-bit

§

Sbb_rm32_r32 = 54

SBB r/m32, r32

o32 19 /r

386+

16/32/64-bit

§

Sbb_rm64_r64 = 55

SBB r/m64, r64

o64 19 /r

X64

64-bit

§

Sbb_r8_rm8 = 56

SBB r8, r/m8

1A /r

8086+

16/32/64-bit

§

Sbb_r16_rm16 = 57

SBB r16, r/m16

o16 1B /r

8086+

16/32/64-bit

§

Sbb_r32_rm32 = 58

SBB r32, r/m32

o32 1B /r

386+

16/32/64-bit

§

Sbb_r64_rm64 = 59

SBB r64, r/m64

o64 1B /r

X64

64-bit

§

Sbb_AL_imm8 = 60

SBB AL, imm8

1C ib

8086+

16/32/64-bit

§

Sbb_AX_imm16 = 61

SBB AX, imm16

o16 1D iw

8086+

16/32/64-bit

§

Sbb_EAX_imm32 = 62

SBB EAX, imm32

o32 1D id

386+

16/32/64-bit

§

Sbb_RAX_imm32 = 63

SBB RAX, imm32

o64 1D id

X64

64-bit

§

Pushw_DS = 64

PUSH DS

o16 1E

8086+

16/32-bit

§

Pushd_DS = 65

PUSH DS

o32 1E

386+

16/32-bit

§

Popw_DS = 66

POP DS

o16 1F

8086+

16/32-bit

§

Popd_DS = 67

POP DS

o32 1F

386+

16/32-bit

§

And_rm8_r8 = 68

AND r/m8, r8

20 /r

8086+

16/32/64-bit

§

And_rm16_r16 = 69

AND r/m16, r16

o16 21 /r

8086+

16/32/64-bit

§

And_rm32_r32 = 70

AND r/m32, r32

o32 21 /r

386+

16/32/64-bit

§

And_rm64_r64 = 71

AND r/m64, r64

o64 21 /r

X64

64-bit

§

And_r8_rm8 = 72

AND r8, r/m8

22 /r

8086+

16/32/64-bit

§

And_r16_rm16 = 73

AND r16, r/m16

o16 23 /r

8086+

16/32/64-bit

§

And_r32_rm32 = 74

AND r32, r/m32

o32 23 /r

386+

16/32/64-bit

§

And_r64_rm64 = 75

AND r64, r/m64

o64 23 /r

X64

64-bit

§

And_AL_imm8 = 76

AND AL, imm8

24 ib

8086+

16/32/64-bit

§

And_AX_imm16 = 77

AND AX, imm16

o16 25 iw

8086+

16/32/64-bit

§

And_EAX_imm32 = 78

AND EAX, imm32

o32 25 id

386+

16/32/64-bit

§

And_RAX_imm32 = 79

AND RAX, imm32

o64 25 id

X64

64-bit

§

Daa = 80

DAA

27

8086+

16/32-bit

§

Sub_rm8_r8 = 81

SUB r/m8, r8

28 /r

8086+

16/32/64-bit

§

Sub_rm16_r16 = 82

SUB r/m16, r16

o16 29 /r

8086+

16/32/64-bit

§

Sub_rm32_r32 = 83

SUB r/m32, r32

o32 29 /r

386+

16/32/64-bit

§

Sub_rm64_r64 = 84

SUB r/m64, r64

o64 29 /r

X64

64-bit

§

Sub_r8_rm8 = 85

SUB r8, r/m8

2A /r

8086+

16/32/64-bit

§

Sub_r16_rm16 = 86

SUB r16, r/m16

o16 2B /r

8086+

16/32/64-bit

§

Sub_r32_rm32 = 87

SUB r32, r/m32

o32 2B /r

386+

16/32/64-bit

§

Sub_r64_rm64 = 88

SUB r64, r/m64

o64 2B /r

X64

64-bit

§

Sub_AL_imm8 = 89

SUB AL, imm8

2C ib

8086+

16/32/64-bit

§

Sub_AX_imm16 = 90

SUB AX, imm16

o16 2D iw

8086+

16/32/64-bit

§

Sub_EAX_imm32 = 91

SUB EAX, imm32

o32 2D id

386+

16/32/64-bit

§

Sub_RAX_imm32 = 92

SUB RAX, imm32

o64 2D id

X64

64-bit

§

Das = 93

DAS

2F

8086+

16/32-bit

§

Xor_rm8_r8 = 94

XOR r/m8, r8

30 /r

8086+

16/32/64-bit

§

Xor_rm16_r16 = 95

XOR r/m16, r16

o16 31 /r

8086+

16/32/64-bit

§

Xor_rm32_r32 = 96

XOR r/m32, r32

o32 31 /r

386+

16/32/64-bit

§

Xor_rm64_r64 = 97

XOR r/m64, r64

o64 31 /r

X64

64-bit

§

Xor_r8_rm8 = 98

XOR r8, r/m8

32 /r

8086+

16/32/64-bit

§

Xor_r16_rm16 = 99

XOR r16, r/m16

o16 33 /r

8086+

16/32/64-bit

§

Xor_r32_rm32 = 100

XOR r32, r/m32

o32 33 /r

386+

16/32/64-bit

§

Xor_r64_rm64 = 101

XOR r64, r/m64

o64 33 /r

X64

64-bit

§

Xor_AL_imm8 = 102

XOR AL, imm8

34 ib

8086+

16/32/64-bit

§

Xor_AX_imm16 = 103

XOR AX, imm16

o16 35 iw

8086+

16/32/64-bit

§

Xor_EAX_imm32 = 104

XOR EAX, imm32

o32 35 id

386+

16/32/64-bit

§

Xor_RAX_imm32 = 105

XOR RAX, imm32

o64 35 id

X64

64-bit

§

Aaa = 106

AAA

37

8086+

16/32-bit

§

Cmp_rm8_r8 = 107

CMP r/m8, r8

38 /r

8086+

16/32/64-bit

§

Cmp_rm16_r16 = 108

CMP r/m16, r16

o16 39 /r

8086+

16/32/64-bit

§

Cmp_rm32_r32 = 109

CMP r/m32, r32

o32 39 /r

386+

16/32/64-bit

§

Cmp_rm64_r64 = 110

CMP r/m64, r64

o64 39 /r

X64

64-bit

§

Cmp_r8_rm8 = 111

CMP r8, r/m8

3A /r

8086+

16/32/64-bit

§

Cmp_r16_rm16 = 112

CMP r16, r/m16

o16 3B /r

8086+

16/32/64-bit

§

Cmp_r32_rm32 = 113

CMP r32, r/m32

o32 3B /r

386+

16/32/64-bit

§

Cmp_r64_rm64 = 114

CMP r64, r/m64

o64 3B /r

X64

64-bit

§

Cmp_AL_imm8 = 115

CMP AL, imm8

3C ib

8086+

16/32/64-bit

§

Cmp_AX_imm16 = 116

CMP AX, imm16

o16 3D iw

8086+

16/32/64-bit

§

Cmp_EAX_imm32 = 117

CMP EAX, imm32

o32 3D id

386+

16/32/64-bit

§

Cmp_RAX_imm32 = 118

CMP RAX, imm32

o64 3D id

X64

64-bit

§

Aas = 119

AAS

3F

8086+

16/32-bit

§

Inc_r16 = 120

INC r16

o16 40+rw

8086+

16/32-bit

§

Inc_r32 = 121

INC r32

o32 40+rd

386+

16/32-bit

§

Dec_r16 = 122

DEC r16

o16 48+rw

8086+

16/32-bit

§

Dec_r32 = 123

DEC r32

o32 48+rd

386+

16/32-bit

§

Push_r16 = 124

PUSH r16

o16 50+rw

8086+

16/32/64-bit

§

Push_r32 = 125

PUSH r32

o32 50+rd

386+

16/32-bit

§

Push_r64 = 126

PUSH r64

o64 50+ro

X64

64-bit

§

Pop_r16 = 127

POP r16

o16 58+rw

8086+

16/32/64-bit

§

Pop_r32 = 128

POP r32

o32 58+rd

386+

16/32-bit

§

Pop_r64 = 129

POP r64

o64 58+ro

X64

64-bit

§

Pushaw = 130

PUSHA

o16 60

186+

16/32-bit

§

Pushad = 131

PUSHAD

o32 60

386+

16/32-bit

§

Popaw = 132

POPA

o16 61

186+

16/32-bit

§

Popad = 133

POPAD

o32 61

386+

16/32-bit

§

Bound_r16_m1616 = 134

BOUND r16, m16&16

o16 62 /r

186+

16/32-bit

§

Bound_r32_m3232 = 135

BOUND r32, m32&32

o32 62 /r

386+

16/32-bit

§

Arpl_rm16_r16 = 136

ARPL r/m16, r16

o16 63 /r

286+

16/32-bit

§

Arpl_r32m16_r32 = 137

ARPL r32/m16, r32

o32 63 /r

386+

16/32-bit

§

Movsxd_r16_rm16 = 138

MOVSXD r16, r/m16

o16 63 /r

X64

64-bit

§

Movsxd_r32_rm32 = 139

MOVSXD r32, r/m32

o32 63 /r

X64

64-bit

§

Movsxd_r64_rm32 = 140

MOVSXD r64, r/m32

o64 63 /r

X64

64-bit

§

Push_imm16 = 141

PUSH imm16

o16 68 iw

186+

16/32/64-bit

§

Pushd_imm32 = 142

PUSH imm32

o32 68 id

386+

16/32-bit

§

Pushq_imm32 = 143

PUSH imm32

o64 68 id

X64

64-bit

§

Imul_r16_rm16_imm16 = 144

IMUL r16, r/m16, imm16

o16 69 /r iw

186+

16/32/64-bit

§

Imul_r32_rm32_imm32 = 145

IMUL r32, r/m32, imm32

o32 69 /r id

386+

16/32/64-bit

§

Imul_r64_rm64_imm32 = 146

IMUL r64, r/m64, imm32

o64 69 /r id

X64

64-bit

§

Pushw_imm8 = 147

PUSH imm8

o16 6A ib

186+

16/32/64-bit

§

Pushd_imm8 = 148

PUSH imm8

o32 6A ib

386+

16/32-bit

§

Pushq_imm8 = 149

PUSH imm8

o64 6A ib

X64

64-bit

§

Imul_r16_rm16_imm8 = 150

IMUL r16, r/m16, imm8

o16 6B /r ib

186+

16/32/64-bit

§

Imul_r32_rm32_imm8 = 151

IMUL r32, r/m32, imm8

o32 6B /r ib

386+

16/32/64-bit

§

Imul_r64_rm64_imm8 = 152

IMUL r64, r/m64, imm8

o64 6B /r ib

X64

64-bit

§

Insb_m8_DX = 153

INSB

6C

186+

16/32/64-bit

§

Insw_m16_DX = 154

INSW

o16 6D

186+

16/32/64-bit

§

Insd_m32_DX = 155

INSD

o32 6D

386+

16/32/64-bit

§

Outsb_DX_m8 = 156

OUTSB

6E

186+

16/32/64-bit

§

Outsw_DX_m16 = 157

OUTSW

o16 6F

186+

16/32/64-bit

§

Outsd_DX_m32 = 158

OUTSD

o32 6F

386+

16/32/64-bit

§

Jo_rel8_16 = 159

JO rel8

o16 70 cb

8086+

16/32/64-bit

§

Jo_rel8_32 = 160

JO rel8

o32 70 cb

386+

16/32-bit

§

Jo_rel8_64 = 161

JO rel8

o64 70 cb

X64

64-bit

§

Jno_rel8_16 = 162

JNO rel8

o16 71 cb

8086+

16/32/64-bit

§

Jno_rel8_32 = 163

JNO rel8

o32 71 cb

386+

16/32-bit

§

Jno_rel8_64 = 164

JNO rel8

o64 71 cb

X64

64-bit

§

Jb_rel8_16 = 165

JB rel8

o16 72 cb

8086+

16/32/64-bit

§

Jb_rel8_32 = 166

JB rel8

o32 72 cb

386+

16/32-bit

§

Jb_rel8_64 = 167

JB rel8

o64 72 cb

X64

64-bit

§

Jae_rel8_16 = 168

JAE rel8

o16 73 cb

8086+

16/32/64-bit

§

Jae_rel8_32 = 169

JAE rel8

o32 73 cb

386+

16/32-bit

§

Jae_rel8_64 = 170

JAE rel8

o64 73 cb

X64

64-bit

§

Je_rel8_16 = 171

JE rel8

o16 74 cb

8086+

16/32/64-bit

§

Je_rel8_32 = 172

JE rel8

o32 74 cb

386+

16/32-bit

§

Je_rel8_64 = 173

JE rel8

o64 74 cb

X64

64-bit

§

Jne_rel8_16 = 174

JNE rel8

o16 75 cb

8086+

16/32/64-bit

§

Jne_rel8_32 = 175

JNE rel8

o32 75 cb

386+

16/32-bit

§

Jne_rel8_64 = 176

JNE rel8

o64 75 cb

X64

64-bit

§

Jbe_rel8_16 = 177

JBE rel8

o16 76 cb

8086+

16/32/64-bit

§

Jbe_rel8_32 = 178

JBE rel8

o32 76 cb

386+

16/32-bit

§

Jbe_rel8_64 = 179

JBE rel8

o64 76 cb

X64

64-bit

§

Ja_rel8_16 = 180

JA rel8

o16 77 cb

8086+

16/32/64-bit

§

Ja_rel8_32 = 181

JA rel8

o32 77 cb

386+

16/32-bit

§

Ja_rel8_64 = 182

JA rel8

o64 77 cb

X64

64-bit

§

Js_rel8_16 = 183

JS rel8

o16 78 cb

8086+

16/32/64-bit

§

Js_rel8_32 = 184

JS rel8

o32 78 cb

386+

16/32-bit

§

Js_rel8_64 = 185

JS rel8

o64 78 cb

X64

64-bit

§

Jns_rel8_16 = 186

JNS rel8

o16 79 cb

8086+

16/32/64-bit

§

Jns_rel8_32 = 187

JNS rel8

o32 79 cb

386+

16/32-bit

§

Jns_rel8_64 = 188

JNS rel8

o64 79 cb

X64

64-bit

§

Jp_rel8_16 = 189

JP rel8

o16 7A cb

8086+

16/32/64-bit

§

Jp_rel8_32 = 190

JP rel8

o32 7A cb

386+

16/32-bit

§

Jp_rel8_64 = 191

JP rel8

o64 7A cb

X64

64-bit

§

Jnp_rel8_16 = 192

JNP rel8

o16 7B cb

8086+

16/32/64-bit

§

Jnp_rel8_32 = 193

JNP rel8

o32 7B cb

386+

16/32-bit

§

Jnp_rel8_64 = 194

JNP rel8

o64 7B cb

X64

64-bit

§

Jl_rel8_16 = 195

JL rel8

o16 7C cb

8086+

16/32/64-bit

§

Jl_rel8_32 = 196

JL rel8

o32 7C cb

386+

16/32-bit

§

Jl_rel8_64 = 197

JL rel8

o64 7C cb

X64

64-bit

§

Jge_rel8_16 = 198

JGE rel8

o16 7D cb

8086+

16/32/64-bit

§

Jge_rel8_32 = 199

JGE rel8

o32 7D cb

386+

16/32-bit

§

Jge_rel8_64 = 200

JGE rel8

o64 7D cb

X64

64-bit

§

Jle_rel8_16 = 201

JLE rel8

o16 7E cb

8086+

16/32/64-bit

§

Jle_rel8_32 = 202

JLE rel8

o32 7E cb

386+

16/32-bit

§

Jle_rel8_64 = 203

JLE rel8

o64 7E cb

X64

64-bit

§

Jg_rel8_16 = 204

JG rel8

o16 7F cb

8086+

16/32/64-bit

§

Jg_rel8_32 = 205

JG rel8

o32 7F cb

386+

16/32-bit

§

Jg_rel8_64 = 206

JG rel8

o64 7F cb

X64

64-bit

§
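
The short-jump rows show that one mnemonic (e.g. `JE rel8`, opcode 74 cb) is split into `_16`, `_32`, and `_64` variants by the effective operand size. A sketch of the same bytes decoded under different bitnesses (illustrative only):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // JE rel8 (74 cb): the Code value depends on the decoder bitness /
    // effective operand size, not on the bytes alone
    let bytes = [0x74, 0x10];

    let mut d16 = Decoder::new(16, &bytes, DecoderOptions::NONE);
    assert_eq!(d16.decode().code(), Code::Je_rel8_16);

    let mut d32 = Decoder::new(32, &bytes, DecoderOptions::NONE);
    assert_eq!(d32.decode().code(), Code::Je_rel8_32);

    let mut d64 = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(d64.decode().code(), Code::Je_rel8_64);
}
```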

Add_rm8_imm8 = 207

ADD r/m8, imm8

80 /0 ib

8086+

16/32/64-bit

§

Or_rm8_imm8 = 208

OR r/m8, imm8

80 /1 ib

8086+

16/32/64-bit

§

Adc_rm8_imm8 = 209

ADC r/m8, imm8

80 /2 ib

8086+

16/32/64-bit

§

Sbb_rm8_imm8 = 210

SBB r/m8, imm8

80 /3 ib

8086+

16/32/64-bit

§

And_rm8_imm8 = 211

AND r/m8, imm8

80 /4 ib

8086+

16/32/64-bit

§

Sub_rm8_imm8 = 212

SUB r/m8, imm8

80 /5 ib

8086+

16/32/64-bit

§

Xor_rm8_imm8 = 213

XOR r/m8, imm8

80 /6 ib

8086+

16/32/64-bit

§

Cmp_rm8_imm8 = 214

CMP r/m8, imm8

80 /7 ib

8086+

16/32/64-bit

§

Add_rm16_imm16 = 215

ADD r/m16, imm16

o16 81 /0 iw

8086+

16/32/64-bit

§

Add_rm32_imm32 = 216

ADD r/m32, imm32

o32 81 /0 id

386+

16/32/64-bit

§

Add_rm64_imm32 = 217

ADD r/m64, imm32

o64 81 /0 id

X64

64-bit

§

Or_rm16_imm16 = 218

OR r/m16, imm16

o16 81 /1 iw

8086+

16/32/64-bit

§

Or_rm32_imm32 = 219

OR r/m32, imm32

o32 81 /1 id

386+

16/32/64-bit

§

Or_rm64_imm32 = 220

OR r/m64, imm32

o64 81 /1 id

X64

64-bit

§

Adc_rm16_imm16 = 221

ADC r/m16, imm16

o16 81 /2 iw

8086+

16/32/64-bit

§

Adc_rm32_imm32 = 222

ADC r/m32, imm32

o32 81 /2 id

386+

16/32/64-bit

§

Adc_rm64_imm32 = 223

ADC r/m64, imm32

o64 81 /2 id

X64

64-bit

§

Sbb_rm16_imm16 = 224

SBB r/m16, imm16

o16 81 /3 iw

8086+

16/32/64-bit

§

Sbb_rm32_imm32 = 225

SBB r/m32, imm32

o32 81 /3 id

386+

16/32/64-bit

§

Sbb_rm64_imm32 = 226

SBB r/m64, imm32

o64 81 /3 id

X64

64-bit

§

And_rm16_imm16 = 227

AND r/m16, imm16

o16 81 /4 iw

8086+

16/32/64-bit

§

And_rm32_imm32 = 228

AND r/m32, imm32

o32 81 /4 id

386+

16/32/64-bit

§

And_rm64_imm32 = 229

AND r/m64, imm32

o64 81 /4 id

X64

64-bit

§

Sub_rm16_imm16 = 230

SUB r/m16, imm16

o16 81 /5 iw

8086+

16/32/64-bit

§

Sub_rm32_imm32 = 231

SUB r/m32, imm32

o32 81 /5 id

386+

16/32/64-bit

§

Sub_rm64_imm32 = 232

SUB r/m64, imm32

o64 81 /5 id

X64

64-bit

§

Xor_rm16_imm16 = 233

XOR r/m16, imm16

o16 81 /6 iw

8086+

16/32/64-bit

§

Xor_rm32_imm32 = 234

XOR r/m32, imm32

o32 81 /6 id

386+

16/32/64-bit

§

Xor_rm64_imm32 = 235

XOR r/m64, imm32

o64 81 /6 id

X64

64-bit

§

Cmp_rm16_imm16 = 236

CMP r/m16, imm16

o16 81 /7 iw

8086+

16/32/64-bit

§

Cmp_rm32_imm32 = 237

CMP r/m32, imm32

o32 81 /7 id

386+

16/32/64-bit

§

Cmp_rm64_imm32 = 238

CMP r/m64, imm32

o64 81 /7 id

X64

64-bit

§

Add_rm8_imm8_82 = 239

ADD r/m8, imm8

82 /0 ib

8086+

16/32-bit

§

Or_rm8_imm8_82 = 240

OR r/m8, imm8

82 /1 ib

8086+

16/32-bit

§

Adc_rm8_imm8_82 = 241

ADC r/m8, imm8

82 /2 ib

8086+

16/32-bit

§

Sbb_rm8_imm8_82 = 242

SBB r/m8, imm8

82 /3 ib

8086+

16/32-bit

§

And_rm8_imm8_82 = 243

AND r/m8, imm8

82 /4 ib

8086+

16/32-bit

§

Sub_rm8_imm8_82 = 244

SUB r/m8, imm8

82 /5 ib

8086+

16/32-bit

§

Xor_rm8_imm8_82 = 245

XOR r/m8, imm8

82 /6 ib

8086+

16/32-bit

§

Cmp_rm8_imm8_82 = 246

CMP r/m8, imm8

82 /7 ib

8086+

16/32-bit

§

Add_rm16_imm8 = 247

ADD r/m16, imm8

o16 83 /0 ib

8086+

16/32/64-bit

§

Add_rm32_imm8 = 248

ADD r/m32, imm8

o32 83 /0 ib

386+

16/32/64-bit

§

Add_rm64_imm8 = 249

ADD r/m64, imm8

o64 83 /0 ib

X64

64-bit

§

Or_rm16_imm8 = 250

OR r/m16, imm8

o16 83 /1 ib

8086+

16/32/64-bit

§

Or_rm32_imm8 = 251

OR r/m32, imm8

o32 83 /1 ib

386+

16/32/64-bit

§

Or_rm64_imm8 = 252

OR r/m64, imm8

o64 83 /1 ib

X64

64-bit

§

Adc_rm16_imm8 = 253

ADC r/m16, imm8

o16 83 /2 ib

8086+

16/32/64-bit

§

Adc_rm32_imm8 = 254

ADC r/m32, imm8

o32 83 /2 ib

386+

16/32/64-bit

§

Adc_rm64_imm8 = 255

ADC r/m64, imm8

o64 83 /2 ib

X64

64-bit

§

Sbb_rm16_imm8 = 256

SBB r/m16, imm8

o16 83 /3 ib

8086+

16/32/64-bit

§

Sbb_rm32_imm8 = 257

SBB r/m32, imm8

o32 83 /3 ib

386+

16/32/64-bit

§

Sbb_rm64_imm8 = 258

SBB r/m64, imm8

o64 83 /3 ib

X64

64-bit

§

And_rm16_imm8 = 259

AND r/m16, imm8

o16 83 /4 ib

8086+

16/32/64-bit

§

And_rm32_imm8 = 260

AND r/m32, imm8

o32 83 /4 ib

386+

16/32/64-bit

§

And_rm64_imm8 = 261

AND r/m64, imm8

o64 83 /4 ib

X64

64-bit

§

Sub_rm16_imm8 = 262

SUB r/m16, imm8

o16 83 /5 ib

8086+

16/32/64-bit

§

Sub_rm32_imm8 = 263

SUB r/m32, imm8

o32 83 /5 ib

386+

16/32/64-bit

§

Sub_rm64_imm8 = 264

SUB r/m64, imm8

o64 83 /5 ib

X64

64-bit

§

Xor_rm16_imm8 = 265

XOR r/m16, imm8

o16 83 /6 ib

8086+

16/32/64-bit

§

Xor_rm32_imm8 = 266

XOR r/m32, imm8

o32 83 /6 ib

386+

16/32/64-bit

§

Xor_rm64_imm8 = 267

XOR r/m64, imm8

o64 83 /6 ib

X64

64-bit

§

Cmp_rm16_imm8 = 268

CMP r/m16, imm8

o16 83 /7 ib

8086+

16/32/64-bit

§

Cmp_rm32_imm8 = 269

CMP r/m32, imm8

o32 83 /7 ib

386+

16/32/64-bit

§

Cmp_rm64_imm8 = 270

CMP r/m64, imm8

o64 83 /7 ib

X64

64-bit

§
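
For the group-1 rows above, `/0` … `/7` selects the operation through the ModRM reg field, and the 80/81 vs 83 base opcode decides whether the immediate is full-width or a sign-extended imm8, which is why ADD alone has several `Code` variants. A minimal decoding sketch (bytes hand-assembled for illustration):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 83 /0 ib: ADD r/m32, imm8 (sign-extended 8-bit immediate)
    let bytes = [0x83, 0xC0, 0x01]; // add eax, 1
    let mut decoder = Decoder::new(32, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Add_rm32_imm8);

    // 81 /0 id: ADD r/m32, imm32 (full-width immediate, same result)
    let bytes = [0x81, 0xC0, 0x01, 0x00, 0x00, 0x00]; // add eax, 1
    let mut decoder = Decoder::new(32, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Add_rm32_imm32);
}
```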

Test_rm8_r8 = 271

TEST r/m8, r8

84 /r

8086+

16/32/64-bit

§

Test_rm16_r16 = 272

TEST r/m16, r16

o16 85 /r

8086+

16/32/64-bit

§

Test_rm32_r32 = 273

TEST r/m32, r32

o32 85 /r

386+

16/32/64-bit

§

Test_rm64_r64 = 274

TEST r/m64, r64

o64 85 /r

X64

64-bit

§

Xchg_rm8_r8 = 275

XCHG r/m8, r8

86 /r

8086+

16/32/64-bit

§

Xchg_rm16_r16 = 276

XCHG r/m16, r16

o16 87 /r

8086+

16/32/64-bit

§

Xchg_rm32_r32 = 277

XCHG r/m32, r32

o32 87 /r

386+

16/32/64-bit

§

Xchg_rm64_r64 = 278

XCHG r/m64, r64

o64 87 /r

X64

64-bit

§

Mov_rm8_r8 = 279

MOV r/m8, r8

88 /r

8086+

16/32/64-bit

§

Mov_rm16_r16 = 280

MOV r/m16, r16

o16 89 /r

8086+

16/32/64-bit

§

Mov_rm32_r32 = 281

MOV r/m32, r32

o32 89 /r

386+

16/32/64-bit

§

Mov_rm64_r64 = 282

MOV r/m64, r64

o64 89 /r

X64

64-bit

§

Mov_r8_rm8 = 283

MOV r8, r/m8

8A /r

8086+

16/32/64-bit

§

Mov_r16_rm16 = 284

MOV r16, r/m16

o16 8B /r

8086+

16/32/64-bit

§

Mov_r32_rm32 = 285

MOV r32, r/m32

o32 8B /r

386+

16/32/64-bit

§

Mov_r64_rm64 = 286

MOV r64, r/m64

o64 8B /r

X64

64-bit

§

Mov_rm16_Sreg = 287

MOV r/m16, Sreg

o16 8C /r

8086+

16/32/64-bit

§

Mov_r32m16_Sreg = 288

MOV r32/m16, Sreg

o32 8C /r

386+

16/32/64-bit

§

Mov_r64m16_Sreg = 289

MOV r64/m16, Sreg

o64 8C /r

X64

64-bit

§

Lea_r16_m = 290

LEA r16, m

o16 8D /r

8086+

16/32/64-bit

§

Lea_r32_m = 291

LEA r32, m

o32 8D /r

386+

16/32/64-bit

§

Lea_r64_m = 292

LEA r64, m

o64 8D /r

X64

64-bit

§

Mov_Sreg_rm16 = 293

MOV Sreg, r/m16

o16 8E /r

8086+

16/32/64-bit

§

Mov_Sreg_r32m16 = 294

MOV Sreg, r32/m16

o32 8E /r

386+

16/32/64-bit

§

Mov_Sreg_r64m16 = 295

MOV Sreg, r64/m16

o64 8E /r

X64

64-bit

§

Pop_rm16 = 296

POP r/m16

o16 8F /0

8086+

16/32/64-bit

§

Pop_rm32 = 297

POP r/m32

o32 8F /0

386+

16/32-bit

§

Pop_rm64 = 298

POP r/m64

o64 8F /0

X64

64-bit

§

Nopw = 299

NOP

o16 90

8086+

16/32/64-bit

§

Nopd = 300

NOP

o32 90

8086+

16/32/64-bit

§

Nopq = 301

NOP

o64 90

8086+

64-bit

§

Xchg_r16_AX = 302

XCHG r16, AX

o16 90+rw

8086+

16/32/64-bit

§

Xchg_r32_EAX = 303

XCHG r32, EAX

o32 90+rd

386+

16/32/64-bit

§

Xchg_r64_RAX = 304

XCHG r64, RAX

o64 90+ro

X64

64-bit

§

Pause = 305

PAUSE

F3 90

Pentium 4 or later

16/32/64-bit

§
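
Opcode 90 covers three distinct rows here: plain 90 is NOP, `F3 90` is PAUSE, and `90+r` with a non-zero register index is XCHG with the accumulator. A sketch under default decoder settings (the exact `Code` values assume 64-bit mode with its default 32-bit operand size):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // Plain 90 in 64-bit mode decodes as NOP with 32-bit operand size
    let bytes = [0x90];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Nopd);

    // F3 90 is PAUSE
    let bytes = [0xF3, 0x90];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Pause);

    // 90+rd with register index 1: 0x91 = XCHG ECX, EAX
    let bytes = [0x91];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Xchg_r32_EAX);
}
```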

Cbw = 306

CBW

o16 98

8086+

16/32/64-bit

§

Cwde = 307

CWDE

o32 98

386+

16/32/64-bit

§

Cdqe = 308

CDQE

o64 98

X64

64-bit

§

Cwd = 309

CWD

o16 99

8086+

16/32/64-bit

§

Cdq = 310

CDQ

o32 99

386+

16/32/64-bit

§

Cqo = 311

CQO

o64 99

X64

64-bit

§

Call_ptr1616 = 312

CALL ptr16:16

o16 9A cd

8086+

16/32-bit

§

Call_ptr1632 = 313

CALL ptr16:32

o32 9A cp

386+

16/32-bit

§

Wait = 314

WAIT

9B

8086+

16/32/64-bit

§

Pushfw = 315

PUSHF

o16 9C

8086+

16/32/64-bit

§

Pushfd = 316

PUSHFD

o32 9C

386+

16/32-bit

§

Pushfq = 317

PUSHFQ

o64 9C

X64

64-bit

§

Popfw = 318

POPF

o16 9D

8086+

16/32/64-bit

§

Popfd = 319

POPFD

o32 9D

386+

16/32-bit

§

Popfq = 320

POPFQ

o64 9D

X64

64-bit

§

Sahf = 321

SAHF

9E

8086+

16/32/64-bit

§

Lahf = 322

LAHF

9F

8086+

16/32/64-bit

§

Mov_AL_moffs8 = 323

MOV AL, moffs8

A0 mo

8086+

16/32/64-bit

§

Mov_AX_moffs16 = 324

MOV AX, moffs16

o16 A1 mo

8086+

16/32/64-bit

§

Mov_EAX_moffs32 = 325

MOV EAX, moffs32

o32 A1 mo

386+

16/32/64-bit

§

Mov_RAX_moffs64 = 326

MOV RAX, moffs64

o64 A1 mo

X64

64-bit

§

Mov_moffs8_AL = 327

MOV moffs8, AL

A2 mo

8086+

16/32/64-bit

§

Mov_moffs16_AX = 328

MOV moffs16, AX

o16 A3 mo

8086+

16/32/64-bit

§

Mov_moffs32_EAX = 329

MOV moffs32, EAX

o32 A3 mo

386+

16/32/64-bit

§

Mov_moffs64_RAX = 330

MOV moffs64, RAX

o64 A3 mo

X64

64-bit

§

Movsb_m8_m8 = 331

MOVSB

A4

8086+

16/32/64-bit

§

Movsw_m16_m16 = 332

MOVSW

o16 A5

8086+

16/32/64-bit

§

Movsd_m32_m32 = 333

MOVSD

o32 A5

386+

16/32/64-bit

§

Movsq_m64_m64 = 334

MOVSQ

o64 A5

X64

64-bit

§

Cmpsb_m8_m8 = 335

CMPSB

A6

8086+

16/32/64-bit

§

Cmpsw_m16_m16 = 336

CMPSW

o16 A7

8086+

16/32/64-bit

§

Cmpsd_m32_m32 = 337

CMPSD

o32 A7

386+

16/32/64-bit

§

Cmpsq_m64_m64 = 338

CMPSQ

o64 A7

X64

64-bit

§

Test_AL_imm8 = 339

TEST AL, imm8

A8 ib

8086+

16/32/64-bit

§

Test_AX_imm16 = 340

TEST AX, imm16

o16 A9 iw

8086+

16/32/64-bit

§

Test_EAX_imm32 = 341

TEST EAX, imm32

o32 A9 id

386+

16/32/64-bit

§

Test_RAX_imm32 = 342

TEST RAX, imm32

o64 A9 id

X64

64-bit

§

Stosb_m8_AL = 343

STOSB

AA

8086+

16/32/64-bit

§

Stosw_m16_AX = 344

STOSW

o16 AB

8086+

16/32/64-bit

§

Stosd_m32_EAX = 345

STOSD

o32 AB

386+

16/32/64-bit

§

Stosq_m64_RAX = 346

STOSQ

o64 AB

X64

64-bit

§

Lodsb_AL_m8 = 347

LODSB

AC

8086+

16/32/64-bit

§

Lodsw_AX_m16 = 348

LODSW

o16 AD

8086+

16/32/64-bit

§

Lodsd_EAX_m32 = 349

LODSD

o32 AD

386+

16/32/64-bit

§

Lodsq_RAX_m64 = 350

LODSQ

o64 AD

X64

64-bit

§

Scasb_AL_m8 = 351

SCASB

AE

8086+

16/32/64-bit

§

Scasw_AX_m16 = 352

SCASW

o16 AF

8086+

16/32/64-bit

§

Scasd_EAX_m32 = 353

SCASD

o32 AF

386+

16/32/64-bit

§

Scasq_RAX_m64 = 354

SCASQ

o64 AF

X64

64-bit

§

Mov_r8_imm8 = 355

MOV r8, imm8

B0+rb ib

8086+

16/32/64-bit

§

Mov_r16_imm16 = 356

MOV r16, imm16

o16 B8+rw iw

8086+

16/32/64-bit

§

Mov_r32_imm32 = 357

MOV r32, imm32

o32 B8+rd id

386+

16/32/64-bit

§

Mov_r64_imm64 = 358

MOV r64, imm64

o64 B8+ro io

X64

64-bit

§
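
The MOV register/immediate rows use the same register-in-opcode scheme (`B0+rb`, `B8+rw/rd/ro`), with REX.W selecting the only form that carries a full 64-bit immediate. A small sketch (bytes chosen for illustration):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

fn main() {
    // o32 B8+rd id: MOV EAX, 0xDEADBEEF
    let bytes = [0xB8, 0xEF, 0xBE, 0xAD, 0xDE];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Mov_r32_imm32);

    // o64 B8+ro io: REX.W turns the same base opcode into MOV RAX, imm64
    let bytes = [0x48, 0xB8, 0xEF, 0xBE, 0xAD, 0xDE, 0x00, 0x00, 0x00, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Mov_r64_imm64);
    assert_eq!(instr.op0_register(), Register::RAX);
}
```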

Rol_rm8_imm8 = 359

ROL r/m8, imm8

C0 /0 ib

186+

16/32/64-bit

§

Ror_rm8_imm8 = 360

ROR r/m8, imm8

C0 /1 ib

186+

16/32/64-bit

§

Rcl_rm8_imm8 = 361

RCL r/m8, imm8

C0 /2 ib

186+

16/32/64-bit

§

Rcr_rm8_imm8 = 362

RCR r/m8, imm8

C0 /3 ib

186+

16/32/64-bit

§

Shl_rm8_imm8 = 363

SHL r/m8, imm8

C0 /4 ib

186+

16/32/64-bit

§

Shr_rm8_imm8 = 364

SHR r/m8, imm8

C0 /5 ib

186+

16/32/64-bit

§

Sal_rm8_imm8 = 365

SAL r/m8, imm8

C0 /6 ib

186+

16/32/64-bit

§

Sar_rm8_imm8 = 366

SAR r/m8, imm8

C0 /7 ib

186+

16/32/64-bit

§

Rol_rm16_imm8 = 367

ROL r/m16, imm8

o16 C1 /0 ib

186+

16/32/64-bit

§

Rol_rm32_imm8 = 368

ROL r/m32, imm8

o32 C1 /0 ib

386+

16/32/64-bit

§

Rol_rm64_imm8 = 369

ROL r/m64, imm8

o64 C1 /0 ib

X64

64-bit

§

Ror_rm16_imm8 = 370

ROR r/m16, imm8

o16 C1 /1 ib

186+

16/32/64-bit

§

Ror_rm32_imm8 = 371

ROR r/m32, imm8

o32 C1 /1 ib

386+

16/32/64-bit

§

Ror_rm64_imm8 = 372

ROR r/m64, imm8

o64 C1 /1 ib

X64

64-bit

§

Rcl_rm16_imm8 = 373

RCL r/m16, imm8

o16 C1 /2 ib

186+

16/32/64-bit

§

Rcl_rm32_imm8 = 374

RCL r/m32, imm8

o32 C1 /2 ib

386+

16/32/64-bit

§

Rcl_rm64_imm8 = 375

RCL r/m64, imm8

o64 C1 /2 ib

X64

64-bit

§

Rcr_rm16_imm8 = 376

RCR r/m16, imm8

o16 C1 /3 ib

186+

16/32/64-bit

§

Rcr_rm32_imm8 = 377

RCR r/m32, imm8

o32 C1 /3 ib

386+

16/32/64-bit

§

Rcr_rm64_imm8 = 378

RCR r/m64, imm8

o64 C1 /3 ib

X64

64-bit

§

Shl_rm16_imm8 = 379

SHL r/m16, imm8

o16 C1 /4 ib

186+

16/32/64-bit

§

Shl_rm32_imm8 = 380

SHL r/m32, imm8

o32 C1 /4 ib

386+

16/32/64-bit

§

Shl_rm64_imm8 = 381

SHL r/m64, imm8

o64 C1 /4 ib

X64

64-bit

§

Shr_rm16_imm8 = 382

SHR r/m16, imm8

o16 C1 /5 ib

186+

16/32/64-bit

§

Shr_rm32_imm8 = 383

SHR r/m32, imm8

o32 C1 /5 ib

386+

16/32/64-bit

§

Shr_rm64_imm8 = 384

SHR r/m64, imm8

o64 C1 /5 ib

X64

64-bit

§

Sal_rm16_imm8 = 385

SAL r/m16, imm8

o16 C1 /6 ib

186+

16/32/64-bit

§

Sal_rm32_imm8 = 386

SAL r/m32, imm8

o32 C1 /6 ib

386+

16/32/64-bit

§

Sal_rm64_imm8 = 387

SAL r/m64, imm8

o64 C1 /6 ib

X64

64-bit

§

Sar_rm16_imm8 = 388

SAR r/m16, imm8

o16 C1 /7 ib

186+

16/32/64-bit

§

Sar_rm32_imm8 = 389

SAR r/m32, imm8

o32 C1 /7 ib

386+

16/32/64-bit

§

Sar_rm64_imm8 = 390

SAR r/m64, imm8

o64 C1 /7 ib

X64

64-bit

§

Retnw_imm16 = 391

RET imm16

o16 C2 iw

8086+

16/32/64-bit

§

Retnd_imm16 = 392

RET imm16

o32 C2 iw

386+

16/32-bit

§

Retnq_imm16 = 393

RET imm16

o64 C2 iw

X64

64-bit

§

Retnw = 394

RET

o16 C3

8086+

16/32/64-bit

§

Retnd = 395

RET

o32 C3

386+

16/32-bit

§

Retnq = 396

RET

o64 C3

X64

64-bit

§

Les_r16_m1616 = 397

LES r16, m16:16

o16 C4 /r

8086+

16/32-bit

§

Les_r32_m1632 = 398

LES r32, m16:32

o32 C4 /r

386+

16/32-bit

§

Lds_r16_m1616 = 399

LDS r16, m16:16

o16 C5 /r

8086+

16/32-bit

§

Lds_r32_m1632 = 400

LDS r32, m16:32

o32 C5 /r

386+

16/32-bit

§

Mov_rm8_imm8 = 401

MOV r/m8, imm8

C6 /0 ib

8086+

16/32/64-bit

§

Xabort_imm8 = 402

XABORT imm8

C6 F8 ib

RTM

16/32/64-bit

§

Mov_rm16_imm16 = 403

MOV r/m16, imm16

o16 C7 /0 iw

8086+

16/32/64-bit

§

Mov_rm32_imm32 = 404

MOV r/m32, imm32

o32 C7 /0 id

386+

16/32/64-bit

§

Mov_rm64_imm32 = 405

MOV r/m64, imm32

o64 C7 /0 id

X64

64-bit

§

Xbegin_rel16 = 406

XBEGIN rel16

o16 C7 F8 cw

RTM

16/32/64-bit

§

Xbegin_rel32 = 407

XBEGIN rel32

o32 C7 F8 cd

RTM

16/32/64-bit

§

Enterw_imm16_imm8 = 408

ENTER imm16, imm8

o16 C8 iw ib

186+

16/32/64-bit

§

Enterd_imm16_imm8 = 409

ENTER imm16, imm8

o32 C8 iw ib

386+

16/32-bit

§

Enterq_imm16_imm8 = 410

ENTER imm16, imm8

o64 C8 iw ib

X64

64-bit

§

Leavew = 411

LEAVE

o16 C9

186+

16/32/64-bit

§

Leaved = 412

LEAVE

o32 C9

386+

16/32-bit

§

Leaveq = 413

LEAVE

o64 C9

X64

64-bit

§

Retfw_imm16 = 414

RETF imm16

o16 CA iw

8086+

16/32/64-bit

§

Retfd_imm16 = 415

RETF imm16

o32 CA iw

386+

16/32/64-bit

§

Retfq_imm16 = 416

RETF imm16

o64 CA iw

X64

64-bit

§

Retfw = 417

RETF

o16 CB

8086+

16/32/64-bit

§

Retfd = 418

RETF

o32 CB

386+

16/32/64-bit

§

Retfq = 419

RETF

o64 CB

X64

64-bit

§

Int3 = 420

INT3

CC

8086+

16/32/64-bit

§

Int_imm8 = 421

INT imm8

CD ib

8086+

16/32/64-bit

§

Into = 422

INTO

CE

8086+

16/32-bit

§

Iretw = 423

IRET

o16 CF

8086+

16/32/64-bit

§

Iretd = 424

IRETD

o32 CF

386+

16/32/64-bit

§

Iretq = 425

IRETQ

o64 CF

X64

64-bit

§

Rol_rm8_1 = 426

ROL r/m8, 1

D0 /0

8086+

16/32/64-bit

§

Ror_rm8_1 = 427

ROR r/m8, 1

D0 /1

8086+

16/32/64-bit

§

Rcl_rm8_1 = 428

RCL r/m8, 1

D0 /2

8086+

16/32/64-bit

§

Rcr_rm8_1 = 429

RCR r/m8, 1

D0 /3

8086+

16/32/64-bit

§

Shl_rm8_1 = 430

SHL r/m8, 1

D0 /4

8086+

16/32/64-bit

§

Shr_rm8_1 = 431

SHR r/m8, 1

D0 /5

8086+

16/32/64-bit

§

Sal_rm8_1 = 432

SAL r/m8, 1

D0 /6

8086+

16/32/64-bit

§

Sar_rm8_1 = 433

SAR r/m8, 1

D0 /7

8086+

16/32/64-bit

§

Rol_rm16_1 = 434

ROL r/m16, 1

o16 D1 /0

8086+

16/32/64-bit

§

Rol_rm32_1 = 435

ROL r/m32, 1

o32 D1 /0

386+

16/32/64-bit

§

Rol_rm64_1 = 436

ROL r/m64, 1

o64 D1 /0

X64

64-bit

§

Ror_rm16_1 = 437

ROR r/m16, 1

o16 D1 /1

8086+

16/32/64-bit

§

Ror_rm32_1 = 438

ROR r/m32, 1

o32 D1 /1

386+

16/32/64-bit

§

Ror_rm64_1 = 439

ROR r/m64, 1

o64 D1 /1

X64

64-bit

§

Rcl_rm16_1 = 440

RCL r/m16, 1

o16 D1 /2

8086+

16/32/64-bit

§

Rcl_rm32_1 = 441

RCL r/m32, 1

o32 D1 /2

386+

16/32/64-bit

§

Rcl_rm64_1 = 442

RCL r/m64, 1

o64 D1 /2

X64

64-bit

§

Rcr_rm16_1 = 443

RCR r/m16, 1

o16 D1 /3

8086+

16/32/64-bit

§

Rcr_rm32_1 = 444

RCR r/m32, 1

o32 D1 /3

386+

16/32/64-bit

§

Rcr_rm64_1 = 445

RCR r/m64, 1

o64 D1 /3

X64

64-bit

§

Shl_rm16_1 = 446

SHL r/m16, 1

o16 D1 /4

8086+

16/32/64-bit

§

Shl_rm32_1 = 447

SHL r/m32, 1

o32 D1 /4

386+

16/32/64-bit

§

Shl_rm64_1 = 448

SHL r/m64, 1

o64 D1 /4

X64

64-bit

§

Shr_rm16_1 = 449

SHR r/m16, 1

o16 D1 /5

8086+

16/32/64-bit

§

Shr_rm32_1 = 450

SHR r/m32, 1

o32 D1 /5

386+

16/32/64-bit

§

Shr_rm64_1 = 451

SHR r/m64, 1

o64 D1 /5

X64

64-bit

§

Sal_rm16_1 = 452

SAL r/m16, 1

o16 D1 /6

8086+

16/32/64-bit

§

Sal_rm32_1 = 453

SAL r/m32, 1

o32 D1 /6

386+

16/32/64-bit

§

Sal_rm64_1 = 454

SAL r/m64, 1

o64 D1 /6

X64

64-bit

§

Sar_rm16_1 = 455

SAR r/m16, 1

o16 D1 /7

8086+

16/32/64-bit

§

Sar_rm32_1 = 456

SAR r/m32, 1

o32 D1 /7

386+

16/32/64-bit

§

Sar_rm64_1 = 457

SAR r/m64, 1

o64 D1 /7

X64

64-bit

§

Rol_rm8_CL = 458

ROL r/m8, CL

D2 /0

8086+

16/32/64-bit

§

Ror_rm8_CL = 459

ROR r/m8, CL

D2 /1

8086+

16/32/64-bit

§

Rcl_rm8_CL = 460

RCL r/m8, CL

D2 /2

8086+

16/32/64-bit

§

Rcr_rm8_CL = 461

RCR r/m8, CL

D2 /3

8086+

16/32/64-bit

§

Shl_rm8_CL = 462

SHL r/m8, CL

D2 /4

8086+

16/32/64-bit

§

Shr_rm8_CL = 463

SHR r/m8, CL

D2 /5

8086+

16/32/64-bit

§

Sal_rm8_CL = 464

SAL r/m8, CL

D2 /6

8086+

16/32/64-bit

§

Sar_rm8_CL = 465

SAR r/m8, CL

D2 /7

8086+

16/32/64-bit

§

Rol_rm16_CL = 466

ROL r/m16, CL

o16 D3 /0

8086+

16/32/64-bit

§

Rol_rm32_CL = 467

ROL r/m32, CL

o32 D3 /0

386+

16/32/64-bit

§

Rol_rm64_CL = 468

ROL r/m64, CL

o64 D3 /0

X64

64-bit

§

Ror_rm16_CL = 469

ROR r/m16, CL

o16 D3 /1

8086+

16/32/64-bit

§

Ror_rm32_CL = 470

ROR r/m32, CL

o32 D3 /1

386+

16/32/64-bit

§

Ror_rm64_CL = 471

ROR r/m64, CL

o64 D3 /1

X64

64-bit

§

Rcl_rm16_CL = 472

RCL r/m16, CL

o16 D3 /2

8086+

16/32/64-bit

§

Rcl_rm32_CL = 473

RCL r/m32, CL

o32 D3 /2

386+

16/32/64-bit

§

Rcl_rm64_CL = 474

RCL r/m64, CL

o64 D3 /2

X64

64-bit

§

Rcr_rm16_CL = 475

RCR r/m16, CL

o16 D3 /3

8086+

16/32/64-bit

§

Rcr_rm32_CL = 476

RCR r/m32, CL

o32 D3 /3

386+

16/32/64-bit

§

Rcr_rm64_CL = 477

RCR r/m64, CL

o64 D3 /3

X64

64-bit

§

Shl_rm16_CL = 478

SHL r/m16, CL

o16 D3 /4

8086+

16/32/64-bit

§

Shl_rm32_CL = 479

SHL r/m32, CL

o32 D3 /4

386+

16/32/64-bit

§

Shl_rm64_CL = 480

SHL r/m64, CL

o64 D3 /4

X64

64-bit

§

Shr_rm16_CL = 481

SHR r/m16, CL

o16 D3 /5

8086+

16/32/64-bit

§

Shr_rm32_CL = 482

SHR r/m32, CL

o32 D3 /5

386+

16/32/64-bit

§

Shr_rm64_CL = 483

SHR r/m64, CL

o64 D3 /5

X64

64-bit

§

Sal_rm16_CL = 484

SAL r/m16, CL

o16 D3 /6

8086+

16/32/64-bit

§

Sal_rm32_CL = 485

SAL r/m32, CL

o32 D3 /6

386+

16/32/64-bit

§

Sal_rm64_CL = 486

SAL r/m64, CL

o64 D3 /6

X64

64-bit

§

Sar_rm16_CL = 487

SAR r/m16, CL

o16 D3 /7

8086+

16/32/64-bit

§

Sar_rm32_CL = 488

SAR r/m32, CL

o32 D3 /7

386+

16/32/64-bit

§

Sar_rm64_CL = 489

SAR r/m64, CL

o64 D3 /7

X64

64-bit

§

Aam_imm8 = 490

AAM imm8

D4 ib

8086+

16/32-bit

§

Aad_imm8 = 491

AAD imm8

D5 ib

8086+

16/32-bit

§

Salc = 492

SALC

D6

8086+

16/32-bit

§

Xlat_m8 = 493

XLATB

D7

8086+

16/32/64-bit

§

Fadd_m32fp = 494

FADD m32fp

D8 /0

8087+

16/32/64-bit

§

Fmul_m32fp = 495

FMUL m32fp

D8 /1

8087+

16/32/64-bit

§

Fcom_m32fp = 496

FCOM m32fp

D8 /2

8087+

16/32/64-bit

§

Fcomp_m32fp = 497

FCOMP m32fp

D8 /3

8087+

16/32/64-bit

§

Fsub_m32fp = 498

FSUB m32fp

D8 /4

8087+

16/32/64-bit

§

Fsubr_m32fp = 499

FSUBR m32fp

D8 /5

8087+

16/32/64-bit

§

Fdiv_m32fp = 500

FDIV m32fp

D8 /6

8087+

16/32/64-bit

§

Fdivr_m32fp = 501

FDIVR m32fp

D8 /7

8087+

16/32/64-bit

§

Fadd_st0_sti = 502

FADD ST(0), ST(i)

D8 C0+i

8087+

16/32/64-bit

§

Fmul_st0_sti = 503

FMUL ST(0), ST(i)

D8 C8+i

8087+

16/32/64-bit

§

Fcom_st0_sti = 504

FCOM ST(i)

D8 D0+i

8087+

16/32/64-bit

§

Fcomp_st0_sti = 505

FCOMP ST(i)

D8 D8+i

8087+

16/32/64-bit

§

Fsub_st0_sti = 506

FSUB ST(0), ST(i)

D8 E0+i

8087+

16/32/64-bit

§

Fsubr_st0_sti = 507

FSUBR ST(0), ST(i)

D8 E8+i

8087+

16/32/64-bit

§

Fdiv_st0_sti = 508

FDIV ST(0), ST(i)

D8 F0+i

8087+

16/32/64-bit

§

Fdivr_st0_sti = 509

FDIVR ST(0), ST(i)

D8 F8+i

8087+

16/32/64-bit

§
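
The D8 rows illustrate the two x87 encoding shapes used throughout this listing: `D8 /digit` forms take a memory operand selected by ModRM, while `D8 C0+i` forms add the stack-register index i to a fixed byte. A decoding sketch (illustrative bytes, assuming the crate's `Decoder` and `Register` types):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

fn main() {
    // D8 /0: FADD m32fp (memory operand via ModRM)
    let bytes = [0xD8, 0x45, 0x08]; // fadd dword ptr [rbp+8]
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Fadd_m32fp);

    // D8 C0+i: FADD ST(0), ST(i); here i = 1
    let bytes = [0xD8, 0xC1];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Fadd_st0_sti);
    assert_eq!(instr.op1_register(), Register::ST1);
}
```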

Fld_m32fp = 510

FLD m32fp

D9 /0

8087+

16/32/64-bit

§

Fst_m32fp = 511

FST m32fp

D9 /2

8087+

16/32/64-bit

§

Fstp_m32fp = 512

FSTP m32fp

D9 /3

8087+

16/32/64-bit

§

Fldenv_m14byte = 513

FLDENV m14byte

o16 D9 /4

8087+

16/32/64-bit

§

Fldenv_m28byte = 514

FLDENV m28byte

o32 D9 /4

387+

16/32/64-bit

§

Fldcw_m2byte = 515

FLDCW m2byte

D9 /5

8087+

16/32/64-bit

§

Fnstenv_m14byte = 516

FNSTENV m14byte

o16 D9 /6

8087+

16/32/64-bit

§

Fstenv_m14byte = 517

FSTENV m14byte

9B o16 D9 /6

8087+

16/32/64-bit

§

Fnstenv_m28byte = 518

FNSTENV m28byte

o32 D9 /6

387+

16/32/64-bit

§

Fstenv_m28byte = 519

FSTENV m28byte

9B o32 D9 /6

387+

16/32/64-bit

§

Fnstcw_m2byte = 520

FNSTCW m2byte

D9 /7

8087+

16/32/64-bit

§

Fstcw_m2byte = 521

FSTCW m2byte

9B D9 /7

8087+

16/32/64-bit

§

Fld_sti = 522

FLD ST(i)

D9 C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti = 523

FXCH ST(i)

D9 C8+i

8087+

16/32/64-bit

§

Fnop = 524

FNOP

D9 D0

8087+

16/32/64-bit

§

Fstpnce_sti = 525

FSTPNCE ST(i)

D9 D8+i

8087+

16/32/64-bit

§

Fchs = 526

FCHS

D9 E0

8087+

16/32/64-bit

§

Fabs = 527

FABS

D9 E1

8087+

16/32/64-bit

§

Ftst = 528

FTST

D9 E4

8087+

16/32/64-bit

§

Fxam = 529

FXAM

D9 E5

8087+

16/32/64-bit

§

Fld1 = 530

FLD1

D9 E8

8087+

16/32/64-bit

§

Fldl2t = 531

FLDL2T

D9 E9

8087+

16/32/64-bit

§

Fldl2e = 532

FLDL2E

D9 EA

8087+

16/32/64-bit

§

Fldpi = 533

FLDPI

D9 EB

8087+

16/32/64-bit

§

Fldlg2 = 534

FLDLG2

D9 EC

8087+

16/32/64-bit

§

Fldln2 = 535

FLDLN2

D9 ED

8087+

16/32/64-bit

§

Fldz = 536

FLDZ

D9 EE

8087+

16/32/64-bit

§

F2xm1 = 537

F2XM1

D9 F0

8087+

16/32/64-bit

§

Fyl2x = 538

FYL2X

D9 F1

8087+

16/32/64-bit

§

Fptan = 539

FPTAN

D9 F2

8087+

16/32/64-bit

§

Fpatan = 540

FPATAN

D9 F3

8087+

16/32/64-bit

§

Fxtract = 541

FXTRACT

D9 F4

8087+

16/32/64-bit

§

Fprem1 = 542

FPREM1

D9 F5

387+

16/32/64-bit

§

Fdecstp = 543

FDECSTP

D9 F6

8087+

16/32/64-bit

§

Fincstp = 544

FINCSTP

D9 F7

8087+

16/32/64-bit

§

Fprem = 545

FPREM

D9 F8

8087+

16/32/64-bit

§

Fyl2xp1 = 546

FYL2XP1

D9 F9

8087+

16/32/64-bit

§

Fsqrt = 547

FSQRT

D9 FA

8087+

16/32/64-bit

§

Fsincos = 548

FSINCOS

D9 FB

387+

16/32/64-bit

§

Frndint = 549

FRNDINT

D9 FC

8087+

16/32/64-bit

§

Fscale = 550

FSCALE

D9 FD

8087+

16/32/64-bit

§

Fsin = 551

FSIN

D9 FE

387+

16/32/64-bit

§

Fcos = 552

FCOS

D9 FF

387+

16/32/64-bit

§

Fiadd_m32int = 553

FIADD m32int

DA /0

8087+

16/32/64-bit

§

Fimul_m32int = 554

FIMUL m32int

DA /1

8087+

16/32/64-bit

§

Ficom_m32int = 555

FICOM m32int

DA /2

8087+

16/32/64-bit

§

Ficomp_m32int = 556

FICOMP m32int

DA /3

8087+

16/32/64-bit

§

Fisub_m32int = 557

FISUB m32int

DA /4

8087+

16/32/64-bit

§

Fisubr_m32int = 558

FISUBR m32int

DA /5

8087+

16/32/64-bit

§

Fidiv_m32int = 559

FIDIV m32int

DA /6

8087+

16/32/64-bit

§

Fidivr_m32int = 560

FIDIVR m32int

DA /7

8087+

16/32/64-bit

§

Fcmovb_st0_sti = 561

FCMOVB ST(0), ST(i)

DA C0+i

8087+ and CMOV

16/32/64-bit

§

Fcmove_st0_sti = 562

FCMOVE ST(0), ST(i)

DA C8+i

8087+ and CMOV

16/32/64-bit

§

Fcmovbe_st0_sti = 563

FCMOVBE ST(0), ST(i)

DA D0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovu_st0_sti = 564

FCMOVU ST(0), ST(i)

DA D8+i

8087+ and CMOV

16/32/64-bit

§

Fucompp = 565

FUCOMPP

DA E9

387+

16/32/64-bit

§

Fild_m32int = 566

FILD m32int

DB /0

8087+

16/32/64-bit

§

Fisttp_m32int = 567

FISTTP m32int

DB /1

8087+ and SSE3

16/32/64-bit

§

Fist_m32int = 568

FIST m32int

DB /2

8087+

16/32/64-bit

§

Fistp_m32int = 569

FISTP m32int

DB /3

8087+

16/32/64-bit

§

Fld_m80fp = 570

FLD m80fp

DB /5

8087+

16/32/64-bit

§

Fstp_m80fp = 571

FSTP m80fp

DB /7

8087+

16/32/64-bit

§

Fcmovnb_st0_sti = 572

FCMOVNB ST(0), ST(i)

DB C0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovne_st0_sti = 573

FCMOVNE ST(0), ST(i)

DB C8+i

8087+ and CMOV

16/32/64-bit

§

Fcmovnbe_st0_sti = 574

FCMOVNBE ST(0), ST(i)

DB D0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovnu_st0_sti = 575

FCMOVNU ST(0), ST(i)

DB D8+i

8087+ and CMOV

16/32/64-bit

§

Fneni = 576

FNENI

DB E0

8087+

16/32/64-bit

§

Feni = 577

FENI

9B DB E0

8087+

16/32/64-bit

§

Fndisi = 578

FNDISI

DB E1

8087+

16/32/64-bit

§

Fdisi = 579

FDISI

9B DB E1

8087+

16/32/64-bit

§

Fnclex = 580

FNCLEX

DB E2

8087+

16/32/64-bit

§

Fclex = 581

FCLEX

9B DB E2

8087+

16/32/64-bit

§

Fninit = 582

FNINIT

DB E3

8087+

16/32/64-bit

§

Finit = 583

FINIT

9B DB E3

8087+

16/32/64-bit

§

Fnsetpm = 584

FNSETPM

DB E4

287+

16/32/64-bit

§

Fsetpm = 585

FSETPM

9B DB E4

287+

16/32/64-bit

§

Frstpm = 586

FRSTPM

DB E5

287 XL

16/32-bit

§

Fucomi_st0_sti = 587

FUCOMI ST, ST(i)

DB E8+i

8087+ and CMOV

16/32/64-bit

§

Fcomi_st0_sti = 588

FCOMI ST, ST(i)

DB F0+i

8087+ and CMOV

16/32/64-bit

§

Fadd_m64fp = 589

FADD m64fp

DC /0

8087+

16/32/64-bit

§

Fmul_m64fp = 590

FMUL m64fp

DC /1

8087+

16/32/64-bit

§

Fcom_m64fp = 591

FCOM m64fp

DC /2

8087+

16/32/64-bit

§

Fcomp_m64fp = 592

FCOMP m64fp

DC /3

8087+

16/32/64-bit

§

Fsub_m64fp = 593

FSUB m64fp

DC /4

8087+

16/32/64-bit

§

Fsubr_m64fp = 594

FSUBR m64fp

DC /5

8087+

16/32/64-bit

§

Fdiv_m64fp = 595

FDIV m64fp

DC /6

8087+

16/32/64-bit

§

Fdivr_m64fp = 596

FDIVR m64fp

DC /7

8087+

16/32/64-bit

§

Fadd_sti_st0 = 597

FADD ST(i), ST(0)

DC C0+i

8087+

16/32/64-bit

§

Fmul_sti_st0 = 598

FMUL ST(i), ST(0)

DC C8+i

8087+

16/32/64-bit

§

Fcom_st0_sti_DCD0 = 599

FCOM ST(i)

DC D0+i

8087+

16/32/64-bit

§

Fcomp_st0_sti_DCD8 = 600

FCOMP ST(i)

DC D8+i

8087+

16/32/64-bit

§

Fsubr_sti_st0 = 601

FSUBR ST(i), ST(0)

DC E0+i

8087+

16/32/64-bit

§

Fsub_sti_st0 = 602

FSUB ST(i), ST(0)

DC E8+i

8087+

16/32/64-bit

§

Fdivr_sti_st0 = 603

FDIVR ST(i), ST(0)

DC F0+i

8087+

16/32/64-bit

§

Fdiv_sti_st0 = 604

FDIV ST(i), ST(0)

DC F8+i

8087+

16/32/64-bit

§

Fld_m64fp = 605

FLD m64fp

DD /0

8087+

16/32/64-bit

§

Fisttp_m64int = 606

FISTTP m64int

DD /1

8087+ and SSE3

16/32/64-bit

§

Fst_m64fp = 607

FST m64fp

DD /2

8087+

16/32/64-bit

§

Fstp_m64fp = 608

FSTP m64fp

DD /3

8087+

16/32/64-bit

§

Frstor_m94byte = 609

FRSTOR m94byte

o16 DD /4

8087+

16/32/64-bit

§

Frstor_m108byte = 610

FRSTOR m108byte

o32 DD /4

387+

16/32/64-bit

§

Fnsave_m94byte = 611

FNSAVE m94byte

o16 DD /6

8087+

16/32/64-bit

§

Fsave_m94byte = 612

FSAVE m94byte

9B o16 DD /6

8087+

16/32/64-bit

§

Fnsave_m108byte = 613

FNSAVE m108byte

o32 DD /6

387+

16/32/64-bit

§

Fsave_m108byte = 614

FSAVE m108byte

9B o32 DD /6

387+

16/32/64-bit

§

Fnstsw_m2byte = 615

FNSTSW m2byte

DD /7

8087+

16/32/64-bit

§

Fstsw_m2byte = 616

FSTSW m2byte

9B DD /7

8087+

16/32/64-bit

§

Ffree_sti = 617

FFREE ST(i)

DD C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti_DDC8 = 618

FXCH ST(i)

DD C8+i

8087+

16/32/64-bit

§

Fst_sti = 619

FST ST(i)

DD D0+i

8087+

16/32/64-bit

§

Fstp_sti = 620

FSTP ST(i)

DD D8+i

8087+

16/32/64-bit

§

Fucom_st0_sti = 621

FUCOM ST(i)

DD E0+i

8087+

16/32/64-bit

§

Fucomp_st0_sti = 622

FUCOMP ST(i)

DD E8+i

8087+

16/32/64-bit

§

Fiadd_m16int = 623

FIADD m16int

DE /0

8087+

16/32/64-bit

§

Fimul_m16int = 624

FIMUL m16int

DE /1

8087+

16/32/64-bit

§

Ficom_m16int = 625

FICOM m16int

DE /2

8087+

16/32/64-bit

§

Ficomp_m16int = 626

FICOMP m16int

DE /3

8087+

16/32/64-bit

§

Fisub_m16int = 627

FISUB m16int

DE /4

8087+

16/32/64-bit

§

Fisubr_m16int = 628

FISUBR m16int

DE /5

8087+

16/32/64-bit

§

Fidiv_m16int = 629

FIDIV m16int

DE /6

8087+

16/32/64-bit

§

Fidivr_m16int = 630

FIDIVR m16int

DE /7

8087+

16/32/64-bit

§

Faddp_sti_st0 = 631

FADDP ST(i), ST(0)

DE C0+i

8087+

16/32/64-bit

§

Fmulp_sti_st0 = 632

FMULP ST(i), ST(0)

DE C8+i

8087+

16/32/64-bit

§

Fcomp_st0_sti_DED0 = 633

FCOMP ST(i)

DE D0+i

8087+

16/32/64-bit

§

Fcompp = 634

FCOMPP

DE D9

8087+

16/32/64-bit

§

Fsubrp_sti_st0 = 635

FSUBRP ST(i), ST(0)

DE E0+i

8087+

16/32/64-bit

§

Fsubp_sti_st0 = 636

FSUBP ST(i), ST(0)

DE E8+i

8087+

16/32/64-bit

§

Fdivrp_sti_st0 = 637

FDIVRP ST(i), ST(0)

DE F0+i

8087+

16/32/64-bit

§

Fdivp_sti_st0 = 638

FDIVP ST(i), ST(0)

DE F8+i

8087+

16/32/64-bit

§

Fild_m16int = 639

FILD m16int

DF /0

8087+

16/32/64-bit

§

Fisttp_m16int = 640

FISTTP m16int

DF /1

8087+ and SSE3

16/32/64-bit

§

Fist_m16int = 641

FIST m16int

DF /2

8087+

16/32/64-bit

§

Fistp_m16int = 642

FISTP m16int

DF /3

8087+

16/32/64-bit

§

Fbld_m80bcd = 643

FBLD m80bcd

DF /4

8087+

16/32/64-bit

§

Fild_m64int = 644

FILD m64int

DF /5

8087+

16/32/64-bit

§

Fbstp_m80bcd = 645

FBSTP m80bcd

DF /6

8087+

16/32/64-bit

§

Fistp_m64int = 646

FISTP m64int

DF /7

8087+

16/32/64-bit

§

Ffreep_sti = 647

FFREEP ST(i)

DF C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti_DFC8 = 648

FXCH ST(i)

DF C8+i

8087+

16/32/64-bit

§

Fstp_sti_DFD0 = 649

FSTP ST(i)

DF D0+i

8087+

16/32/64-bit

§

Fstp_sti_DFD8 = 650

FSTP ST(i)

DF D8+i

8087+

16/32/64-bit

§

Fnstsw_AX = 651

FNSTSW AX

DF E0

287+

16/32/64-bit

§

Fstsw_AX = 652

FSTSW AX

9B DF E0

287+

16/32/64-bit

§

Fstdw_AX = 653

FSTDW AX

9B DF E1

387 SL

16/32-bit

§

Fstsg_AX = 654

FSTSG AX

9B DF E2

387 SL

16/32-bit

§

Fucomip_st0_sti = 655

FUCOMIP ST, ST(i)

DF E8+i

8087+ and CMOV

16/32/64-bit

§

Fcomip_st0_sti = 656

FCOMIP ST, ST(i)

DF F0+i

8087+ and CMOV

16/32/64-bit

§

Loopne_rel8_16_CX = 657

LOOPNE rel8

a16 o16 E0 cb

8086+

16/32-bit

§

Loopne_rel8_32_CX = 658

LOOPNE rel8

a16 o32 E0 cb

386+

16/32-bit

§

Loopne_rel8_16_ECX = 659

LOOPNE rel8

a32 o16 E0 cb

386+

16/32/64-bit

§

Loopne_rel8_32_ECX = 660

LOOPNE rel8

a32 o32 E0 cb

386+

16/32-bit

§

Loopne_rel8_64_ECX = 661

LOOPNE rel8

a32 o64 E0 cb

X64

64-bit

§

Loopne_rel8_16_RCX = 662

LOOPNE rel8

a64 o16 E0 cb

X64

64-bit

§

Loopne_rel8_64_RCX = 663

LOOPNE rel8

a64 o64 E0 cb

X64

64-bit

§

Loope_rel8_16_CX = 664

LOOPE rel8

a16 o16 E1 cb

8086+

16/32-bit

§

Loope_rel8_32_CX = 665

LOOPE rel8

a16 o32 E1 cb

386+

16/32-bit

§

Loope_rel8_16_ECX = 666

LOOPE rel8

a32 o16 E1 cb

386+

16/32/64-bit

§

Loope_rel8_32_ECX = 667

LOOPE rel8

a32 o32 E1 cb

386+

16/32-bit

§

Loope_rel8_64_ECX = 668

LOOPE rel8

a32 o64 E1 cb

X64

64-bit

§

Loope_rel8_16_RCX = 669

LOOPE rel8

a64 o16 E1 cb

X64

64-bit

§

Loope_rel8_64_RCX = 670

LOOPE rel8

a64 o64 E1 cb

X64

64-bit

§

Loop_rel8_16_CX = 671

LOOP rel8

a16 o16 E2 cb

8086+

16/32-bit

§

Loop_rel8_32_CX = 672

LOOP rel8

a16 o32 E2 cb

386+

16/32-bit

§

Loop_rel8_16_ECX = 673

LOOP rel8

a32 o16 E2 cb

386+

16/32/64-bit

§

Loop_rel8_32_ECX = 674

LOOP rel8

a32 o32 E2 cb

386+

16/32-bit

§

Loop_rel8_64_ECX = 675

LOOP rel8

a32 o64 E2 cb

X64

64-bit

§

Loop_rel8_16_RCX = 676

LOOP rel8

a64 o16 E2 cb

X64

64-bit

§

Loop_rel8_64_RCX = 677

LOOP rel8

a64 o64 E2 cb

X64

64-bit

§

Jcxz_rel8_16 = 678

JCXZ rel8

a16 o16 E3 cb

8086+

16/32-bit

§

Jcxz_rel8_32 = 679

JCXZ rel8

a16 o32 E3 cb

386+

16/32-bit

§

Jecxz_rel8_16 = 680

JECXZ rel8

a32 o16 E3 cb

386+

16/32/64-bit

§

Jecxz_rel8_32 = 681

JECXZ rel8

a32 o32 E3 cb

386+

16/32-bit

§

Jecxz_rel8_64 = 682

JECXZ rel8

a32 o64 E3 cb

X64

64-bit

§

Jrcxz_rel8_16 = 683

JRCXZ rel8

a64 o16 E3 cb

X64

64-bit

§

Jrcxz_rel8_64 = 684

JRCXZ rel8

a64 o64 E3 cb

X64

64-bit

§

In_AL_imm8 = 685

IN AL, imm8

E4 ib

8086+

16/32/64-bit

§

In_AX_imm8 = 686

IN AX, imm8

o16 E5 ib

8086+

16/32/64-bit

§

In_EAX_imm8 = 687

IN EAX, imm8

o32 E5 ib

386+

16/32/64-bit

§

Out_imm8_AL = 688

OUT imm8, AL

E6 ib

8086+

16/32/64-bit

§

Out_imm8_AX = 689

OUT imm8, AX

o16 E7 ib

8086+

16/32/64-bit

§

Out_imm8_EAX = 690

OUT imm8, EAX

o32 E7 ib

386+

16/32/64-bit

§

Call_rel16 = 691

CALL rel16

o16 E8 cw

8086+

16/32/64-bit

§

Call_rel32_32 = 692

CALL rel32

o32 E8 cd

386+

16/32-bit

§

Call_rel32_64 = 693

CALL rel32

o64 E8 cd

X64

64-bit

§

Jmp_rel16 = 694

JMP rel16

o16 E9 cw

8086+

16/32/64-bit

§

Jmp_rel32_32 = 695

JMP rel32

o32 E9 cd

386+

16/32-bit

§

Jmp_rel32_64 = 696

JMP rel32

o64 E9 cd

X64

64-bit

§

Jmp_ptr1616 = 697

JMP ptr16:16

o16 EA cd

8086+

16/32-bit

§

Jmp_ptr1632 = 698

JMP ptr16:32

o32 EA cp

386+

16/32-bit

§

Jmp_rel8_16 = 699

JMP rel8

o16 EB cb

8086+

16/32/64-bit

§

Jmp_rel8_32 = 700

JMP rel8

o32 EB cb

386+

16/32-bit

§

Jmp_rel8_64 = 701

JMP rel8

o64 EB cb

X64

64-bit

§
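
The `cb`/`cw`/`cd` operands in the CALL/JMP rows are displacements relative to the end of the instruction. A sketch of reading the resolved target through the decoded instruction, assuming the decoder's instruction pointer defaults to 0 when constructed with `Decoder::new` and that `near_branch_target` behaves as I describe:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // o64 EB cb: JMP rel8. With IP = 0, "EB 05" at offset 0 is 2 bytes long,
    // so the branch target is 2 + 5 = 7.
    let bytes = [0xEB, 0x05];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Jmp_rel8_64);
    assert_eq!(instr.near_branch_target(), 7);
}
```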

In_AL_DX = 702

IN AL, DX

EC

8086+

16/32/64-bit

§

In_AX_DX = 703

IN AX, DX

o16 ED

8086+

16/32/64-bit

§

In_EAX_DX = 704

IN EAX, DX

o32 ED

386+

16/32/64-bit

§

Out_DX_AL = 705

OUT DX, AL

EE

8086+

16/32/64-bit

§

Out_DX_AX = 706

OUT DX, AX

o16 EF

8086+

16/32/64-bit

§

Out_DX_EAX = 707

OUT DX, EAX

o32 EF

386+

16/32/64-bit

§

Int1 = 708

INT1

F1

386+

16/32/64-bit

§

Hlt = 709

HLT

F4

8086+

16/32/64-bit

§

Cmc = 710

CMC

F5

8086+

16/32/64-bit

§

Test_rm8_imm8 = 711

TEST r/m8, imm8

F6 /0 ib

8086+

16/32/64-bit

§

Test_rm8_imm8_F6r1 = 712

TEST r/m8, imm8

F6 /1 ib

8086+

16/32/64-bit

§

Not_rm8 = 713

NOT r/m8

F6 /2

8086+

16/32/64-bit

§

Neg_rm8 = 714

NEG r/m8

F6 /3

8086+

16/32/64-bit

§

Mul_rm8 = 715

MUL r/m8

F6 /4

8086+

16/32/64-bit

§

Imul_rm8 = 716

IMUL r/m8

F6 /5

8086+

16/32/64-bit

§

Div_rm8 = 717

DIV r/m8

F6 /6

8086+

16/32/64-bit

§

Idiv_rm8 = 718

IDIV r/m8

F6 /7

8086+

16/32/64-bit

§

Test_rm16_imm16 = 719

TEST r/m16, imm16

o16 F7 /0 iw

8086+

16/32/64-bit

§

Test_rm32_imm32 = 720

TEST r/m32, imm32

o32 F7 /0 id

386+

16/32/64-bit

§

Test_rm64_imm32 = 721

TEST r/m64, imm32

o64 F7 /0 id

X64

64-bit

§

Test_rm16_imm16_F7r1 = 722

TEST r/m16, imm16

o16 F7 /1 iw

8086+

16/32/64-bit

§

Test_rm32_imm32_F7r1 = 723

TEST r/m32, imm32

o32 F7 /1 id

386+

16/32/64-bit

§

Test_rm64_imm32_F7r1 = 724

TEST r/m64, imm32

o64 F7 /1 id

X64

64-bit

§

Not_rm16 = 725

NOT r/m16

o16 F7 /2

8086+

16/32/64-bit

§

Not_rm32 = 726

NOT r/m32

o32 F7 /2

386+

16/32/64-bit

§

Not_rm64 = 727

NOT r/m64

o64 F7 /2

X64

64-bit

§

Neg_rm16 = 728

NEG r/m16

o16 F7 /3

8086+

16/32/64-bit

§

Neg_rm32 = 729

NEG r/m32

o32 F7 /3

386+

16/32/64-bit

§

Neg_rm64 = 730

NEG r/m64

o64 F7 /3

X64

64-bit

§

Mul_rm16 = 731

MUL r/m16

o16 F7 /4

8086+

16/32/64-bit

§

Mul_rm32 = 732

MUL r/m32

o32 F7 /4

386+

16/32/64-bit

§

Mul_rm64 = 733

MUL r/m64

o64 F7 /4

X64

64-bit

§

Imul_rm16 = 734

IMUL r/m16

o16 F7 /5

8086+

16/32/64-bit

§

Imul_rm32 = 735

IMUL r/m32

o32 F7 /5

386+

16/32/64-bit

§

Imul_rm64 = 736

IMUL r/m64

o64 F7 /5

X64

64-bit

§

Div_rm16 = 737

DIV r/m16

o16 F7 /6

8086+

16/32/64-bit

§

Div_rm32 = 738

DIV r/m32

o32 F7 /6

386+

16/32/64-bit

§

Div_rm64 = 739

DIV r/m64

o64 F7 /6

X64

64-bit

§

Idiv_rm16 = 740

IDIV r/m16

o16 F7 /7

8086+

16/32/64-bit

§

Idiv_rm32 = 741

IDIV r/m32

o32 F7 /7

386+

16/32/64-bit

§

Idiv_rm64 = 742

IDIV r/m64

o64 F7 /7

X64

64-bit

§

Clc = 743

CLC

F8

8086+

16/32/64-bit

§

Stc = 744

STC

F9

8086+

16/32/64-bit

§

Cli = 745

CLI

FA

8086+

16/32/64-bit

§

Sti = 746

STI

FB

8086+

16/32/64-bit

§

Cld = 747

CLD

FC

8086+

16/32/64-bit

§

Std = 748

STD

FD

8086+

16/32/64-bit

§

Inc_rm8 = 749

INC r/m8

FE /0

8086+

16/32/64-bit

§

Dec_rm8 = 750

DEC r/m8

FE /1

8086+

16/32/64-bit

§

Inc_rm16 = 751

INC r/m16

o16 FF /0

8086+

16/32/64-bit

§

Inc_rm32 = 752

INC r/m32

o32 FF /0

386+

16/32/64-bit

§

Inc_rm64 = 753

INC r/m64

o64 FF /0

X64

64-bit

§

Dec_rm16 = 754

DEC r/m16

o16 FF /1

8086+

16/32/64-bit

§

Dec_rm32 = 755

DEC r/m32

o32 FF /1

386+

16/32/64-bit

§

Dec_rm64 = 756

DEC r/m64

o64 FF /1

X64

64-bit

§

Call_rm16 = 757

CALL r/m16

o16 FF /2

8086+

16/32/64-bit

§

Call_rm32 = 758

CALL r/m32

o32 FF /2

386+

16/32-bit

§

Call_rm64 = 759

CALL r/m64

o64 FF /2

X64

64-bit

§

Call_m1616 = 760

CALL m16:16

o16 FF /3

8086+

16/32/64-bit

§

Call_m1632 = 761

CALL m16:32

o32 FF /3

386+

16/32/64-bit

§

Call_m1664 = 762

CALL m16:64

o64 FF /3

X64

64-bit

§

Jmp_rm16 = 763

JMP r/m16

o16 FF /4

8086+

16/32/64-bit

§

Jmp_rm32 = 764

JMP r/m32

o32 FF /4

386+

16/32-bit

§

Jmp_rm64 = 765

JMP r/m64

o64 FF /4

X64

64-bit

§

Jmp_m1616 = 766

JMP m16:16

o16 FF /5

8086+

16/32/64-bit

§

Jmp_m1632 = 767

JMP m16:32

o32 FF /5

386+

16/32/64-bit

§

Jmp_m1664 = 768

JMP m16:64

o64 FF /5

X64

64-bit

§

Push_rm16 = 769

PUSH r/m16

o16 FF /6

8086+

16/32/64-bit

§

Push_rm32 = 770

PUSH r/m32

o32 FF /6

386+

16/32-bit

§

Push_rm64 = 771

PUSH r/m64

o64 FF /6

X64

64-bit

§
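
The FE/FF rows are another ModRM-group opcode: the same byte encodes INC, DEC, CALL, JMP, and PUSH depending on the `/digit` value, with register and memory forms distinguished by the ModRM mod field. A short sketch (illustrative bytes):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

fn main() {
    // FF /2, register form: CALL RAX
    let bytes = [0xFF, 0xD0];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Call_rm64);

    // FF /6, memory form: PUSH qword ptr [RBP+8]
    let bytes = [0xFF, 0x75, 0x08];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Push_rm64);
    assert_eq!(instr.memory_base(), Register::RBP);
}
```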

Sldt_rm16 = 772

SLDT r/m16

o16 0F 00 /0

286+

16/32/64-bit

§

Sldt_r32m16 = 773

SLDT r32/m16

o32 0F 00 /0

386+

16/32/64-bit

§

Sldt_r64m16 = 774

SLDT r64/m16

o64 0F 00 /0

X64

64-bit

§

Str_rm16 = 775

STR r/m16

o16 0F 00 /1

286+

16/32/64-bit

§

Str_r32m16 = 776

STR r32/m16

o32 0F 00 /1

386+

16/32/64-bit

§

Str_r64m16 = 777

STR r64/m16

o64 0F 00 /1

X64

64-bit

§

Lldt_rm16 = 778

LLDT r/m16

o16 0F 00 /2

286+

16/32/64-bit

§

Lldt_r32m16 = 779

LLDT r32/m16

o32 0F 00 /2

386+

16/32/64-bit

§

Lldt_r64m16 = 780

LLDT r64/m16

o64 0F 00 /2

X64

64-bit

§

Ltr_rm16 = 781

LTR r/m16

o16 0F 00 /3

286+

16/32/64-bit

§

Ltr_r32m16 = 782

LTR r32/m16

o32 0F 00 /3

386+

16/32/64-bit

§

Ltr_r64m16 = 783

LTR r64/m16

o64 0F 00 /3

X64

64-bit

§

Verr_rm16 = 784

VERR r/m16

o16 0F 00 /4

286+

16/32/64-bit

§

Verr_r32m16 = 785

VERR r32/m16

o32 0F 00 /4

386+

16/32/64-bit

§

Verr_r64m16 = 786

VERR r64/m16

o64 0F 00 /4

X64

64-bit

§

Verw_rm16 = 787

VERW r/m16

o16 0F 00 /5

286+

16/32/64-bit

§

Verw_r32m16 = 788

VERW r32/m16

o32 0F 00 /5

386+

16/32/64-bit

§

Verw_r64m16 = 789

VERW r64/m16

o64 0F 00 /5

X64

64-bit

§

Jmpe_rm16 = 790

JMPE r/m16

o16 0F 00 /6

IA-64

16/32-bit

§

Jmpe_rm32 = 791

JMPE r/m32

o32 0F 00 /6

IA-64

16/32-bit

§

Sgdt_m1632_16 = 792

SGDT m

o16 0F 01 /0

286+

16/32-bit

§

Sgdt_m1632 = 793

SGDT m

o32 0F 01 /0

386+

16/32-bit

§

Sgdt_m1664 = 794

SGDT m

0F 01 /0

X64

64-bit

§

Sidt_m1632_16 = 795

SIDT m

o16 0F 01 /1

286+

16/32-bit

§

Sidt_m1632 = 796

SIDT m

o32 0F 01 /1

386+

16/32-bit

§

Sidt_m1664 = 797

SIDT m

0F 01 /1

X64

64-bit

§

Lgdt_m1632_16 = 798

LGDT m16&32

o16 0F 01 /2

286+

16/32-bit

§

Lgdt_m1632 = 799

LGDT m16&32

o32 0F 01 /2

386+

16/32-bit

§

Lgdt_m1664 = 800

LGDT m16&64

0F 01 /2

X64

64-bit

§

Lidt_m1632_16 = 801

LIDT m16&32

o16 0F 01 /3

286+

16/32-bit

§

Lidt_m1632 = 802

LIDT m16&32

o32 0F 01 /3

386+

16/32-bit

§

Lidt_m1664 = 803

LIDT m16&64

0F 01 /3

X64

64-bit

§

Smsw_rm16 = 804

SMSW r/m16

o16 0F 01 /4

286+

16/32/64-bit

§

Smsw_r32m16 = 805

SMSW r32/m16

o32 0F 01 /4

386+

16/32/64-bit

§

Smsw_r64m16 = 806

SMSW r64/m16

o64 0F 01 /4

X64

64-bit

§

Rstorssp_m64 = 807

RSTORSSP m64

F3 0F 01 /5

CET_SS

16/32/64-bit

§

Lmsw_rm16 = 808

LMSW r/m16

o16 0F 01 /6

286+

16/32/64-bit

§

Lmsw_r32m16 = 809

LMSW r32/m16

o32 0F 01 /6

386+

16/32/64-bit

§

Lmsw_r64m16 = 810

LMSW r64/m16

o64 0F 01 /6

X64

64-bit

§

Invlpg_m = 811

INVLPG m

0F 01 /7

486+

16/32/64-bit

§

Enclv = 812

ENCLV

NP 0F 01 C0

OSS

16/32/64-bit

§

Vmcall = 813

VMCALL

NP 0F 01 C1

VMX

16/32/64-bit

§

Vmlaunch = 814

VMLAUNCH

NP 0F 01 C2

VMX

16/32/64-bit

§

Vmresume = 815

VMRESUME

NP 0F 01 C3

VMX

16/32/64-bit

§

Vmxoff = 816

VMXOFF

NP 0F 01 C4

VMX

16/32/64-bit

§

Pconfig = 817

PCONFIG

NP 0F 01 C5

PCONFIG

16/32/64-bit

§

Monitorw = 818

MONITOR

a16 NP 0F 01 C8

MONITOR

16/32-bit

§

Monitord = 819

MONITOR

a32 NP 0F 01 C8

MONITOR

16/32/64-bit

§

Monitorq = 820

MONITOR

a64 NP 0F 01 C8

MONITOR

64-bit

§

Mwait = 821

MWAIT

NP 0F 01 C9

MONITOR

16/32/64-bit

§

Clac = 822

CLAC

NP 0F 01 CA

SMAP

16/32/64-bit

§

Stac = 823

STAC

NP 0F 01 CB

SMAP

16/32/64-bit

§

Encls = 824

ENCLS

NP 0F 01 CF

SGX1

16/32/64-bit

§

Xgetbv = 825

XGETBV

NP 0F 01 D0

XSAVE

16/32/64-bit

§

Xsetbv = 826

XSETBV

NP 0F 01 D1

XSAVE

16/32/64-bit

§

Vmfunc = 827

VMFUNC

NP 0F 01 D4

VMX

16/32/64-bit

§

Xend = 828

XEND

NP 0F 01 D5

RTM

16/32/64-bit

§

Xtest = 829

XTEST

NP 0F 01 D6

HLE or RTM

16/32/64-bit

§

Enclu = 830

ENCLU

NP 0F 01 D7

SGX1

16/32/64-bit

§

Vmrunw = 831

VMRUN

a16 0F 01 D8

SVM

16/32-bit

§

Vmrund = 832

VMRUN

a32 0F 01 D8

SVM

16/32/64-bit

§

Vmrunq = 833

VMRUN

a64 0F 01 D8

SVM

64-bit

§

Vmmcall = 834

VMMCALL

0F 01 D9

SVM

16/32/64-bit

§

Vmloadw = 835

VMLOAD

a16 0F 01 DA

SVM

16/32-bit

§

Vmloadd = 836

VMLOAD

a32 0F 01 DA

SVM

16/32/64-bit

§

Vmloadq = 837

VMLOAD

a64 0F 01 DA

SVM

64-bit

§

Vmsavew = 838

VMSAVE

a16 0F 01 DB

SVM

16/32-bit

§

Vmsaved = 839

VMSAVE

a32 0F 01 DB

SVM

16/32/64-bit

§

Vmsaveq = 840

VMSAVE

a64 0F 01 DB

SVM

64-bit

§

Stgi = 841

STGI

0F 01 DC

SKINIT or SVM

16/32/64-bit

§

Clgi = 842

CLGI

0F 01 DD

SVM

16/32/64-bit

§

Skinit = 843

SKINIT

0F 01 DE

SKINIT or SVM

16/32/64-bit

§

Invlpgaw = 844

INVLPGA

a16 0F 01 DF

SVM

16/32-bit

§

Invlpgad = 845

INVLPGA

a32 0F 01 DF

SVM

16/32/64-bit

§

Invlpgaq = 846

INVLPGA

a64 0F 01 DF

SVM

64-bit

§

Setssbsy = 847

SETSSBSY

F3 0F 01 E8

CET_SS

16/32/64-bit

§

Saveprevssp = 848

SAVEPREVSSP

F3 0F 01 EA

CET_SS

16/32/64-bit

§

Rdpkru = 849

RDPKRU

NP 0F 01 EE

PKU

16/32/64-bit

§

Wrpkru = 850

WRPKRU

NP 0F 01 EF

PKU

16/32/64-bit

§

Swapgs = 851

SWAPGS

0F 01 F8

X64

64-bit

§

Rdtscp = 852

RDTSCP

0F 01 F9

RDTSCP

16/32/64-bit

§

Monitorxw = 853

MONITORX

a16 NP 0F 01 FA

MONITORX

16/32-bit

§

Monitorxd = 854

MONITORX

a32 NP 0F 01 FA

MONITORX

16/32/64-bit

§

Monitorxq = 855

MONITORX

a64 NP 0F 01 FA

MONITORX

64-bit

§

Mcommit = 856

MCOMMIT

F3 0F 01 FA

MCOMMIT

16/32/64-bit

§

Mwaitx = 857

MWAITX

NP 0F 01 FB

MONITORX

16/32/64-bit

§

Clzerow = 858

CLZERO

a16 0F 01 FC

CLZERO

16/32-bit

§

Clzerod = 859

CLZERO

a32 0F 01 FC

CLZERO

16/32/64-bit

§

Clzeroq = 860

CLZERO

a64 0F 01 FC

CLZERO

64-bit

§

Rdpru = 861

RDPRU

NP 0F 01 FD

RDPRU

16/32/64-bit

§

Lar_r16_rm16 = 862

LAR r16, r/m16

o16 0F 02 /r

286+

16/32/64-bit

§

Lar_r32_r32m16 = 863

LAR r32, r32/m16

o32 0F 02 /r

386+

16/32/64-bit

§

Lar_r64_r64m16 = 864

LAR r64, r64/m16

o64 0F 02 /r

X64

64-bit

§

Lsl_r16_rm16 = 865

LSL r16, r/m16

o16 0F 03 /r

286+

16/32/64-bit

§

Lsl_r32_r32m16 = 866

LSL r32, r32/m16

o32 0F 03 /r

386+

16/32/64-bit

§

Lsl_r64_r64m16 = 867

LSL r64, r64/m16

o64 0F 03 /r

X64

64-bit

§

Storeall = 868

STOREALL

0F 04

286

16/32-bit

§

Loadall286 = 869

LOADALL

0F 05

286

16/32-bit

§

Syscall = 870

SYSCALL

0F 05

SYSCALL

16/32/64-bit

§

Clts = 871

CLTS

0F 06

286+

16/32/64-bit

§

Loadall386 = 872

LOADALL

0F 07

386

16/32-bit

§

Sysretd = 873

SYSRET

0F 07

SYSCALL

16/32/64-bit

§

Sysretq = 874

SYSRETQ

o64 0F 07

SYSCALL

64-bit

§

Invd = 875

INVD

0F 08

486+

16/32/64-bit

§

Wbinvd = 876

WBINVD

0F 09

486+

16/32/64-bit

§

Wbnoinvd = 877

WBNOINVD

F3 0F 09

WBNOINVD

16/32/64-bit

§

Cl1invmb = 878

CL1INVMB

0F 0A

CL1INVMB

16/32-bit

§

Ud2 = 879

UD2

0F 0B

286+

16/32/64-bit

§

Reservednop_rm16_r16_0F0D = 880

RESERVEDNOP r/m16, r16

o16 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F0D = 881

RESERVEDNOP r/m32, r32

o32 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F0D = 882

RESERVEDNOP r/m64, r64

o64 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Prefetch_m8 = 883

PREFETCH m8

0F 0D /0

PREFETCHW

16/32/64-bit

§

Prefetchw_m8 = 884

PREFETCHW m8

0F 0D /1

PREFETCHW

16/32/64-bit

§

Prefetchwt1_m8 = 885

PREFETCHWT1 m8

0F 0D /2

PREFETCHWT1

16/32/64-bit

§

Femms = 886

FEMMS

0F 0E

3DNOW

16/32/64-bit

§

Umov_rm8_r8 = 887

UMOV r/m8, r8

0F 10 /r

386/486

16/32-bit

§

Umov_rm16_r16 = 888

UMOV r/m16, r16

o16 0F 11 /r

386/486

16/32-bit

§

Umov_rm32_r32 = 889

UMOV r/m32, r32

o32 0F 11 /r

386/486

16/32-bit

§

Umov_r8_rm8 = 890

UMOV r8, r/m8

0F 12 /r

386/486

16/32-bit

§

Umov_r16_rm16 = 891

UMOV r16, r/m16

o16 0F 13 /r

386/486

16/32-bit

§

Umov_r32_rm32 = 892

UMOV r32, r/m32

o32 0F 13 /r

386/486

16/32-bit

§

Movups_xmm_xmmm128 = 893

MOVUPS xmm1, xmm2/m128

NP 0F 10 /r

SSE

16/32/64-bit

§

VEX_Vmovups_xmm_xmmm128 = 894

VMOVUPS xmm1, xmm2/m128

VEX.128.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovups_ymm_ymmm256 = 895

VMOVUPS ymm1, ymm2/m256

VEX.256.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovups_xmm_k1z_xmmm128 = 896

VMOVUPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.0F.W0 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_ymm_k1z_ymmm256 = 897

VMOVUPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.0F.W0 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_zmm_k1z_zmmm512 = 898

VMOVUPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.0F.W0 10 /r

AVX512F

16/32/64-bit

§
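
From this point on, most mnemonics appear three times: a legacy SSE row, `VEX_`-prefixed rows, and `EVEX_`-prefixed rows, each with its own `Code` variant. A sketch showing the decoder separating the three MOVUPS load encodings (byte sequences hand-assembled for illustration):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // NP 0F 10 /r: legacy SSE MOVUPS xmm0, xmmword ptr [rax]
    let bytes = [0x0F, 0x10, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::Movups_xmm_xmmm128);

    // VEX.128.0F.WIG 10 /r: same operation, VEX-encoded
    let bytes = [0xC5, 0xF8, 0x10, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::VEX_Vmovups_xmm_xmmm128);

    // EVEX.128.0F.W0 10 /r: EVEX-encoded, with optional {k1}{z} masking
    let bytes = [0x62, 0xF1, 0x7C, 0x08, 0x10, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder.decode().code(), Code::EVEX_Vmovups_xmm_k1z_xmmm128);
}
```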

Movupd_xmm_xmmm128 = 899

MOVUPD xmm1, xmm2/m128

66 0F 10 /r

SSE2

16/32/64-bit

§

VEX_Vmovupd_xmm_xmmm128 = 900

VMOVUPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovupd_ymm_ymmm256 = 901

VMOVUPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovupd_xmm_k1z_xmmm128 = 902

VMOVUPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_ymm_k1z_ymmm256 = 903

VMOVUPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_zmm_k1z_zmmm512 = 904

VMOVUPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 10 /r

AVX512F

16/32/64-bit

§

Movss_xmm_xmmm32 = 905

MOVSS xmm1, xmm2/m32

F3 0F 10 /r

SSE

16/32/64-bit

§

VEX_Vmovss_xmm_xmm_xmm = 906

VMOVSS xmm1, xmm2, xmm3

VEX.LIG.F3.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovss_xmm_m32 = 907

VMOVSS xmm1, m32

VEX.LIG.F3.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_xmm_xmm = 908

VMOVSS xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.0F.W0 10 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_m32 = 909

VMOVSS xmm1 {k1}{z}, m32

EVEX.LIG.F3.0F.W0 10 /r

AVX512F

16/32/64-bit

§

Movsd_xmm_xmmm64 = 910

MOVSD xmm1, xmm2/m64

F2 0F 10 /r

SSE2

16/32/64-bit

§

VEX_Vmovsd_xmm_xmm_xmm = 911

VMOVSD xmm1, xmm2, xmm3

VEX.LIG.F2.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovsd_xmm_m64 = 912

VMOVSD xmm1, m64

VEX.LIG.F2.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_xmm_xmm = 913

VMOVSD xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F2.0F.W1 10 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_m64 = 914

VMOVSD xmm1 {k1}{z}, m64

EVEX.LIG.F2.0F.W1 10 /r

AVX512F

16/32/64-bit

§

Movups_xmmm128_xmm = 915

MOVUPS xmm2/m128, xmm1

NP 0F 11 /r

SSE

16/32/64-bit

§

VEX_Vmovups_xmmm128_xmm = 916

VMOVUPS xmm2/m128, xmm1

VEX.128.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovups_ymmm256_ymm = 917

VMOVUPS ymm2/m256, ymm1

VEX.256.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovups_xmmm128_k1z_xmm = 918

VMOVUPS xmm2/m128 {k1}{z}, xmm1

EVEX.128.0F.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_ymmm256_k1z_ymm = 919

VMOVUPS ymm2/m256 {k1}{z}, ymm1

EVEX.256.0F.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_zmmm512_k1z_zmm = 920

VMOVUPS zmm2/m512 {k1}{z}, zmm1

EVEX.512.0F.W0 11 /r

AVX512F

16/32/64-bit

§

Movupd_xmmm128_xmm = 921

MOVUPD xmm2/m128, xmm1

66 0F 11 /r

SSE2

16/32/64-bit

§

VEX_Vmovupd_xmmm128_xmm = 922

VMOVUPD xmm2/m128, xmm1

VEX.128.66.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovupd_ymmm256_ymm = 923

VMOVUPD ymm2/m256, ymm1

VEX.256.66.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovupd_xmmm128_k1z_xmm = 924

VMOVUPD xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_ymmm256_k1z_ymm = 925

VMOVUPD ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_zmmm512_k1z_zmm = 926

VMOVUPD zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 11 /r

AVX512F

16/32/64-bit

§

Movss_xmmm32_xmm = 927

MOVSS xmm2/m32, xmm1

F3 0F 11 /r

SSE

16/32/64-bit

§

VEX_Vmovss_xmm_xmm_xmm_0F11 = 928

VMOVSS xmm1, xmm2, xmm3

VEX.LIG.F3.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovss_m32_xmm = 929

VMOVSS m32, xmm1

VEX.LIG.F3.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_xmm_xmm_0F11 = 930

VMOVSS xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.0F.W0 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovss_m32_k1_xmm = 931

VMOVSS m32 {k1}, xmm1

EVEX.LIG.F3.0F.W0 11 /r

AVX512F

16/32/64-bit

§

Movsd_xmmm64_xmm = 932

MOVSD xmm1/m64, xmm2

F2 0F 11 /r

SSE2

16/32/64-bit

§

VEX_Vmovsd_xmm_xmm_xmm_0F11 = 933

VMOVSD xmm1, xmm2, xmm3

VEX.LIG.F2.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovsd_m64_xmm = 934

VMOVSD m64, xmm1

VEX.LIG.F2.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_xmm_xmm_0F11 = 935

VMOVSD xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F2.0F.W1 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovsd_m64_k1_xmm = 936

VMOVSD m64 {k1}, xmm1

EVEX.LIG.F2.0F.W1 11 /r

AVX512F

16/32/64-bit

§

Movhlps_xmm_xmm = 937

MOVHLPS xmm1, xmm2

NP 0F 12 /r

SSE

16/32/64-bit

§

Movlps_xmm_m64 = 938

MOVLPS xmm1, m64

NP 0F 12 /r

SSE

16/32/64-bit

§

VEX_Vmovhlps_xmm_xmm_xmm = 939

VMOVHLPS xmm1, xmm2, xmm3

VEX.128.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovlps_xmm_xmm_m64 = 940

VMOVLPS xmm2, xmm1, m64

VEX.128.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovhlps_xmm_xmm_xmm = 941

VMOVHLPS xmm1, xmm2, xmm3

EVEX.128.0F.W0 12 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovlps_xmm_xmm_m64 = 942

VMOVLPS xmm2, xmm1, m64

EVEX.128.0F.W0 12 /r

AVX512F

16/32/64-bit

§

Movlpd_xmm_m64 = 943

MOVLPD xmm1, m64

66 0F 12 /r

SSE2

16/32/64-bit

§

VEX_Vmovlpd_xmm_xmm_m64 = 944

VMOVLPD xmm2, xmm1, m64

VEX.128.66.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovlpd_xmm_xmm_m64 = 945

VMOVLPD xmm2, xmm1, m64

EVEX.128.66.0F.W1 12 /r

AVX512F

16/32/64-bit

§

Movsldup_xmm_xmmm128 = 946

MOVSLDUP xmm1, xmm2/m128

F3 0F 12 /r

SSE3

16/32/64-bit

§

VEX_Vmovsldup_xmm_xmmm128 = 947

VMOVSLDUP xmm1, xmm2/m128

VEX.128.F3.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovsldup_ymm_ymmm256 = 948

VMOVSLDUP ymm1, ymm2/m256

VEX.256.F3.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovsldup_xmm_k1z_xmmm128 = 949

VMOVSLDUP xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovsldup_ymm_k1z_ymmm256 = 950

VMOVSLDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovsldup_zmm_k1z_zmmm512 = 951

VMOVSLDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 12 /r

AVX512F

16/32/64-bit

§

Movddup_xmm_xmmm64 = 952

MOVDDUP xmm1, xmm2/m64

F2 0F 12 /r

SSE3

16/32/64-bit

§

VEX_Vmovddup_xmm_xmmm64 = 953

VMOVDDUP xmm1, xmm2/m64

VEX.128.F2.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovddup_ymm_ymmm256 = 954

VMOVDDUP ymm1, ymm2/m256

VEX.256.F2.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovddup_xmm_k1z_xmmm64 = 955

VMOVDDUP xmm1 {k1}{z}, xmm2/m64

EVEX.128.F2.0F.W1 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovddup_ymm_k1z_ymmm256 = 956

VMOVDDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W1 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovddup_zmm_k1z_zmmm512 = 957

VMOVDDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W1 12 /r

AVX512F

16/32/64-bit

§

Movlps_m64_xmm = 958

MOVLPS m64, xmm1

NP 0F 13 /r

SSE

16/32/64-bit

§

VEX_Vmovlps_m64_xmm = 959

VMOVLPS m64, xmm1

VEX.128.0F.WIG 13 /r

AVX

16/32/64-bit

§

EVEX_Vmovlps_m64_xmm = 960

VMOVLPS m64, xmm1

EVEX.128.0F.W0 13 /r

AVX512F

16/32/64-bit

§

Movlpd_m64_xmm = 961

MOVLPD m64, xmm1

66 0F 13 /r

SSE2

16/32/64-bit

§

VEX_Vmovlpd_m64_xmm = 962

VMOVLPD m64, xmm1

VEX.128.66.0F.WIG 13 /r

AVX

16/32/64-bit

§

EVEX_Vmovlpd_m64_xmm = 963

VMOVLPD m64, xmm1

EVEX.128.66.0F.W1 13 /r

AVX512F

16/32/64-bit

§

Unpcklps_xmm_xmmm128 = 964

UNPCKLPS xmm1, xmm2/m128

NP 0F 14 /r

SSE

16/32/64-bit

§

VEX_Vunpcklps_xmm_xmm_xmmm128 = 965

VUNPCKLPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 14 /r

AVX

16/32/64-bit

§

VEX_Vunpcklps_ymm_ymm_ymmm256 = 966

VUNPCKLPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 14 /r

AVX

16/32/64-bit

§

EVEX_Vunpcklps_xmm_k1z_xmm_xmmm128b32 = 967

VUNPCKLPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklps_ymm_k1z_ymm_ymmm256b32 = 968

VUNPCKLPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklps_zmm_k1z_zmm_zmmm512b32 = 969

VUNPCKLPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 14 /r

AVX512F

16/32/64-bit

§

Unpcklpd_xmm_xmmm128 = 970

UNPCKLPD xmm1, xmm2/m128

66 0F 14 /r

SSE2

16/32/64-bit

§

VEX_Vunpcklpd_xmm_xmm_xmmm128 = 971

VUNPCKLPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 14 /r

AVX

16/32/64-bit

§

VEX_Vunpcklpd_ymm_ymm_ymmm256 = 972

VUNPCKLPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 14 /r

AVX

16/32/64-bit

§

EVEX_Vunpcklpd_xmm_k1z_xmm_xmmm128b64 = 973

VUNPCKLPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklpd_ymm_k1z_ymm_ymmm256b64 = 974

VUNPCKLPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklpd_zmm_k1z_zmm_zmmm512b64 = 975

VUNPCKLPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 14 /r

AVX512F

16/32/64-bit

§

Unpckhps_xmm_xmmm128 = 976

UNPCKHPS xmm1, xmm2/m128

NP 0F 15 /r

SSE

16/32/64-bit

§

VEX_Vunpckhps_xmm_xmm_xmmm128 = 977

VUNPCKHPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 15 /r

AVX

16/32/64-bit

§

VEX_Vunpckhps_ymm_ymm_ymmm256 = 978

VUNPCKHPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 15 /r

AVX

16/32/64-bit

§

EVEX_Vunpckhps_xmm_k1z_xmm_xmmm128b32 = 979

VUNPCKHPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhps_ymm_k1z_ymm_ymmm256b32 = 980

VUNPCKHPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhps_zmm_k1z_zmm_zmmm512b32 = 981

VUNPCKHPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 15 /r

AVX512F

16/32/64-bit

§

Unpckhpd_xmm_xmmm128 = 982

UNPCKHPD xmm1, xmm2/m128

66 0F 15 /r

SSE2

16/32/64-bit

§

VEX_Vunpckhpd_xmm_xmm_xmmm128 = 983

VUNPCKHPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 15 /r

AVX

16/32/64-bit

§

VEX_Vunpckhpd_ymm_ymm_ymmm256 = 984

VUNPCKHPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 15 /r

AVX

16/32/64-bit

§

EVEX_Vunpckhpd_xmm_k1z_xmm_xmmm128b64 = 985

VUNPCKHPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhpd_ymm_k1z_ymm_ymmm256b64 = 986

VUNPCKHPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhpd_zmm_k1z_zmm_zmmm512b64 = 987

VUNPCKHPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 15 /r

AVX512F

16/32/64-bit

§

Movlhps_xmm_xmm = 988

MOVLHPS xmm1, xmm2

NP 0F 16 /r

SSE

16/32/64-bit

§

VEX_Vmovlhps_xmm_xmm_xmm = 989

VMOVLHPS xmm1, xmm2, xmm3

VEX.128.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovlhps_xmm_xmm_xmm = 990

VMOVLHPS xmm1, xmm2, xmm3

EVEX.128.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhps_xmm_m64 = 991

MOVHPS xmm1, m64

NP 0F 16 /r

SSE

16/32/64-bit

§

VEX_Vmovhps_xmm_xmm_m64 = 992

VMOVHPS xmm2, xmm1, m64

VEX.128.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovhps_xmm_xmm_m64 = 993

VMOVHPS xmm2, xmm1, m64

EVEX.128.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhpd_xmm_m64 = 994

MOVHPD xmm1, m64

66 0F 16 /r

SSE2

16/32/64-bit

§

VEX_Vmovhpd_xmm_xmm_m64 = 995

VMOVHPD xmm2, xmm1, m64

VEX.128.66.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovhpd_xmm_xmm_m64 = 996

VMOVHPD xmm2, xmm1, m64

EVEX.128.66.0F.W1 16 /r

AVX512F

16/32/64-bit

§

Movshdup_xmm_xmmm128 = 997

MOVSHDUP xmm1, xmm2/m128

F3 0F 16 /r

SSE3

16/32/64-bit

§

VEX_Vmovshdup_xmm_xmmm128 = 998

VMOVSHDUP xmm1, xmm2/m128

VEX.128.F3.0F.WIG 16 /r

AVX

16/32/64-bit

§

VEX_Vmovshdup_ymm_ymmm256 = 999

VMOVSHDUP ymm1, ymm2/m256

VEX.256.F3.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovshdup_xmm_k1z_xmmm128 = 1_000

VMOVSHDUP xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovshdup_ymm_k1z_ymmm256 = 1_001

VMOVSHDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovshdup_zmm_k1z_zmmm512 = 1_002

VMOVSHDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhps_m64_xmm = 1_003

MOVHPS m64, xmm1

NP 0F 17 /r

SSE

16/32/64-bit

§

VEX_Vmovhps_m64_xmm = 1_004

VMOVHPS m64, xmm1

VEX.128.0F.WIG 17 /r

AVX

16/32/64-bit

§

EVEX_Vmovhps_m64_xmm = 1_005

VMOVHPS m64, xmm1

EVEX.128.0F.W0 17 /r

AVX512F

16/32/64-bit

§

Movhpd_m64_xmm = 1_006

MOVHPD m64, xmm1

66 0F 17 /r

SSE2

16/32/64-bit

§

VEX_Vmovhpd_m64_xmm = 1_007

VMOVHPD m64, xmm1

VEX.128.66.0F.WIG 17 /r

AVX

16/32/64-bit

§

EVEX_Vmovhpd_m64_xmm = 1_008

VMOVHPD m64, xmm1

EVEX.128.66.0F.W1 17 /r

AVX512F

16/32/64-bit

§

Reservednop_rm16_r16_0F18 = 1_009

RESERVEDNOP r/m16, r16

o16 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F18 = 1_010

RESERVEDNOP r/m32, r32

o32 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F18 = 1_011

RESERVEDNOP r/m64, r64

o64 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F19 = 1_012

RESERVEDNOP r/m16, r16

o16 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F19 = 1_013

RESERVEDNOP r/m32, r32

o32 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F19 = 1_014

RESERVEDNOP r/m64, r64

o64 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1A = 1_015

RESERVEDNOP r/m16, r16

o16 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1A = 1_016

RESERVEDNOP r/m32, r32

o32 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1A = 1_017

RESERVEDNOP r/m64, r64

o64 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1B = 1_018

RESERVEDNOP r/m16, r16

o16 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1B = 1_019

RESERVEDNOP r/m32, r32

o32 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1B = 1_020

RESERVEDNOP r/m64, r64

o64 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1C = 1_021

RESERVEDNOP r/m16, r16

o16 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1C = 1_022

RESERVEDNOP r/m32, r32

o32 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1C = 1_023

RESERVEDNOP r/m64, r64

o64 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1D = 1_024

RESERVEDNOP r/m16, r16

o16 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1D = 1_025

RESERVEDNOP r/m32, r32

o32 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1D = 1_026

RESERVEDNOP r/m64, r64

o64 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1E = 1_027

RESERVEDNOP r/m16, r16

o16 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1E = 1_028

RESERVEDNOP r/m32, r32

o32 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1E = 1_029

RESERVEDNOP r/m64, r64

o64 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1F = 1_030

RESERVEDNOP r/m16, r16

o16 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1F = 1_031

RESERVEDNOP r/m32, r32

o32 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1F = 1_032

RESERVEDNOP r/m64, r64

o64 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Prefetchnta_m8 = 1_033

PREFETCHNTA m8

0F 18 /0

SSE

16/32/64-bit

§

Prefetcht0_m8 = 1_034

PREFETCHT0 m8

0F 18 /1

SSE

16/32/64-bit

§

Prefetcht1_m8 = 1_035

PREFETCHT1 m8

0F 18 /2

SSE

16/32/64-bit

§

Prefetcht2_m8 = 1_036

PREFETCHT2 m8

0F 18 /3

SSE

16/32/64-bit

§

Bndldx_bnd_mib = 1_037

BNDLDX bnd, mib

NP 0F 1A /r

MPX

16/32/64-bit

§

Bndmov_bnd_bndm64 = 1_038

BNDMOV bnd1, bnd2/m64

66 0F 1A /r

MPX

16/32-bit

§

Bndmov_bnd_bndm128 = 1_039

BNDMOV bnd1, bnd2/m128

66 0F 1A /r

MPX

64-bit

§

Bndcl_bnd_rm32 = 1_040

BNDCL bnd, r/m32

F3 0F 1A /r

MPX

16/32-bit

§

Bndcl_bnd_rm64 = 1_041

BNDCL bnd, r/m64

F3 0F 1A /r

MPX

64-bit

§

Bndcu_bnd_rm32 = 1_042

BNDCU bnd, r/m32

F2 0F 1A /r

MPX

16/32-bit

§

Bndcu_bnd_rm64 = 1_043

BNDCU bnd, r/m64

F2 0F 1A /r

MPX

64-bit

§

Bndstx_mib_bnd = 1_044

BNDSTX mib, bnd

NP 0F 1B /r

MPX

16/32/64-bit

§

Bndmov_bndm64_bnd = 1_045

BNDMOV bnd1/m64, bnd2

66 0F 1B /r

MPX

16/32-bit

§

Bndmov_bndm128_bnd = 1_046

BNDMOV bnd1/m128, bnd2

66 0F 1B /r

MPX

64-bit

§

Bndmk_bnd_m32 = 1_047

BNDMK bnd, m32

F3 0F 1B /r

MPX

16/32-bit

§

Bndmk_bnd_m64 = 1_048

BNDMK bnd, m64

F3 0F 1B /r

MPX

64-bit

§

Bndcn_bnd_rm32 = 1_049

BNDCN bnd, r/m32

F2 0F 1B /r

MPX

16/32-bit

§

Bndcn_bnd_rm64 = 1_050

BNDCN bnd, r/m64

F2 0F 1B /r

MPX

64-bit

§

Cldemote_m8 = 1_051

CLDEMOTE m8

NP 0F 1C /0

CLDEMOTE

16/32/64-bit

§

Rdsspd_r32 = 1_052

RDSSPD r32

F3 0F 1E /1

CET_SS

16/32/64-bit

§

Rdsspq_r64 = 1_053

RDSSPQ r64

F3 o64 0F 1E /1

CET_SS

64-bit

§

Endbr64 = 1_054

ENDBR64

F3 0F 1E FA

CET_IBT

16/32/64-bit

§

Endbr32 = 1_055

ENDBR32

F3 0F 1E FB

CET_IBT

16/32/64-bit

§

Nop_rm16 = 1_056

NOP r/m16

o16 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Nop_rm32 = 1_057

NOP r/m32

o32 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Nop_rm64 = 1_058

NOP r/m64

o64 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§
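The 0F 1F /0 rows above are the multi-byte NOP forms commonly used for code alignment. As a minimal sketch (not taken from the crate docs, assuming iced_x86's default decoder feature), decoding the conventional 5-byte padding NOP shows which Code variant it maps to:

use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 0F 1F /0 with a [rax+rax*1] SIB form: the conventional 5-byte alignment NOP.
    let bytes = [0x0F, 0x1F, 0x44, 0x00, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    // With the default 32-bit operand size this is expected to decode as Nop_rm32.
    println!("{:?} ({:?})", instr.code(), instr.code().mnemonic());
    assert_eq!(instr.code(), Code::Nop_rm32);
}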

Mov_r32_cr = 1_059

MOV r32, cr

0F 20 /r

386+

16/32-bit

§

Mov_r64_cr = 1_060

MOV r64, cr

0F 20 /r

X64

64-bit

§

Mov_r32_dr = 1_061

MOV r32, dr

0F 21 /r

386+

16/32-bit

§

Mov_r64_dr = 1_062

MOV r64, dr

0F 21 /r

X64

64-bit

§

Mov_cr_r32 = 1_063

MOV cr, r32

0F 22 /r

386+

16/32-bit

§

Mov_cr_r64 = 1_064

MOV cr, r64

0F 22 /r

X64

64-bit

§

Mov_dr_r32 = 1_065

MOV dr, r32

0F 23 /r

386+

16/32-bit

§

Mov_dr_r64 = 1_066

MOV dr, r64

0F 23 /r

X64

64-bit

§

Mov_r32_tr = 1_067

MOV r32, tr

0F 24 /r

386/486/Cyrix/Geode

16/32-bit

§

Mov_tr_r32 = 1_068

MOV tr, r32

0F 26 /r

386/486/Cyrix/Geode

16/32-bit

§

Movaps_xmm_xmmm128 = 1_069

MOVAPS xmm1, xmm2/m128

NP 0F 28 /r

SSE

16/32/64-bit

§

VEX_Vmovaps_xmm_xmmm128 = 1_070

VMOVAPS xmm1, xmm2/m128

VEX.128.0F.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vmovaps_ymm_ymmm256 = 1_071

VMOVAPS ymm1, ymm2/m256

VEX.256.0F.WIG 28 /r

AVX

16/32/64-bit

§

EVEX_Vmovaps_xmm_k1z_xmmm128 = 1_072

VMOVAPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.0F.W0 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_ymm_k1z_ymmm256 = 1_073

VMOVAPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.0F.W0 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_zmm_k1z_zmmm512 = 1_074

VMOVAPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.0F.W0 28 /r

AVX512F

16/32/64-bit

§
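The three MOVAPS load rows above (legacy SSE, VEX, EVEX) are separate Code values even though the mnemonic is essentially the same. A hedged sketch, assuming the crate's default instr_info feature, showing how Code::encoding() tells them apart:

use iced_x86::{Code, EncodingKind};

fn main() {
    let forms = [
        Code::Movaps_xmm_xmmm128,           // NP 0F 28 /r (SSE)
        Code::VEX_Vmovaps_xmm_xmmm128,      // VEX.128.0F.WIG 28 /r (AVX)
        Code::EVEX_Vmovaps_xmm_k1z_xmmm128, // EVEX.128.0F.W0 28 /r (AVX512VL and AVX512F)
    ];
    for code in forms {
        println!("{:?}: encoding = {:?}", code, code.encoding());
    }
    assert_eq!(Code::EVEX_Vmovaps_xmm_k1z_xmmm128.encoding(), EncodingKind::EVEX);
}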

Movapd_xmm_xmmm128 = 1_075

MOVAPD xmm1, xmm2/m128

66 0F 28 /r

SSE2

16/32/64-bit

§

VEX_Vmovapd_xmm_xmmm128 = 1_076

VMOVAPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vmovapd_ymm_ymmm256 = 1_077

VMOVAPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 28 /r

AVX

16/32/64-bit

§

EVEX_Vmovapd_xmm_k1z_xmmm128 = 1_078

VMOVAPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_ymm_k1z_ymmm256 = 1_079

VMOVAPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_zmm_k1z_zmmm512 = 1_080

VMOVAPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 28 /r

AVX512F

16/32/64-bit

§

Movaps_xmmm128_xmm = 1_081

MOVAPS xmm2/m128, xmm1

NP 0F 29 /r

SSE

16/32/64-bit

§

VEX_Vmovaps_xmmm128_xmm = 1_082

VMOVAPS xmm2/m128, xmm1

VEX.128.0F.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vmovaps_ymmm256_ymm = 1_083

VMOVAPS ymm2/m256, ymm1

VEX.256.0F.WIG 29 /r

AVX

16/32/64-bit

§

EVEX_Vmovaps_xmmm128_k1z_xmm = 1_084

VMOVAPS xmm2/m128 {k1}{z}, xmm1

EVEX.128.0F.W0 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_ymmm256_k1z_ymm = 1_085

VMOVAPS ymm2/m256 {k1}{z}, ymm1

EVEX.256.0F.W0 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_zmmm512_k1z_zmm = 1_086

VMOVAPS zmm2/m512 {k1}{z}, zmm1

EVEX.512.0F.W0 29 /r

AVX512F

16/32/64-bit

§

Movapd_xmmm128_xmm = 1_087

MOVAPD xmm2/m128, xmm1

66 0F 29 /r

SSE2

16/32/64-bit

§

VEX_Vmovapd_xmmm128_xmm = 1_088

VMOVAPD xmm2/m128, xmm1

VEX.128.66.0F.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vmovapd_ymmm256_ymm = 1_089

VMOVAPD ymm2/m256, ymm1

VEX.256.66.0F.WIG 29 /r

AVX

16/32/64-bit

§

EVEX_Vmovapd_xmmm128_k1z_xmm = 1_090

VMOVAPD xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_ymmm256_k1z_ymm = 1_091

VMOVAPD ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_zmmm512_k1z_zmm = 1_092

VMOVAPD zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 29 /r

AVX512F

16/32/64-bit

§

Cvtpi2ps_xmm_mmm64 = 1_093

CVTPI2PS xmm, mm/m64

NP 0F 2A /r

SSE

16/32/64-bit

§

Cvtpi2pd_xmm_mmm64 = 1_094

CVTPI2PD xmm, mm/m64

66 0F 2A /r

SSE2

16/32/64-bit

§

Cvtsi2ss_xmm_rm32 = 1_095

CVTSI2SS xmm1, r/m32

F3 0F 2A /r

SSE

16/32/64-bit

§

Cvtsi2ss_xmm_rm64 = 1_096

CVTSI2SS xmm1, r/m64

F3 o64 0F 2A /r

SSE

64-bit

§

VEX_Vcvtsi2ss_xmm_xmm_rm32 = 1_097

VCVTSI2SS xmm1, xmm2, r/m32

VEX.LIG.F3.0F.W0 2A /r

AVX

16/32/64-bit

§

VEX_Vcvtsi2ss_xmm_xmm_rm64 = 1_098

VCVTSI2SS xmm1, xmm2, r/m64

VEX.LIG.F3.0F.W1 2A /r

AVX

64-bit

§

EVEX_Vcvtsi2ss_xmm_xmm_rm32_er = 1_099

VCVTSI2SS xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.0F.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsi2ss_xmm_xmm_rm64_er = 1_100

VCVTSI2SS xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.0F.W1 2A /r

AVX512F

64-bit

§

Cvtsi2sd_xmm_rm32 = 1_101

CVTSI2SD xmm1, r/m32

F2 0F 2A /r

SSE2

16/32/64-bit

§

Cvtsi2sd_xmm_rm64 = 1_102

CVTSI2SD xmm1, r/m64

F2 o64 0F 2A /r

SSE2

64-bit

§

VEX_Vcvtsi2sd_xmm_xmm_rm32 = 1_103

VCVTSI2SD xmm1, xmm2, r/m32

VEX.LIG.F2.0F.W0 2A /r

AVX

16/32/64-bit

§

VEX_Vcvtsi2sd_xmm_xmm_rm64 = 1_104

VCVTSI2SD xmm1, xmm2, r/m64

VEX.LIG.F2.0F.W1 2A /r

AVX

64-bit

§

EVEX_Vcvtsi2sd_xmm_xmm_rm32_er = 1_105

VCVTSI2SD xmm1, xmm2, r/m32{er}

EVEX.LIG.F2.0F.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsi2sd_xmm_xmm_rm64_er = 1_106

VCVTSI2SD xmm1, xmm2, r/m64{er}

EVEX.LIG.F2.0F.W1 2A /r

AVX512F

64-bit

§

Movntps_m128_xmm = 1_107

MOVNTPS m128, xmm1

NP 0F 2B /r

SSE

16/32/64-bit

§

VEX_Vmovntps_m128_xmm = 1_108

VMOVNTPS m128, xmm1

VEX.128.0F.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vmovntps_m256_ymm = 1_109

VMOVNTPS m256, ymm1

VEX.256.0F.WIG 2B /r

AVX

16/32/64-bit

§

EVEX_Vmovntps_m128_xmm = 1_110

VMOVNTPS m128, xmm1

EVEX.128.0F.W0 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntps_m256_ymm = 1_111

VMOVNTPS m256, ymm1

EVEX.256.0F.W0 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntps_m512_zmm = 1_112

VMOVNTPS m512, zmm1

EVEX.512.0F.W0 2B /r

AVX512F

16/32/64-bit

§

Movntpd_m128_xmm = 1_113

MOVNTPD m128, xmm1

66 0F 2B /r

SSE2

16/32/64-bit

§

VEX_Vmovntpd_m128_xmm = 1_114

VMOVNTPD m128, xmm1

VEX.128.66.0F.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vmovntpd_m256_ymm = 1_115

VMOVNTPD m256, ymm1

VEX.256.66.0F.WIG 2B /r

AVX

16/32/64-bit

§

EVEX_Vmovntpd_m128_xmm = 1_116

VMOVNTPD m128, xmm1

EVEX.128.66.0F.W1 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntpd_m256_ymm = 1_117

VMOVNTPD m256, ymm1

EVEX.256.66.0F.W1 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntpd_m512_zmm = 1_118

VMOVNTPD m512, zmm1

EVEX.512.66.0F.W1 2B /r

AVX512F

16/32/64-bit

§

Movntss_m32_xmm = 1_119

MOVNTSS m32, xmm1

F3 0F 2B /r

SSE4A

16/32/64-bit

§

Movntsd_m64_xmm = 1_120

MOVNTSD m64, xmm1

F2 0F 2B /r

SSE4A

16/32/64-bit

§

Cvttps2pi_mm_xmmm64 = 1_121

CVTTPS2PI mm, xmm/m64

NP 0F 2C /r

SSE

16/32/64-bit

§

Cvttpd2pi_mm_xmmm128 = 1_122

CVTTPD2PI mm, xmm/m128

66 0F 2C /r

SSE2

16/32/64-bit

§

Cvttss2si_r32_xmmm32 = 1_123

CVTTSS2SI r32, xmm1/m32

F3 0F 2C /r

SSE

16/32/64-bit

§

Cvttss2si_r64_xmmm32 = 1_124

CVTTSS2SI r64, xmm1/m32

F3 o64 0F 2C /r

SSE

64-bit

§

VEX_Vcvttss2si_r32_xmmm32 = 1_125

VCVTTSS2SI r32, xmm1/m32

VEX.LIG.F3.0F.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vcvttss2si_r64_xmmm32 = 1_126

VCVTTSS2SI r64, xmm1/m32

VEX.LIG.F3.0F.W1 2C /r

AVX

64-bit

§

EVEX_Vcvttss2si_r32_xmmm32_sae = 1_127

VCVTTSS2SI r32, xmm1/m32{sae}

EVEX.LIG.F3.0F.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttss2si_r64_xmmm32_sae = 1_128

VCVTTSS2SI r64, xmm1/m32{sae}

EVEX.LIG.F3.0F.W1 2C /r

AVX512F

64-bit

§

Cvttsd2si_r32_xmmm64 = 1_129

CVTTSD2SI r32, xmm1/m64

F2 0F 2C /r

SSE2

16/32/64-bit

§

Cvttsd2si_r64_xmmm64 = 1_130

CVTTSD2SI r64, xmm1/m64

F2 o64 0F 2C /r

SSE2

64-bit

§

VEX_Vcvttsd2si_r32_xmmm64 = 1_131

VCVTTSD2SI r32, xmm1/m64

VEX.LIG.F2.0F.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vcvttsd2si_r64_xmmm64 = 1_132

VCVTTSD2SI r64, xmm1/m64

VEX.LIG.F2.0F.W1 2C /r

AVX

64-bit

§

EVEX_Vcvttsd2si_r32_xmmm64_sae = 1_133

VCVTTSD2SI r32, xmm1/m64{sae}

EVEX.LIG.F2.0F.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttsd2si_r64_xmmm64_sae = 1_134

VCVTTSD2SI r64, xmm1/m64{sae}

EVEX.LIG.F2.0F.W1 2C /r

AVX512F

64-bit

§

Cvtps2pi_mm_xmmm64 = 1_135

CVTPS2PI mm, xmm/m64

NP 0F 2D /r

SSE

16/32/64-bit

§

Cvtpd2pi_mm_xmmm128 = 1_136

CVTPD2PI mm, xmm/m128

66 0F 2D /r

SSE2

16/32/64-bit

§

Cvtss2si_r32_xmmm32 = 1_137

CVTSS2SI r32, xmm1/m32

F3 0F 2D /r

SSE

16/32/64-bit

§

Cvtss2si_r64_xmmm32 = 1_138

CVTSS2SI r64, xmm1/m32

F3 o64 0F 2D /r

SSE

64-bit

§

VEX_Vcvtss2si_r32_xmmm32 = 1_139

VCVTSS2SI r32, xmm1/m32

VEX.LIG.F3.0F.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vcvtss2si_r64_xmmm32 = 1_140

VCVTSS2SI r64, xmm1/m32

VEX.LIG.F3.0F.W1 2D /r

AVX

64-bit

§

EVEX_Vcvtss2si_r32_xmmm32_er = 1_141

VCVTSS2SI r32, xmm1/m32{er}

EVEX.LIG.F3.0F.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtss2si_r64_xmmm32_er = 1_142

VCVTSS2SI r64, xmm1/m32{er}

EVEX.LIG.F3.0F.W1 2D /r

AVX512F

64-bit

§

Cvtsd2si_r32_xmmm64 = 1_143

CVTSD2SI r32, xmm1/m64

F2 0F 2D /r

SSE2

16/32/64-bit

§

Cvtsd2si_r64_xmmm64 = 1_144

CVTSD2SI r64, xmm1/m64

F2 o64 0F 2D /r

SSE2

64-bit

§

VEX_Vcvtsd2si_r32_xmmm64 = 1_145

VCVTSD2SI r32, xmm1/m64

VEX.LIG.F2.0F.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vcvtsd2si_r64_xmmm64 = 1_146

VCVTSD2SI r64, xmm1/m64

VEX.LIG.F2.0F.W1 2D /r

AVX

64-bit

§

EVEX_Vcvtsd2si_r32_xmmm64_er = 1_147

VCVTSD2SI r32, xmm1/m64{er}

EVEX.LIG.F2.0F.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsd2si_r64_xmmm64_er = 1_148

VCVTSD2SI r64, xmm1/m64{er}

EVEX.LIG.F2.0F.W1 2D /r

AVX512F

64-bit

§

Ucomiss_xmm_xmmm32 = 1_149

UCOMISS xmm1, xmm2/m32

NP 0F 2E /r

SSE

16/32/64-bit

§

VEX_Vucomiss_xmm_xmmm32 = 1_150

VUCOMISS xmm1, xmm2/m32

VEX.LIG.0F.WIG 2E /r

AVX

16/32/64-bit

§

EVEX_Vucomiss_xmm_xmmm32_sae = 1_151

VUCOMISS xmm1, xmm2/m32{sae}

EVEX.LIG.0F.W0 2E /r

AVX512F

16/32/64-bit

§

Ucomisd_xmm_xmmm64 = 1_152

UCOMISD xmm1, xmm2/m64

66 0F 2E /r

SSE2

16/32/64-bit

§

VEX_Vucomisd_xmm_xmmm64 = 1_153

VUCOMISD xmm1, xmm2/m64

VEX.LIG.66.0F.WIG 2E /r

AVX

16/32/64-bit

§

EVEX_Vucomisd_xmm_xmmm64_sae = 1_154

VUCOMISD xmm1, xmm2/m64{sae}

EVEX.LIG.66.0F.W1 2E /r

AVX512F

16/32/64-bit

§

Comiss_xmm_xmmm32 = 1_155

COMISS xmm1, xmm2/m32

NP 0F 2F /r

SSE

16/32/64-bit

§

Comisd_xmm_xmmm64 = 1_156

COMISD xmm1, xmm2/m64

66 0F 2F /r

SSE2

16/32/64-bit

§

VEX_Vcomiss_xmm_xmmm32 = 1_157

VCOMISS xmm1, xmm2/m32

VEX.LIG.0F.WIG 2F /r

AVX

16/32/64-bit

§

VEX_Vcomisd_xmm_xmmm64 = 1_158

VCOMISD xmm1, xmm2/m64

VEX.LIG.66.0F.WIG 2F /r

AVX

16/32/64-bit

§

EVEX_Vcomiss_xmm_xmmm32_sae = 1_159

VCOMISS xmm1, xmm2/m32{sae}

EVEX.LIG.0F.W0 2F /r

AVX512F

16/32/64-bit

§

EVEX_Vcomisd_xmm_xmmm64_sae = 1_160

VCOMISD xmm1, xmm2/m64{sae}

EVEX.LIG.66.0F.W1 2F /r

AVX512F

16/32/64-bit

§
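The CPUID column in these rows is also exposed programmatically. A minimal sketch, assuming the default instr_info feature, querying the feature list for the EVEX VCOMISD form listed above:

use iced_x86::{Code, CpuidFeature};

fn main() {
    let code = Code::EVEX_Vcomisd_xmm_xmmm64_sae;
    let features: &[CpuidFeature] = code.cpuid_features();
    // Expected to match the AVX512F row above.
    println!("{:?} requires {:?}", code, features);
    assert!(features.contains(&CpuidFeature::AVX512F));
}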

Wrmsr = 1_161

WRMSR

0F 30

MSR

16/32/64-bit

§

Rdtsc = 1_162

RDTSC

0F 31

TSC

16/32/64-bit

§

Rdmsr = 1_163

RDMSR

0F 32

MSR

16/32/64-bit

§

Rdpmc = 1_164

RDPMC

0F 33

Pentium MMX or later, or Pentium Pro or later

16/32/64-bit

§

Sysenter = 1_165

SYSENTER

0F 34

SEP

16/32/64-bit

§

Sysexitd = 1_166

SYSEXIT

0F 35

SEP

16/32/64-bit

§

Sysexitq = 1_167

SYSEXITQ

o64 0F 35

SEP

64-bit

§

Getsecd = 1_168

GETSEC

NP 0F 37

SMX

16/32/64-bit

§

Cmovo_r16_rm16 = 1_169

CMOVO r16, r/m16

o16 0F 40 /r

CMOV

16/32/64-bit

§

Cmovo_r32_rm32 = 1_170

CMOVO r32, r/m32

o32 0F 40 /r

CMOV

16/32/64-bit

§

Cmovo_r64_rm64 = 1_171

CMOVO r64, r/m64

o64 0F 40 /r

CMOV

64-bit

§

Cmovno_r16_rm16 = 1_172

CMOVNO r16, r/m16

o16 0F 41 /r

CMOV

16/32/64-bit

§

Cmovno_r32_rm32 = 1_173

CMOVNO r32, r/m32

o32 0F 41 /r

CMOV

16/32/64-bit

§

Cmovno_r64_rm64 = 1_174

CMOVNO r64, r/m64

o64 0F 41 /r

CMOV

64-bit

§

Cmovb_r16_rm16 = 1_175

CMOVB r16, r/m16

o16 0F 42 /r

CMOV

16/32/64-bit

§

Cmovb_r32_rm32 = 1_176

CMOVB r32, r/m32

o32 0F 42 /r

CMOV

16/32/64-bit

§

Cmovb_r64_rm64 = 1_177

CMOVB r64, r/m64

o64 0F 42 /r

CMOV

64-bit

§

Cmovae_r16_rm16 = 1_178

CMOVAE r16, r/m16

o16 0F 43 /r

CMOV

16/32/64-bit

§

Cmovae_r32_rm32 = 1_179

CMOVAE r32, r/m32

o32 0F 43 /r

CMOV

16/32/64-bit

§

Cmovae_r64_rm64 = 1_180

CMOVAE r64, r/m64

o64 0F 43 /r

CMOV

64-bit

§

Cmove_r16_rm16 = 1_181

CMOVE r16, r/m16

o16 0F 44 /r

CMOV

16/32/64-bit

§

Cmove_r32_rm32 = 1_182

CMOVE r32, r/m32

o32 0F 44 /r

CMOV

16/32/64-bit

§

Cmove_r64_rm64 = 1_183

CMOVE r64, r/m64

o64 0F 44 /r

CMOV

64-bit

§

Cmovne_r16_rm16 = 1_184

CMOVNE r16, r/m16

o16 0F 45 /r

CMOV

16/32/64-bit

§

Cmovne_r32_rm32 = 1_185

CMOVNE r32, r/m32

o32 0F 45 /r

CMOV

16/32/64-bit

§

Cmovne_r64_rm64 = 1_186

CMOVNE r64, r/m64

o64 0F 45 /r

CMOV

64-bit

§

Cmovbe_r16_rm16 = 1_187

CMOVBE r16, r/m16

o16 0F 46 /r

CMOV

16/32/64-bit

§

Cmovbe_r32_rm32 = 1_188

CMOVBE r32, r/m32

o32 0F 46 /r

CMOV

16/32/64-bit

§

Cmovbe_r64_rm64 = 1_189

CMOVBE r64, r/m64

o64 0F 46 /r

CMOV

64-bit

§

Cmova_r16_rm16 = 1_190

CMOVA r16, r/m16

o16 0F 47 /r

CMOV

16/32/64-bit

§

Cmova_r32_rm32 = 1_191

CMOVA r32, r/m32

o32 0F 47 /r

CMOV

16/32/64-bit

§

Cmova_r64_rm64 = 1_192

CMOVA r64, r/m64

o64 0F 47 /r

CMOV

64-bit

§

Cmovs_r16_rm16 = 1_193

CMOVS r16, r/m16

o16 0F 48 /r

CMOV

16/32/64-bit

§

Cmovs_r32_rm32 = 1_194

CMOVS r32, r/m32

o32 0F 48 /r

CMOV

16/32/64-bit

§

Cmovs_r64_rm64 = 1_195

CMOVS r64, r/m64

o64 0F 48 /r

CMOV

64-bit

§

Cmovns_r16_rm16 = 1_196

CMOVNS r16, r/m16

o16 0F 49 /r

CMOV

16/32/64-bit

§

Cmovns_r32_rm32 = 1_197

CMOVNS r32, r/m32

o32 0F 49 /r

CMOV

16/32/64-bit

§

Cmovns_r64_rm64 = 1_198

CMOVNS r64, r/m64

o64 0F 49 /r

CMOV

64-bit

§

Cmovp_r16_rm16 = 1_199

CMOVP r16, r/m16

o16 0F 4A /r

CMOV

16/32/64-bit

§

Cmovp_r32_rm32 = 1_200

CMOVP r32, r/m32

o32 0F 4A /r

CMOV

16/32/64-bit

§

Cmovp_r64_rm64 = 1_201

CMOVP r64, r/m64

o64 0F 4A /r

CMOV

64-bit

§

Cmovnp_r16_rm16 = 1_202

CMOVNP r16, r/m16

o16 0F 4B /r

CMOV

16/32/64-bit

§

Cmovnp_r32_rm32 = 1_203

CMOVNP r32, r/m32

o32 0F 4B /r

CMOV

16/32/64-bit

§

Cmovnp_r64_rm64 = 1_204

CMOVNP r64, r/m64

o64 0F 4B /r

CMOV

64-bit

§

Cmovl_r16_rm16 = 1_205

CMOVL r16, r/m16

o16 0F 4C /r

CMOV

16/32/64-bit

§

Cmovl_r32_rm32 = 1_206

CMOVL r32, r/m32

o32 0F 4C /r

CMOV

16/32/64-bit

§

Cmovl_r64_rm64 = 1_207

CMOVL r64, r/m64

o64 0F 4C /r

CMOV

64-bit

§

Cmovge_r16_rm16 = 1_208

CMOVGE r16, r/m16

o16 0F 4D /r

CMOV

16/32/64-bit

§

Cmovge_r32_rm32 = 1_209

CMOVGE r32, r/m32

o32 0F 4D /r

CMOV

16/32/64-bit

§

Cmovge_r64_rm64 = 1_210

CMOVGE r64, r/m64

o64 0F 4D /r

CMOV

64-bit

§

Cmovle_r16_rm16 = 1_211

CMOVLE r16, r/m16

o16 0F 4E /r

CMOV

16/32/64-bit

§

Cmovle_r32_rm32 = 1_212

CMOVLE r32, r/m32

o32 0F 4E /r

CMOV

16/32/64-bit

§

Cmovle_r64_rm64 = 1_213

CMOVLE r64, r/m64

o64 0F 4E /r

CMOV

64-bit

§

Cmovg_r16_rm16 = 1_214

CMOVG r16, r/m16

o16 0F 4F /r

CMOV

16/32/64-bit

§

Cmovg_r32_rm32 = 1_215

CMOVG r32, r/m32

o32 0F 4F /r

CMOV

16/32/64-bit

§

Cmovg_r64_rm64 = 1_216

CMOVG r64, r/m64

o64 0F 4F /r

CMOV

64-bit

§
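Because the r16/r32/r64 forms of each CMOVcc are distinct Code values, the operand width can be recovered with a plain match on the decoded code. A small illustrative sketch (the helper name is hypothetical):

use iced_x86::Code;

// Hypothetical helper: maps the three CMOVE rows listed above to their operand width in bits.
fn cmove_operand_bits(code: Code) -> Option<u32> {
    match code {
        Code::Cmove_r16_rm16 => Some(16),
        Code::Cmove_r32_rm32 => Some(32),
        Code::Cmove_r64_rm64 => Some(64),
        _ => None,
    }
}

fn main() {
    assert_eq!(cmove_operand_bits(Code::Cmove_r64_rm64), Some(64));
    assert_eq!(cmove_operand_bits(Code::Rdtsc), None);
}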

VEX_Kandw_kr_kr_kr = 1_217

KANDW k1, k2, k3

VEX.L1.0F.W0 41 /r

AVX512F

16/32/64-bit

§

VEX_Kandq_kr_kr_kr = 1_218

KANDQ k1, k2, k3

VEX.L1.0F.W1 41 /r

AVX512BW

16/32/64-bit

§

VEX_Kandb_kr_kr_kr = 1_219

KANDB k1, k2, k3

VEX.L1.66.0F.W0 41 /r

AVX512DQ

16/32/64-bit

§

VEX_Kandd_kr_kr_kr = 1_220

KANDD k1, k2, k3

VEX.L1.66.0F.W1 41 /r

AVX512BW

16/32/64-bit

§

VEX_Kandnw_kr_kr_kr = 1_221

KANDNW k1, k2, k3

VEX.L1.0F.W0 42 /r

AVX512F

16/32/64-bit

§

VEX_Kandnq_kr_kr_kr = 1_222

KANDNQ k1, k2, k3

VEX.L1.0F.W1 42 /r

AVX512BW

16/32/64-bit

§

VEX_Kandnb_kr_kr_kr = 1_223

KANDNB k1, k2, k3

VEX.L1.66.0F.W0 42 /r

AVX512DQ

16/32/64-bit

§

VEX_Kandnd_kr_kr_kr = 1_224

KANDND k1, k2, k3

VEX.L1.66.0F.W1 42 /r

AVX512BW

16/32/64-bit

§

VEX_Knotw_kr_kr = 1_225

KNOTW k1, k2

VEX.L0.0F.W0 44 /r

AVX512F

16/32/64-bit

§

VEX_Knotq_kr_kr = 1_226

KNOTQ k1, k2

VEX.L0.0F.W1 44 /r

AVX512BW

16/32/64-bit

§

VEX_Knotb_kr_kr = 1_227

KNOTB k1, k2

VEX.L0.66.0F.W0 44 /r

AVX512DQ

16/32/64-bit

§

VEX_Knotd_kr_kr = 1_228

KNOTD k1, k2

VEX.L0.66.0F.W1 44 /r

AVX512BW

16/32/64-bit

§

VEX_Korw_kr_kr_kr = 1_229

KORW k1, k2, k3

VEX.L1.0F.W0 45 /r

AVX512F

16/32/64-bit

§

VEX_Korq_kr_kr_kr = 1_230

KORQ k1, k2, k3

VEX.L1.0F.W1 45 /r

AVX512BW

16/32/64-bit

§

VEX_Korb_kr_kr_kr = 1_231

KORB k1, k2, k3

VEX.L1.66.0F.W0 45 /r

AVX512DQ

16/32/64-bit

§

VEX_Kord_kr_kr_kr = 1_232

KORD k1, k2, k3

VEX.L1.66.0F.W1 45 /r

AVX512BW

16/32/64-bit

§

VEX_Kxnorw_kr_kr_kr = 1_233

KXNORW k1, k2, k3

VEX.L1.0F.W0 46 /r

AVX512F

16/32/64-bit

§

VEX_Kxnorq_kr_kr_kr = 1_234

KXNORQ k1, k2, k3

VEX.L1.0F.W1 46 /r

AVX512BW

16/32/64-bit

§

VEX_Kxnorb_kr_kr_kr = 1_235

KXNORB k1, k2, k3

VEX.L1.66.0F.W0 46 /r

AVX512DQ

16/32/64-bit

§

VEX_Kxnord_kr_kr_kr = 1_236

KXNORD k1, k2, k3

VEX.L1.66.0F.W1 46 /r

AVX512BW

16/32/64-bit

§

VEX_Kxorw_kr_kr_kr = 1_237

KXORW k1, k2, k3

VEX.L1.0F.W0 47 /r

AVX512F

16/32/64-bit

§

VEX_Kxorq_kr_kr_kr = 1_238

KXORQ k1, k2, k3

VEX.L1.0F.W1 47 /r

AVX512BW

16/32/64-bit

§

VEX_Kxorb_kr_kr_kr = 1_239

KXORB k1, k2, k3

VEX.L1.66.0F.W0 47 /r

AVX512DQ

16/32/64-bit

§

VEX_Kxord_kr_kr_kr = 1_240

KXORD k1, k2, k3

VEX.L1.66.0F.W1 47 /r

AVX512BW

16/32/64-bit

§

VEX_Kaddw_kr_kr_kr = 1_241

KADDW k1, k2, k3

VEX.L1.0F.W0 4A /r

AVX512DQ

16/32/64-bit

§

VEX_Kaddq_kr_kr_kr = 1_242

KADDQ k1, k2, k3

VEX.L1.0F.W1 4A /r

AVX512BW

16/32/64-bit

§

VEX_Kaddb_kr_kr_kr = 1_243

KADDB k1, k2, k3

VEX.L1.66.0F.W0 4A /r

AVX512DQ

16/32/64-bit

§

VEX_Kaddd_kr_kr_kr = 1_244

KADDD k1, k2, k3

VEX.L1.66.0F.W1 4A /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckwd_kr_kr_kr = 1_245

KUNPCKWD k1, k2, k3

VEX.L1.0F.W0 4B /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckdq_kr_kr_kr = 1_246

KUNPCKDQ k1, k2, k3

VEX.L1.0F.W1 4B /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckbw_kr_kr_kr = 1_247

KUNPCKBW k1, k2, k3

VEX.L1.66.0F.W0 4B /r

AVX512F

16/32/64-bit

§
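The opcode strings shown in these rows (e.g. VEX.L1.66.0F.W0 4B /r) can be recovered at run time from Code::op_code(). A sketch assuming the default op_code_info feature:

use iced_x86::Code;

fn main() {
    let info = Code::VEX_Kunpckbw_kr_kr_kr.op_code();
    // These strings follow the same convention as the encoding column above.
    println!("{}", info.op_code_string());
    println!("{}", info.instruction_string());
}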

Movmskps_r32_xmm = 1_248

MOVMSKPS r32, xmm

NP 0F 50 /r

SSE

16/32/64-bit

§

Movmskps_r64_xmm = 1_249

MOVMSKPS r64, xmm

NP o64 0F 50 /r

SSE

64-bit

§

VEX_Vmovmskps_r32_xmm = 1_250

VMOVMSKPS r32, xmm2

VEX.128.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskps_r64_xmm = 1_251

VMOVMSKPS r64, xmm2

VEX.128.0F.W1 50 /r

AVX

64-bit

§

VEX_Vmovmskps_r32_ymm = 1_252

VMOVMSKPS r32, ymm2

VEX.256.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskps_r64_ymm = 1_253

VMOVMSKPS r64, ymm2

VEX.256.0F.W1 50 /r

AVX

64-bit

§

Movmskpd_r32_xmm = 1_254

MOVMSKPD r32, xmm

66 0F 50 /r

SSE2

16/32/64-bit

§

Movmskpd_r64_xmm = 1_255

MOVMSKPD r64, xmm

66 o64 0F 50 /r

SSE2

64-bit

§

VEX_Vmovmskpd_r32_xmm = 1_256

VMOVMSKPD r32, xmm2

VEX.128.66.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskpd_r64_xmm = 1_257

VMOVMSKPD r64, xmm2

VEX.128.66.0F.W1 50 /r

AVX

64-bit

§

VEX_Vmovmskpd_r32_ymm = 1_258

VMOVMSKPD r32, ymm2

VEX.256.66.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskpd_r64_ymm = 1_259

VMOVMSKPD r64, ymm2

VEX.256.66.0F.W1 50 /r

AVX

64-bit

§

Sqrtps_xmm_xmmm128 = 1_260

SQRTPS xmm1, xmm2/m128

NP 0F 51 /r

SSE

16/32/64-bit

§

VEX_Vsqrtps_xmm_xmmm128 = 1_261

VSQRTPS xmm1, xmm2/m128

VEX.128.0F.WIG 51 /r

AVX

16/32/64-bit

§

VEX_Vsqrtps_ymm_ymmm256 = 1_262

VSQRTPS ymm1, ymm2/m256

VEX.256.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtps_xmm_k1z_xmmm128b32 = 1_263

VSQRTPS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtps_ymm_k1z_ymmm256b32 = 1_264

VSQRTPS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtps_zmm_k1z_zmmm512b32_er = 1_265

VSQRTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 51 /r

AVX512F

16/32/64-bit

§

Sqrtpd_xmm_xmmm128 = 1_266

SQRTPD xmm1, xmm2/m128

66 0F 51 /r

SSE2

16/32/64-bit

§

VEX_Vsqrtpd_xmm_xmmm128 = 1_267

VSQRTPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 51 /r

AVX

16/32/64-bit

§

VEX_Vsqrtpd_ymm_ymmm256 = 1_268

VSQRTPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtpd_xmm_k1z_xmmm128b64 = 1_269

VSQRTPD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtpd_ymm_k1z_ymmm256b64 = 1_270

VSQRTPD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtpd_zmm_k1z_zmmm512b64_er = 1_271

VSQRTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 51 /r

AVX512F

16/32/64-bit

§

Sqrtss_xmm_xmmm32 = 1_272

SQRTSS xmm1, xmm2/m32

F3 0F 51 /r

SSE

16/32/64-bit

§

VEX_Vsqrtss_xmm_xmm_xmmm32 = 1_273

VSQRTSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtss_xmm_k1z_xmm_xmmm32_er = 1_274

VSQRTSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 51 /r

AVX512F

16/32/64-bit

§

Sqrtsd_xmm_xmmm64 = 1_275

SQRTSD xmm1, xmm2/m64

F2 0F 51 /r

SSE2

16/32/64-bit

§

VEX_Vsqrtsd_xmm_xmm_xmmm64 = 1_276

VSQRTSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtsd_xmm_k1z_xmm_xmmm64_er = 1_277

VSQRTSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 51 /r

AVX512F

16/32/64-bit

§

Rsqrtps_xmm_xmmm128 = 1_278

RSQRTPS xmm1, xmm2/m128

NP 0F 52 /r

SSE

16/32/64-bit

§

VEX_Vrsqrtps_xmm_xmmm128 = 1_279

VRSQRTPS xmm1, xmm2/m128

VEX.128.0F.WIG 52 /r

AVX

16/32/64-bit

§

VEX_Vrsqrtps_ymm_ymmm256 = 1_280

VRSQRTPS ymm1, ymm2/m256

VEX.256.0F.WIG 52 /r

AVX

16/32/64-bit

§

Rsqrtss_xmm_xmmm32 = 1_281

RSQRTSS xmm1, xmm2/m32

F3 0F 52 /r

SSE

16/32/64-bit

§

VEX_Vrsqrtss_xmm_xmm_xmmm32 = 1_282

VRSQRTSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 52 /r

AVX

16/32/64-bit

§

Rcpps_xmm_xmmm128 = 1_283

RCPPS xmm1, xmm2/m128

NP 0F 53 /r

SSE

16/32/64-bit

§

VEX_Vrcpps_xmm_xmmm128 = 1_284

VRCPPS xmm1, xmm2/m128

VEX.128.0F.WIG 53 /r

AVX

16/32/64-bit

§

VEX_Vrcpps_ymm_ymmm256 = 1_285

VRCPPS ymm1, ymm2/m256

VEX.256.0F.WIG 53 /r

AVX

16/32/64-bit

§

Rcpss_xmm_xmmm32 = 1_286

RCPSS xmm1, xmm2/m32

F3 0F 53 /r

SSE

16/32/64-bit

§

VEX_Vrcpss_xmm_xmm_xmmm32 = 1_287

VRCPSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 53 /r

AVX

16/32/64-bit

§

Andps_xmm_xmmm128 = 1_288

ANDPS xmm1, xmm2/m128

NP 0F 54 /r

SSE

16/32/64-bit

§

VEX_Vandps_xmm_xmm_xmmm128 = 1_289

VANDPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 54 /r

AVX

16/32/64-bit

§

VEX_Vandps_ymm_ymm_ymmm256 = 1_290

VANDPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 54 /r

AVX

16/32/64-bit

§

EVEX_Vandps_xmm_k1z_xmm_xmmm128b32 = 1_291

VANDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandps_ymm_k1z_ymm_ymmm256b32 = 1_292

VANDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandps_zmm_k1z_zmm_zmmm512b32 = 1_293

VANDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 54 /r

AVX512DQ

16/32/64-bit

§

Andpd_xmm_xmmm128 = 1_294

ANDPD xmm1, xmm2/m128

66 0F 54 /r

SSE2

16/32/64-bit

§

VEX_Vandpd_xmm_xmm_xmmm128 = 1_295

VANDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 54 /r

AVX

16/32/64-bit

§

VEX_Vandpd_ymm_ymm_ymmm256 = 1_296

VANDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 54 /r

AVX

16/32/64-bit

§

EVEX_Vandpd_xmm_k1z_xmm_xmmm128b64 = 1_297

VANDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandpd_ymm_k1z_ymm_ymmm256b64 = 1_298

VANDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandpd_zmm_k1z_zmm_zmmm512b64 = 1_299

VANDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 54 /r

AVX512DQ

16/32/64-bit

§

Andnps_xmm_xmmm128 = 1_300

ANDNPS xmm1, xmm2/m128

NP 0F 55 /r

SSE

16/32/64-bit

§

VEX_Vandnps_xmm_xmm_xmmm128 = 1_301

VANDNPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 55 /r

AVX

16/32/64-bit

§

VEX_Vandnps_ymm_ymm_ymmm256 = 1_302

VANDNPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 55 /r

AVX

16/32/64-bit

§

EVEX_Vandnps_xmm_k1z_xmm_xmmm128b32 = 1_303

VANDNPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnps_ymm_k1z_ymm_ymmm256b32 = 1_304

VANDNPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnps_zmm_k1z_zmm_zmmm512b32 = 1_305

VANDNPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 55 /r

AVX512DQ

16/32/64-bit

§

Andnpd_xmm_xmmm128 = 1_306

ANDNPD xmm1, xmm2/m128

66 0F 55 /r

SSE2

16/32/64-bit

§

VEX_Vandnpd_xmm_xmm_xmmm128 = 1_307

VANDNPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 55 /r

AVX

16/32/64-bit

§

VEX_Vandnpd_ymm_ymm_ymmm256 = 1_308

VANDNPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 55 /r

AVX

16/32/64-bit

§

EVEX_Vandnpd_xmm_k1z_xmm_xmmm128b64 = 1_309

VANDNPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnpd_ymm_k1z_ymm_ymmm256b64 = 1_310

VANDNPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnpd_zmm_k1z_zmm_zmmm512b64 = 1_311

VANDNPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 55 /r

AVX512DQ

16/32/64-bit

§

Orps_xmm_xmmm128 = 1_312

ORPS xmm1, xmm2/m128

NP 0F 56 /r

SSE

16/32/64-bit

§

VEX_Vorps_xmm_xmm_xmmm128 = 1_313

VORPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 56 /r

AVX

16/32/64-bit

§

VEX_Vorps_ymm_ymm_ymmm256 = 1_314

VORPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 56 /r

AVX

16/32/64-bit

§

EVEX_Vorps_xmm_k1z_xmm_xmmm128b32 = 1_315

VORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorps_ymm_k1z_ymm_ymmm256b32 = 1_316

VORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorps_zmm_k1z_zmm_zmmm512b32 = 1_317

VORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 56 /r

AVX512DQ

16/32/64-bit

§

Orpd_xmm_xmmm128 = 1_318

ORPD xmm1, xmm2/m128

66 0F 56 /r

SSE2

16/32/64-bit

§

VEX_Vorpd_xmm_xmm_xmmm128 = 1_319

VORPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 56 /r

AVX

16/32/64-bit

§

VEX_Vorpd_ymm_ymm_ymmm256 = 1_320

VORPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 56 /r

AVX

16/32/64-bit

§

EVEX_Vorpd_xmm_k1z_xmm_xmmm128b64 = 1_321

VORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorpd_ymm_k1z_ymm_ymmm256b64 = 1_322

VORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorpd_zmm_k1z_zmm_zmmm512b64 = 1_323

VORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 56 /r

AVX512DQ

16/32/64-bit

§

Xorps_xmm_xmmm128 = 1_324

XORPS xmm1, xmm2/m128

NP 0F 57 /r

SSE

16/32/64-bit

§

VEX_Vxorps_xmm_xmm_xmmm128 = 1_325

VXORPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 57 /r

AVX

16/32/64-bit

§

VEX_Vxorps_ymm_ymm_ymmm256 = 1_326

VXORPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 57 /r

AVX

16/32/64-bit

§

EVEX_Vxorps_xmm_k1z_xmm_xmmm128b32 = 1_327

VXORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorps_ymm_k1z_ymm_ymmm256b32 = 1_328

VXORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorps_zmm_k1z_zmm_zmmm512b32 = 1_329

VXORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 57 /r

AVX512DQ

16/32/64-bit

§

Xorpd_xmm_xmmm128 = 1_330

XORPD xmm1, xmm2/m128

66 0F 57 /r

SSE2

16/32/64-bit

§

VEX_Vxorpd_xmm_xmm_xmmm128 = 1_331

VXORPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 57 /r

AVX

16/32/64-bit

§

VEX_Vxorpd_ymm_ymm_ymmm256 = 1_332

VXORPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 57 /r

AVX

16/32/64-bit

§

EVEX_Vxorpd_xmm_k1z_xmm_xmmm128b64 = 1_333

VXORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorpd_ymm_k1z_ymm_ymmm256b64 = 1_334

VXORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorpd_zmm_k1z_zmm_zmmm512b64 = 1_335

VXORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 57 /r

AVX512DQ

16/32/64-bit

§

Addps_xmm_xmmm128 = 1_336

ADDPS xmm1, xmm2/m128

NP 0F 58 /r

SSE

16/32/64-bit

§

VEX_Vaddps_xmm_xmm_xmmm128 = 1_337

VADDPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 58 /r

AVX

16/32/64-bit

§

VEX_Vaddps_ymm_ymm_ymmm256 = 1_338

VADDPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddps_xmm_k1z_xmm_xmmm128b32 = 1_339

VADDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddps_ymm_k1z_ymm_ymmm256b32 = 1_340

VADDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddps_zmm_k1z_zmm_zmmm512b32_er = 1_341

VADDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 58 /r

AVX512F

16/32/64-bit

§

Addpd_xmm_xmmm128 = 1_342

ADDPD xmm1, xmm2/m128

66 0F 58 /r

SSE2

16/32/64-bit

§

VEX_Vaddpd_xmm_xmm_xmmm128 = 1_343

VADDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 58 /r

AVX

16/32/64-bit

§

VEX_Vaddpd_ymm_ymm_ymmm256 = 1_344

VADDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddpd_xmm_k1z_xmm_xmmm128b64 = 1_345

VADDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddpd_ymm_k1z_ymm_ymmm256b64 = 1_346

VADDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddpd_zmm_k1z_zmm_zmmm512b64_er = 1_347

VADDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 58 /r

AVX512F

16/32/64-bit

§

Addss_xmm_xmmm32 = 1_348

ADDSS xmm1, xmm2/m32

F3 0F 58 /r

SSE

16/32/64-bit

§

VEX_Vaddss_xmm_xmm_xmmm32 = 1_349

VADDSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddss_xmm_k1z_xmm_xmmm32_er = 1_350

VADDSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 58 /r

AVX512F

16/32/64-bit

§

Addsd_xmm_xmmm64 = 1_351

ADDSD xmm1, xmm2/m64

F2 0F 58 /r

SSE2

16/32/64-bit

§

VEX_Vaddsd_xmm_xmm_xmmm64 = 1_352

VADDSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddsd_xmm_k1z_xmm_xmmm64_er = 1_353

VADDSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 58 /r

AVX512F

16/32/64-bit

§

Mulps_xmm_xmmm128 = 1_354

MULPS xmm1, xmm2/m128

NP 0F 59 /r

SSE

16/32/64-bit

§

VEX_Vmulps_xmm_xmm_xmmm128 = 1_355

VMULPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 59 /r

AVX

16/32/64-bit

§

VEX_Vmulps_ymm_ymm_ymmm256 = 1_356

VMULPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulps_xmm_k1z_xmm_xmmm128b32 = 1_357

VMULPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulps_ymm_k1z_ymm_ymmm256b32 = 1_358

VMULPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulps_zmm_k1z_zmm_zmmm512b32_er = 1_359

VMULPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 59 /r

AVX512F

16/32/64-bit

§

Mulpd_xmm_xmmm128 = 1_360

MULPD xmm1, xmm2/m128

66 0F 59 /r

SSE2

16/32/64-bit

§

VEX_Vmulpd_xmm_xmm_xmmm128 = 1_361

VMULPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 59 /r

AVX

16/32/64-bit

§

VEX_Vmulpd_ymm_ymm_ymmm256 = 1_362

VMULPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulpd_xmm_k1z_xmm_xmmm128b64 = 1_363

VMULPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulpd_ymm_k1z_ymm_ymmm256b64 = 1_364

VMULPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulpd_zmm_k1z_zmm_zmmm512b64_er = 1_365

VMULPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 59 /r

AVX512F

16/32/64-bit

§

Mulss_xmm_xmmm32 = 1_366

MULSS xmm1, xmm2/m32

F3 0F 59 /r

SSE

16/32/64-bit

§

VEX_Vmulss_xmm_xmm_xmmm32 = 1_367

VMULSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulss_xmm_k1z_xmm_xmmm32_er = 1_368

VMULSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 59 /r

AVX512F

16/32/64-bit

§

Mulsd_xmm_xmmm64 = 1_369

MULSD xmm1, xmm2/m64

F2 0F 59 /r

SSE2

16/32/64-bit

§

VEX_Vmulsd_xmm_xmm_xmmm64 = 1_370

VMULSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulsd_xmm_k1z_xmm_xmmm64_er = 1_371

VMULSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 59 /r

AVX512F

16/32/64-bit

§

Cvtps2pd_xmm_xmmm64 = 1_372

CVTPS2PD xmm1, xmm2/m64

NP 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtps2pd_xmm_xmmm64 = 1_373

VCVTPS2PD xmm1, xmm2/m64

VEX.128.0F.WIG 5A /r

AVX

16/32/64-bit

§

VEX_Vcvtps2pd_ymm_xmmm128 = 1_374

VCVTPS2PD ymm1, xmm2/m128

VEX.256.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtps2pd_xmm_k1z_xmmm64b32 = 1_375

VCVTPS2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.0F.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2pd_ymm_k1z_xmmm128b32 = 1_376

VCVTPS2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.0F.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2pd_zmm_k1z_ymmm256b32_sae = 1_377

VCVTPS2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.0F.W0 5A /r

AVX512F

16/32/64-bit

§

Cvtpd2ps_xmm_xmmm128 = 1_378

CVTPD2PS xmm1, xmm2/m128

66 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtpd2ps_xmm_xmmm128 = 1_379

VCVTPD2PS xmm1, xmm2/m128

VEX.128.66.0F.WIG 5A /r

AVX

16/32/64-bit

§

VEX_Vcvtpd2ps_xmm_ymmm256 = 1_380

VCVTPD2PS xmm1, ymm2/m256

VEX.256.66.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtpd2ps_xmm_k1z_xmmm128b64 = 1_381

VCVTPD2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2ps_xmm_k1z_ymmm256b64 = 1_382

VCVTPD2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2ps_ymm_k1z_zmmm512b64_er = 1_383

VCVTPD2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 5A /r

AVX512F

16/32/64-bit

§

Cvtss2sd_xmm_xmmm32 = 1_384

CVTSS2SD xmm1, xmm2/m32

F3 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtss2sd_xmm_xmm_xmmm32 = 1_385

VCVTSS2SD xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtss2sd_xmm_k1z_xmm_xmmm32_sae = 1_386

VCVTSS2SD xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5A /r

AVX512F

16/32/64-bit

§

Cvtsd2ss_xmm_xmmm64 = 1_387

CVTSD2SS xmm1, xmm2/m64

F2 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtsd2ss_xmm_xmm_xmmm64 = 1_388

VCVTSD2SS xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtsd2ss_xmm_k1z_xmm_xmmm64_er = 1_389

VCVTSD2SS xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5A /r

AVX512F

16/32/64-bit

§

Cvtdq2ps_xmm_xmmm128 = 1_390

CVTDQ2PS xmm1, xmm2/m128

NP 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvtdq2ps_xmm_xmmm128 = 1_391

VCVTDQ2PS xmm1, xmm2/m128

VEX.128.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvtdq2ps_ymm_ymmm256 = 1_392

VCVTDQ2PS ymm1, ymm2/m256

VEX.256.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvtdq2ps_xmm_k1z_xmmm128b32 = 1_393

VCVTDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2ps_ymm_k1z_ymmm256b32 = 1_394

VCVTDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2ps_zmm_k1z_zmmm512b32_er = 1_395

VCVTDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 5B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtqq2ps_xmm_k1z_xmmm128b64 = 1_396

VCVTQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 5B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2ps_xmm_k1z_ymmm256b64 = 1_397

VCVTQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 5B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2ps_ymm_k1z_zmmm512b64_er = 1_398

VCVTQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.0F.W1 5B /r

AVX512DQ

16/32/64-bit

§

Cvtps2dq_xmm_xmmm128 = 1_399

CVTPS2DQ xmm1, xmm2/m128

66 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvtps2dq_xmm_xmmm128 = 1_400

VCVTPS2DQ xmm1, xmm2/m128

VEX.128.66.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvtps2dq_ymm_ymmm256 = 1_401

VCVTPS2DQ ymm1, ymm2/m256

VEX.256.66.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvtps2dq_xmm_k1z_xmmm128b32 = 1_402

VCVTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2dq_ymm_k1z_ymmm256b32 = 1_403

VCVTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2dq_zmm_k1z_zmmm512b32_er = 1_404

VCVTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.66.0F.W0 5B /r

AVX512F

16/32/64-bit

§

Cvttps2dq_xmm_xmmm128 = 1_405

CVTTPS2DQ xmm1, xmm2/m128

F3 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvttps2dq_xmm_xmmm128 = 1_406

VCVTTPS2DQ xmm1, xmm2/m128

VEX.128.F3.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvttps2dq_ymm_ymmm256 = 1_407

VCVTTPS2DQ ymm1, ymm2/m256

VEX.256.F3.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvttps2dq_xmm_k1z_xmmm128b32 = 1_408

VCVTTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F3.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2dq_ymm_k1z_ymmm256b32 = 1_409

VCVTTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F3.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2dq_zmm_k1z_zmmm512b32_sae = 1_410

VCVTTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.F3.0F.W0 5B /r

AVX512F

16/32/64-bit

§

Subps_xmm_xmmm128 = 1_411

SUBPS xmm1, xmm2/m128

NP 0F 5C /r

SSE

16/32/64-bit

§

VEX_Vsubps_xmm_xmm_xmmm128 = 1_412

VSUBPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5C /r

AVX

16/32/64-bit

§

VEX_Vsubps_ymm_ymm_ymmm256 = 1_413

VSUBPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubps_xmm_k1z_xmm_xmmm128b32 = 1_414

VSUBPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubps_ymm_k1z_ymm_ymmm256b32 = 1_415

VSUBPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubps_zmm_k1z_zmm_zmmm512b32_er = 1_416

VSUBPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 5C /r

AVX512F

16/32/64-bit

§

Subpd_xmm_xmmm128 = 1_417

SUBPD xmm1, xmm2/m128

66 0F 5C /r

SSE2

16/32/64-bit

§

VEX_Vsubpd_xmm_xmm_xmmm128 = 1_418

VSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5C /r

AVX

16/32/64-bit

§

VEX_Vsubpd_ymm_ymm_ymmm256 = 1_419

VSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubpd_xmm_k1z_xmm_xmmm128b64 = 1_420

VSUBPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubpd_ymm_k1z_ymm_ymmm256b64 = 1_421

VSUBPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubpd_zmm_k1z_zmm_zmmm512b64_er = 1_422

VSUBPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 5C /r

AVX512F

16/32/64-bit

§

Subss_xmm_xmmm32 = 1_423

SUBSS xmm1, xmm2/m32

F3 0F 5C /r

SSE

16/32/64-bit

§

VEX_Vsubss_xmm_xmm_xmmm32 = 1_424

VSUBSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubss_xmm_k1z_xmm_xmmm32_er = 1_425

VSUBSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 5C /r

AVX512F

16/32/64-bit

§

Subsd_xmm_xmmm64 = 1_426

SUBSD xmm1, xmm2/m64

F2 0F 5C /r

SSE2

16/32/64-bit

§

VEX_Vsubsd_xmm_xmm_xmmm64 = 1_427

VSUBSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubsd_xmm_k1z_xmm_xmmm64_er = 1_428

VSUBSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5C /r

AVX512F

16/32/64-bit

§

Minps_xmm_xmmm128 = 1_429

MINPS xmm1, xmm2/m128

NP 0F 5D /r

SSE

16/32/64-bit

§

VEX_Vminps_xmm_xmm_xmmm128 = 1_430

VMINPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5D /r

AVX

16/32/64-bit

§

VEX_Vminps_ymm_ymm_ymmm256 = 1_431

VMINPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminps_xmm_k1z_xmm_xmmm128b32 = 1_432

VMINPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminps_ymm_k1z_ymm_ymmm256b32 = 1_433

VMINPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminps_zmm_k1z_zmm_zmmm512b32_sae = 1_434

VMINPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}

EVEX.512.0F.W0 5D /r

AVX512F

16/32/64-bit

§

Minpd_xmm_xmmm128 = 1_435

MINPD xmm1, xmm2/m128

66 0F 5D /r

SSE2

16/32/64-bit

§

VEX_Vminpd_xmm_xmm_xmmm128 = 1_436

VMINPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5D /r

AVX

16/32/64-bit

§

VEX_Vminpd_ymm_ymm_ymmm256 = 1_437

VMINPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminpd_xmm_k1z_xmm_xmmm128b64 = 1_438

VMINPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminpd_ymm_k1z_ymm_ymmm256b64 = 1_439

VMINPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminpd_zmm_k1z_zmm_zmmm512b64_sae = 1_440

VMINPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}

EVEX.512.66.0F.W1 5D /r

AVX512F

16/32/64-bit

§

Minss_xmm_xmmm32 = 1_441

MINSS xmm1, xmm2/m32

F3 0F 5D /r

SSE

16/32/64-bit

§

VEX_Vminss_xmm_xmm_xmmm32 = 1_442

VMINSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminss_xmm_k1z_xmm_xmmm32_sae = 1_443

VMINSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5D /r

AVX512F

16/32/64-bit

§

Minsd_xmm_xmmm64 = 1_444

MINSD xmm1, xmm2/m64

F2 0F 5D /r

SSE2

16/32/64-bit

§

VEX_Vminsd_xmm_xmm_xmmm64 = 1_445

VMINSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminsd_xmm_k1z_xmm_xmmm64_sae = 1_446

VMINSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.F2.0F.W1 5D /r

AVX512F

16/32/64-bit

§

Divps_xmm_xmmm128 = 1_447

DIVPS xmm1, xmm2/m128

NP 0F 5E /r

SSE

16/32/64-bit

§

VEX_Vdivps_xmm_xmm_xmmm128 = 1_448

VDIVPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5E /r

AVX

16/32/64-bit

§

VEX_Vdivps_ymm_ymm_ymmm256 = 1_449

VDIVPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivps_xmm_k1z_xmm_xmmm128b32 = 1_450

VDIVPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivps_ymm_k1z_ymm_ymmm256b32 = 1_451

VDIVPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivps_zmm_k1z_zmm_zmmm512b32_er = 1_452

VDIVPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 5E /r

AVX512F

16/32/64-bit

§

Divpd_xmm_xmmm128 = 1_453

DIVPD xmm1, xmm2/m128

66 0F 5E /r

SSE2

16/32/64-bit

§

VEX_Vdivpd_xmm_xmm_xmmm128 = 1_454

VDIVPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5E /r

AVX

16/32/64-bit

§

VEX_Vdivpd_ymm_ymm_ymmm256 = 1_455

VDIVPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivpd_xmm_k1z_xmm_xmmm128b64 = 1_456

VDIVPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivpd_ymm_k1z_ymm_ymmm256b64 = 1_457

VDIVPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivpd_zmm_k1z_zmm_zmmm512b64_er = 1_458

VDIVPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 5E /r

AVX512F

16/32/64-bit

§

Divss_xmm_xmmm32 = 1_459

DIVSS xmm1, xmm2/m32

F3 0F 5E /r

SSE

16/32/64-bit

§

VEX_Vdivss_xmm_xmm_xmmm32 = 1_460

VDIVSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivss_xmm_k1z_xmm_xmmm32_er = 1_461

VDIVSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 5E /r

AVX512F

16/32/64-bit

§

Divsd_xmm_xmmm64 = 1_462

DIVSD xmm1, xmm2/m64

F2 0F 5E /r

SSE2

16/32/64-bit

§

VEX_Vdivsd_xmm_xmm_xmmm64 = 1_463

VDIVSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivsd_xmm_k1z_xmm_xmmm64_er = 1_464

VDIVSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5E /r

AVX512F

16/32/64-bit

§

Maxps_xmm_xmmm128 = 1_465

MAXPS xmm1, xmm2/m128

NP 0F 5F /r

SSE

16/32/64-bit

§

VEX_Vmaxps_xmm_xmm_xmmm128 = 1_466

VMAXPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5F /r

AVX

16/32/64-bit

§

VEX_Vmaxps_ymm_ymm_ymmm256 = 1_467

VMAXPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxps_xmm_k1z_xmm_xmmm128b32 = 1_468

VMAXPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxps_ymm_k1z_ymm_ymmm256b32 = 1_469

VMAXPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxps_zmm_k1z_zmm_zmmm512b32_sae = 1_470

VMAXPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}

EVEX.512.0F.W0 5F /r

AVX512F

16/32/64-bit

§

Maxpd_xmm_xmmm128 = 1_471

MAXPD xmm1, xmm2/m128

66 0F 5F /r

SSE2

16/32/64-bit

§

VEX_Vmaxpd_xmm_xmm_xmmm128 = 1_472

VMAXPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5F /r

AVX

16/32/64-bit

§

VEX_Vmaxpd_ymm_ymm_ymmm256 = 1_473

VMAXPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxpd_xmm_k1z_xmm_xmmm128b64 = 1_474

VMAXPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxpd_ymm_k1z_ymm_ymmm256b64 = 1_475

VMAXPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxpd_zmm_k1z_zmm_zmmm512b64_sae = 1_476

VMAXPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}

EVEX.512.66.0F.W1 5F /r

AVX512F

16/32/64-bit

§

Maxss_xmm_xmmm32 = 1_477

MAXSS xmm1, xmm2/m32

F3 0F 5F /r

SSE

16/32/64-bit

§

VEX_Vmaxss_xmm_xmm_xmmm32 = 1_478

VMAXSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxss_xmm_k1z_xmm_xmmm32_sae = 1_479

VMAXSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5F /r

AVX512F

16/32/64-bit

§

Maxsd_xmm_xmmm64 = 1_480

MAXSD xmm1, xmm2/m64

F2 0F 5F /r

SSE2

16/32/64-bit

§

VEX_Vmaxsd_xmm_xmm_xmmm64 = 1_481

VMAXSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxsd_xmm_k1z_xmm_xmmm64_sae = 1_482

VMAXSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.F2.0F.W1 5F /r

AVX512F

16/32/64-bit

§
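Many of the EVEX rows above (the {er}/{sae} forms in particular) exist only as EVEX encodings. As a rough sketch, assuming a recent iced_x86 version where Code::values() and the default instr_info feature are available, they can be counted in bulk:

use iced_x86::{Code, EncodingKind};

fn main() {
    // Code::values() is assumed to iterate every variant of this enum.
    let evex_count = Code::values()
        .filter(|c| c.encoding() == EncodingKind::EVEX)
        .count();
    println!("EVEX-encoded Code variants: {}", evex_count);
}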

Punpcklbw_mm_mmm32 = 1_483

PUNPCKLBW mm, mm/m32

NP 0F 60 /r

MMX

16/32/64-bit

§

Punpcklbw_xmm_xmmm128 = 1_484

PUNPCKLBW xmm1, xmm2/m128

66 0F 60 /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklbw_xmm_xmm_xmmm128 = 1_485

VPUNPCKLBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 60 /r

AVX

16/32/64-bit

§

VEX_Vpunpcklbw_ymm_ymm_ymmm256 = 1_486

VPUNPCKLBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 60 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklbw_xmm_k1z_xmm_xmmm128 = 1_487

VPUNPCKLBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 60 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklbw_ymm_k1z_ymm_ymmm256 = 1_488

VPUNPCKLBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 60 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklbw_zmm_k1z_zmm_zmmm512 = 1_489

VPUNPCKLBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 60 /r

AVX512BW

16/32/64-bit

§

Punpcklwd_mm_mmm32 = 1_490

PUNPCKLWD mm, mm/m32

NP 0F 61 /r

MMX

16/32/64-bit

§

Punpcklwd_xmm_xmmm128 = 1_491

PUNPCKLWD xmm1, xmm2/m128

66 0F 61 /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklwd_xmm_xmm_xmmm128 = 1_492

VPUNPCKLWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 61 /r

AVX

16/32/64-bit

§

VEX_Vpunpcklwd_ymm_ymm_ymmm256 = 1_493

VPUNPCKLWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 61 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklwd_xmm_k1z_xmm_xmmm128 = 1_494

VPUNPCKLWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 61 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklwd_ymm_k1z_ymm_ymmm256 = 1_495

VPUNPCKLWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 61 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklwd_zmm_k1z_zmm_zmmm512 = 1_496

VPUNPCKLWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 61 /r

AVX512BW

16/32/64-bit

§

Punpckldq_mm_mmm32 = 1_497

PUNPCKLDQ mm, mm/m32

NP 0F 62 /r

MMX

16/32/64-bit

§

Punpckldq_xmm_xmmm128 = 1_498

PUNPCKLDQ xmm1, xmm2/m128

66 0F 62 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckldq_xmm_xmm_xmmm128 = 1_499

VPUNPCKLDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 62 /r

AVX

16/32/64-bit

§

VEX_Vpunpckldq_ymm_ymm_ymmm256 = 1_500

VPUNPCKLDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 62 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckldq_xmm_k1z_xmm_xmmm128b32 = 1_501

VPUNPCKLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 62 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckldq_ymm_k1z_ymm_ymmm256b32 = 1_502

VPUNPCKLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 62 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckldq_zmm_k1z_zmm_zmmm512b32 = 1_503

VPUNPCKLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 62 /r

AVX512F

16/32/64-bit

§

Packsswb_mm_mmm64 = 1_504

PACKSSWB mm1, mm2/m64

NP 0F 63 /r

MMX

16/32/64-bit

§

Packsswb_xmm_xmmm128 = 1_505

PACKSSWB xmm1, xmm2/m128

66 0F 63 /r

SSE2

16/32/64-bit

§

VEX_Vpacksswb_xmm_xmm_xmmm128 = 1_506

VPACKSSWB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 63 /r

AVX

16/32/64-bit

§

VEX_Vpacksswb_ymm_ymm_ymmm256 = 1_507

VPACKSSWB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 63 /r

AVX2

16/32/64-bit

§

EVEX_Vpacksswb_xmm_k1z_xmm_xmmm128 = 1_508

VPACKSSWB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 63 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpacksswb_ymm_k1z_ymm_ymmm256 = 1_509

VPACKSSWB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 63 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpacksswb_zmm_k1z_zmm_zmmm512 = 1_510

VPACKSSWB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 63 /r

AVX512BW

16/32/64-bit

§

Pcmpgtb_mm_mmm64 = 1_511

PCMPGTB mm, mm/m64

NP 0F 64 /r

MMX

16/32/64-bit

§

Pcmpgtb_xmm_xmmm128 = 1_512

PCMPGTB xmm1, xmm2/m128

66 0F 64 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtb_xmm_xmm_xmmm128 = 1_513

VPCMPGTB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 64 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtb_ymm_ymm_ymmm256 = 1_514

VPCMPGTB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 64 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_xmm_xmmm128 = 1_515

VPCMPGTB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 64 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_ymm_ymmm256 = 1_516

VPCMPGTB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 64 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_zmm_zmmm512 = 1_517

VPCMPGTB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 64 /r

AVX512BW

16/32/64-bit

§

Pcmpgtw_mm_mmm64 = 1_518

PCMPGTW mm, mm/m64

NP 0F 65 /r

MMX

16/32/64-bit

§

Pcmpgtw_xmm_xmmm128 = 1_519

PCMPGTW xmm1, xmm2/m128

66 0F 65 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtw_xmm_xmm_xmmm128 = 1_520

VPCMPGTW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 65 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtw_ymm_ymm_ymmm256 = 1_521

VPCMPGTW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 65 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_xmm_xmmm128 = 1_522

VPCMPGTW k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 65 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_ymm_ymmm256 = 1_523

VPCMPGTW k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 65 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_zmm_zmmm512 = 1_524

VPCMPGTW k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 65 /r

AVX512BW

16/32/64-bit

§

Pcmpgtd_mm_mmm64 = 1_525

PCMPGTD mm, mm/m64

NP 0F 66 /r

MMX

16/32/64-bit

§

Pcmpgtd_xmm_xmmm128 = 1_526

PCMPGTD xmm1, xmm2/m128

66 0F 66 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtd_xmm_xmm_xmmm128 = 1_527

VPCMPGTD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 66 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtd_ymm_ymm_ymmm256 = 1_528

VPCMPGTD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 66 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_xmm_xmmm128b32 = 1_529

VPCMPGTD k1 {k2}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 66 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_ymm_ymmm256b32 = 1_530

VPCMPGTD k1 {k2}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 66 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_zmm_zmmm512b32 = 1_531

VPCMPGTD k1 {k2}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 66 /r

AVX512F

16/32/64-bit

§

Packuswb_mm_mmm64 = 1_532

PACKUSWB mm, mm/m64

NP 0F 67 /r

MMX

16/32/64-bit

§

Packuswb_xmm_xmmm128 = 1_533

PACKUSWB xmm1, xmm2/m128

66 0F 67 /r

SSE2

16/32/64-bit

§

VEX_Vpackuswb_xmm_xmm_xmmm128 = 1_534

VPACKUSWB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 67 /r

AVX

16/32/64-bit

§

VEX_Vpackuswb_ymm_ymm_ymmm256 = 1_535

VPACKUSWB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 67 /r

AVX2

16/32/64-bit

§

EVEX_Vpackuswb_xmm_k1z_xmm_xmmm128 = 1_536

VPACKUSWB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 67 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackuswb_ymm_k1z_ymm_ymmm256 = 1_537

VPACKUSWB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 67 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackuswb_zmm_k1z_zmm_zmmm512 = 1_538

VPACKUSWB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 67 /r

AVX512BW

16/32/64-bit

§

Punpckhbw_mm_mmm64 = 1_539

PUNPCKHBW mm, mm/m64

NP 0F 68 /r

MMX

16/32/64-bit

§

Punpckhbw_xmm_xmmm128 = 1_540

PUNPCKHBW xmm1, xmm2/m128

66 0F 68 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhbw_xmm_xmm_xmmm128 = 1_541

VPUNPCKHBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 68 /r

AVX

16/32/64-bit

§

VEX_Vpunpckhbw_ymm_ymm_ymmm256 = 1_542

VPUNPCKHBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 68 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhbw_xmm_k1z_xmm_xmmm128 = 1_543

VPUNPCKHBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 68 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhbw_ymm_k1z_ymm_ymmm256 = 1_544

VPUNPCKHBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 68 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhbw_zmm_k1z_zmm_zmmm512 = 1_545

VPUNPCKHBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 68 /r

AVX512BW

16/32/64-bit

§

Punpckhwd_mm_mmm64 = 1_546

PUNPCKHWD mm, mm/m64

NP 0F 69 /r

MMX

16/32/64-bit

§

Punpckhwd_xmm_xmmm128 = 1_547

PUNPCKHWD xmm1, xmm2/m128

66 0F 69 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhwd_xmm_xmm_xmmm128 = 1_548

VPUNPCKHWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 69 /r

AVX

16/32/64-bit

§

VEX_Vpunpckhwd_ymm_ymm_ymmm256 = 1_549

VPUNPCKHWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 69 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhwd_xmm_k1z_xmm_xmmm128 = 1_550

VPUNPCKHWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 69 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhwd_ymm_k1z_ymm_ymmm256 = 1_551

VPUNPCKHWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 69 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhwd_zmm_k1z_zmm_zmmm512 = 1_552

VPUNPCKHWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 69 /r

AVX512BW

16/32/64-bit

§

Punpckhdq_mm_mmm64 = 1_553

PUNPCKHDQ mm, mm/m64

NP 0F 6A /r

MMX

16/32/64-bit

§

Punpckhdq_xmm_xmmm128 = 1_554

PUNPCKHDQ xmm1, xmm2/m128

66 0F 6A /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhdq_xmm_xmm_xmmm128 = 1_555

VPUNPCKHDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6A /r

AVX

16/32/64-bit

§

VEX_Vpunpckhdq_ymm_ymm_ymmm256 = 1_556

VPUNPCKHDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6A /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhdq_xmm_k1z_xmm_xmmm128b32 = 1_557

VPUNPCKHDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 6A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhdq_ymm_k1z_ymm_ymmm256b32 = 1_558

VPUNPCKHDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 6A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhdq_zmm_k1z_zmm_zmmm512b32 = 1_559

VPUNPCKHDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 6A /r

AVX512F

16/32/64-bit

§

Packssdw_mm_mmm64 = 1_560

PACKSSDW mm1, mm2/m64

NP 0F 6B /r

MMX

16/32/64-bit

§

Packssdw_xmm_xmmm128 = 1_561

PACKSSDW xmm1, xmm2/m128

66 0F 6B /r

SSE2

16/32/64-bit

§

VEX_Vpackssdw_xmm_xmm_xmmm128 = 1_562

VPACKSSDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6B /r

AVX

16/32/64-bit

§

VEX_Vpackssdw_ymm_ymm_ymmm256 = 1_563

VPACKSSDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6B /r

AVX2

16/32/64-bit

§

EVEX_Vpackssdw_xmm_k1z_xmm_xmmm128b32 = 1_564

VPACKSSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 6B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackssdw_ymm_k1z_ymm_ymmm256b32 = 1_565

VPACKSSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 6B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackssdw_zmm_k1z_zmm_zmmm512b32 = 1_566

VPACKSSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 6B /r

AVX512BW

16/32/64-bit

§

Punpcklqdq_xmm_xmmm128 = 1_567

PUNPCKLQDQ xmm1, xmm2/m128

66 0F 6C /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklqdq_xmm_xmm_xmmm128 = 1_568

VPUNPCKLQDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6C /r

AVX

16/32/64-bit

§

VEX_Vpunpcklqdq_ymm_ymm_ymmm256 = 1_569

VPUNPCKLQDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6C /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklqdq_xmm_k1z_xmm_xmmm128b64 = 1_570

VPUNPCKLQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 6C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpcklqdq_ymm_k1z_ymm_ymmm256b64 = 1_571

VPUNPCKLQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 6C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpcklqdq_zmm_k1z_zmm_zmmm512b64 = 1_572

VPUNPCKLQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 6C /r

AVX512F

16/32/64-bit

§

Punpckhqdq_xmm_xmmm128 = 1_573

PUNPCKHQDQ xmm1, xmm2/m128

66 0F 6D /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhqdq_xmm_xmm_xmmm128 = 1_574

VPUNPCKHQDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6D /r

AVX

16/32/64-bit

§

VEX_Vpunpckhqdq_ymm_ymm_ymmm256 = 1_575

VPUNPCKHQDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6D /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhqdq_xmm_k1z_xmm_xmmm128b64 = 1_576

VPUNPCKHQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 6D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhqdq_ymm_k1z_ymm_ymmm256b64 = 1_577

VPUNPCKHQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 6D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhqdq_zmm_k1z_zmm_zmmm512b64 = 1_578

VPUNPCKHQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 6D /r

AVX512F

16/32/64-bit

§

Movd_mm_rm32 = 1_579

MOVD mm, r/m32

NP 0F 6E /r

MMX

16/32/64-bit

§

Movq_mm_rm64 = 1_580

MOVQ mm, r/m64

NP o64 0F 6E /r

MMX

64-bit

§

Movd_xmm_rm32 = 1_581

MOVD xmm, r/m32

66 0F 6E /r

SSE2

16/32/64-bit

§

Movq_xmm_rm64 = 1_582

MOVQ xmm, r/m64

66 o64 0F 6E /r

SSE2

64-bit

§

VEX_Vmovd_xmm_rm32 = 1_583

VMOVD xmm1, r/m32

VEX.128.66.0F.W0 6E /r

AVX

16/32/64-bit

§

VEX_Vmovq_xmm_rm64 = 1_584

VMOVQ xmm1, r/m64

VEX.128.66.0F.W1 6E /r

AVX

64-bit

§

EVEX_Vmovd_xmm_rm32 = 1_585

VMOVD xmm1, r/m32

EVEX.128.66.0F.W0 6E /r

AVX512F

16/32/64-bit

§

EVEX_Vmovq_xmm_rm64 = 1_586

VMOVQ xmm1, r/m64

EVEX.128.66.0F.W1 6E /r

AVX512F

64-bit

§

Movq_mm_mmm64 = 1_587

MOVQ mm, mm/m64

NP 0F 6F /r

MMX

16/32/64-bit

§

Movdqa_xmm_xmmm128 = 1_588

MOVDQA xmm1, xmm2/m128

66 0F 6F /r

SSE2

16/32/64-bit
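
An illustrative aside, not part of the generated listing: a minimal sketch, assuming the iced_x86 1.x Decoder API, of how the 66 0F 6F /r encoding above shows up as this Code value when decoding. The bytes are made up for the example.

use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 66 0F 6F C1 = MOVDQA xmm0, xmm1 (66 0F 6F /r with a register operand)
    let bytes = [0x66, 0x0F, 0x6F, 0xC1];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Movdqa_xmm_xmmm128);
    // The F3-prefixed form of the same opcode decodes to Code::Movdqu_xmm_xmmm128 instead (listed below).
}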

§

VEX_Vmovdqa_xmm_xmmm128 = 1_589

VMOVDQA xmm1, xmm2/m128

VEX.128.66.0F.WIG 6F /r

AVX

16/32/64-bit

§

VEX_Vmovdqa_ymm_ymmm256 = 1_590

VMOVDQA ymm1, ymm2/m256

VEX.256.66.0F.WIG 6F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqa32_xmm_k1z_xmmm128 = 1_591

VMOVDQA32 xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_ymm_k1z_ymmm256 = 1_592

VMOVDQA32 ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_zmm_k1z_zmmm512 = 1_593

VMOVDQA32 zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W0 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_xmm_k1z_xmmm128 = 1_594

VMOVDQA64 xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_ymm_k1z_ymmm256 = 1_595

VMOVDQA64 ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_zmm_k1z_zmmm512 = 1_596

VMOVDQA64 zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 6F /r

AVX512F

16/32/64-bit

§

Movdqu_xmm_xmmm128 = 1_597

MOVDQU xmm1, xmm2/m128

F3 0F 6F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqu_xmm_xmmm128 = 1_598

VMOVDQU xmm1, xmm2/m128

VEX.128.F3.0F.WIG 6F /r

AVX

16/32/64-bit

§

VEX_Vmovdqu_ymm_ymmm256 = 1_599

VMOVDQU ymm1, ymm2/m256

VEX.256.F3.0F.WIG 6F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqu32_xmm_k1z_xmmm128 = 1_600

VMOVDQU32 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_ymm_k1z_ymmm256 = 1_601

VMOVDQU32 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_zmm_k1z_zmmm512 = 1_602

VMOVDQU32 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_xmm_k1z_xmmm128 = 1_603

VMOVDQU64 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_ymm_k1z_ymmm256 = 1_604

VMOVDQU64 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_zmm_k1z_zmmm512 = 1_605

VMOVDQU64 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W1 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu8_xmm_k1z_xmmm128 = 1_606

VMOVDQU8 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F2.0F.W0 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_ymm_k1z_ymmm256 = 1_607

VMOVDQU8 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W0 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_zmm_k1z_zmmm512 = 1_608

VMOVDQU8 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W0 6F /r

AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_xmm_k1z_xmmm128 = 1_609

VMOVDQU16 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F2.0F.W1 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_ymm_k1z_ymmm256 = 1_610

VMOVDQU16 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W1 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_zmm_k1z_zmmm512 = 1_611

VMOVDQU16 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W1 6F /r

AVX512BW

16/32/64-bit
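
A small hedged sketch, assuming Code::mnemonic() from iced_x86 1.x: the load, store, and masking variants in this family are distinct Code values but share one Mnemonic, which is how they can be grouped when the exact operand shape does not matter.

use iced_x86::{Code, Mnemonic};

fn main() {
    // Several Code variants, one mnemonic
    assert_eq!(Code::EVEX_Vmovdqu16_zmm_k1z_zmmm512.mnemonic(), Mnemonic::Vmovdqu16);
    assert_eq!(Code::EVEX_Vmovdqu16_xmm_k1z_xmmm128.mnemonic(), Mnemonic::Vmovdqu16);
}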

§

Pshufw_mm_mmm64_imm8 = 1_612

PSHUFW mm1, mm2/m64, imm8

NP 0F 70 /r ib

SSE

16/32/64-bit

§

Pshufd_xmm_xmmm128_imm8 = 1_613

PSHUFD xmm1, xmm2/m128, imm8

66 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshufd_xmm_xmmm128_imm8 = 1_614

VPSHUFD xmm1, xmm2/m128, imm8

VEX.128.66.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshufd_ymm_ymmm256_imm8 = 1_615

VPSHUFD ymm1, ymm2/m256, imm8

VEX.256.66.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshufd_xmm_k1z_xmmm128b32_imm8 = 1_616

VPSHUFD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 70 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpshufd_ymm_k1z_ymmm256b32_imm8 = 1_617

VPSHUFD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 70 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpshufd_zmm_k1z_zmmm512b32_imm8 = 1_618

VPSHUFD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 70 /r ib

AVX512F

16/32/64-bit

§

Pshufhw_xmm_xmmm128_imm8 = 1_619

PSHUFHW xmm1, xmm2/m128, imm8

F3 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshufhw_xmm_xmmm128_imm8 = 1_620

VPSHUFHW xmm1, xmm2/m128, imm8

VEX.128.F3.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshufhw_ymm_ymmm256_imm8 = 1_621

VPSHUFHW ymm1, ymm2/m256, imm8

VEX.256.F3.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshufhw_xmm_k1z_xmmm128_imm8 = 1_622

VPSHUFHW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.F3.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufhw_ymm_k1z_ymmm256_imm8 = 1_623

VPSHUFHW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.F3.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufhw_zmm_k1z_zmmm512_imm8 = 1_624

VPSHUFHW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.F3.0F.WIG 70 /r ib

AVX512BW

16/32/64-bit

§

Pshuflw_xmm_xmmm128_imm8 = 1_625

PSHUFLW xmm1, xmm2/m128, imm8

F2 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshuflw_xmm_xmmm128_imm8 = 1_626

VPSHUFLW xmm1, xmm2/m128, imm8

VEX.128.F2.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshuflw_ymm_ymmm256_imm8 = 1_627

VPSHUFLW ymm1, ymm2/m256, imm8

VEX.256.F2.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshuflw_xmm_k1z_xmmm128_imm8 = 1_628

VPSHUFLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.F2.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshuflw_ymm_k1z_ymmm256_imm8 = 1_629

VPSHUFLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.F2.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshuflw_zmm_k1z_zmmm512_imm8 = 1_630

VPSHUFLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.F2.0F.WIG 70 /r ib

AVX512BW

16/32/64-bit

§

Psrlw_mm_imm8 = 1_631

PSRLW mm, imm8

NP 0F 71 /2 ib

MMX

16/32/64-bit

§

Psrlw_xmm_imm8 = 1_632

PSRLW xmm1, imm8

66 0F 71 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrlw_xmm_xmm_imm8 = 1_633

VPSRLW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrlw_ymm_ymm_imm8 = 1_634

VPSRLW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrlw_xmm_k1z_xmmm128_imm8 = 1_635

VPSRLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /2 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_ymm_k1z_ymmm256_imm8 = 1_636

VPSRLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /2 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_zmm_k1z_zmmm512_imm8 = 1_637

VPSRLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /2 ib

AVX512BW

16/32/64-bit

§

Psraw_mm_imm8 = 1_638

PSRAW mm, imm8

NP 0F 71 /4 ib

MMX

16/32/64-bit

§

Psraw_xmm_imm8 = 1_639

PSRAW xmm1, imm8

66 0F 71 /4 ib

SSE2

16/32/64-bit

§

VEX_Vpsraw_xmm_xmm_imm8 = 1_640

VPSRAW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /4 ib

AVX

16/32/64-bit

§

VEX_Vpsraw_ymm_ymm_imm8 = 1_641

VPSRAW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /4 ib

AVX2

16/32/64-bit

§

EVEX_Vpsraw_xmm_k1z_xmmm128_imm8 = 1_642

VPSRAW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /4 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_ymm_k1z_ymmm256_imm8 = 1_643

VPSRAW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /4 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_zmm_k1z_zmmm512_imm8 = 1_644

VPSRAW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /4 ib

AVX512BW

16/32/64-bit

§

Psllw_mm_imm8 = 1_645

PSLLW mm1, imm8

NP 0F 71 /6 ib

MMX

16/32/64-bit

§

Psllw_xmm_imm8 = 1_646

PSLLW xmm1, imm8

66 0F 71 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpsllw_xmm_xmm_imm8 = 1_647

VPSLLW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /6 ib

AVX

16/32/64-bit

§

VEX_Vpsllw_ymm_ymm_imm8 = 1_648

VPSLLW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpsllw_xmm_k1z_xmmm128_imm8 = 1_649

VPSLLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /6 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_ymm_k1z_ymmm256_imm8 = 1_650

VPSLLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /6 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_zmm_k1z_zmmm512_imm8 = 1_651

VPSLLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /6 ib

AVX512BW

16/32/64-bit

§

EVEX_Vprord_xmm_k1z_xmmm128b32_imm8 = 1_652

VPRORD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprord_ymm_k1z_ymmm256b32_imm8 = 1_653

VPRORD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprord_zmm_k1z_zmmm512b32_imm8 = 1_654

VPRORD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /0 ib

AVX512F

16/32/64-bit

§

EVEX_Vprorq_xmm_k1z_xmmm128b64_imm8 = 1_655

VPRORQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorq_ymm_k1z_ymmm256b64_imm8 = 1_656

VPRORQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorq_zmm_k1z_zmmm512b64_imm8 = 1_657

VPRORQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /0 ib

AVX512F

16/32/64-bit

§

EVEX_Vprold_xmm_k1z_xmmm128b32_imm8 = 1_658

VPROLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprold_ymm_k1z_ymmm256b32_imm8 = 1_659

VPROLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprold_zmm_k1z_zmmm512b32_imm8 = 1_660

VPROLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /1 ib

AVX512F

16/32/64-bit

§

EVEX_Vprolq_xmm_k1z_xmmm128b64_imm8 = 1_661

VPROLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolq_ymm_k1z_ymmm256b64_imm8 = 1_662

VPROLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolq_zmm_k1z_zmmm512b64_imm8 = 1_663

VPROLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /1 ib

AVX512F

16/32/64-bit

§

Psrld_mm_imm8 = 1_664

PSRLD mm, imm8

NP 0F 72 /2 ib

MMX

16/32/64-bit

§

Psrld_xmm_imm8 = 1_665

PSRLD xmm1, imm8

66 0F 72 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrld_xmm_xmm_imm8 = 1_666

VPSRLD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrld_ymm_ymm_imm8 = 1_667

VPSRLD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrld_xmm_k1z_xmmm128b32_imm8 = 1_668

VPSRLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_ymm_k1z_ymmm256b32_imm8 = 1_669

VPSRLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_zmm_k1z_zmmm512b32_imm8 = 1_670

VPSRLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /2 ib

AVX512F

16/32/64-bit

§

Psrad_mm_imm8 = 1_671

PSRAD mm, imm8

NP 0F 72 /4 ib

MMX

16/32/64-bit

§

Psrad_xmm_imm8 = 1_672

PSRAD xmm1, imm8

66 0F 72 /4 ib

SSE2

16/32/64-bit

§

VEX_Vpsrad_xmm_xmm_imm8 = 1_673

VPSRAD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /4 ib

AVX

16/32/64-bit

§

VEX_Vpsrad_ymm_ymm_imm8 = 1_674

VPSRAD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /4 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrad_xmm_k1z_xmmm128b32_imm8 = 1_675

VPSRAD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_ymm_k1z_ymmm256b32_imm8 = 1_676

VPSRAD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_zmm_k1z_zmmm512b32_imm8 = 1_677

VPSRAD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /4 ib

AVX512F

16/32/64-bit

§

EVEX_Vpsraq_xmm_k1z_xmmm128b64_imm8 = 1_678

VPSRAQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_ymm_k1z_ymmm256b64_imm8 = 1_679

VPSRAQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_zmm_k1z_zmmm512b64_imm8 = 1_680

VPSRAQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /4 ib

AVX512F

16/32/64-bit

§

Pslld_mm_imm8 = 1_681

PSLLD mm, imm8

NP 0F 72 /6 ib

MMX

16/32/64-bit

§

Pslld_xmm_imm8 = 1_682

PSLLD xmm1, imm8

66 0F 72 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpslld_xmm_xmm_imm8 = 1_683

VPSLLD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /6 ib

AVX

16/32/64-bit

§

VEX_Vpslld_ymm_ymm_imm8 = 1_684

VPSLLD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpslld_xmm_k1z_xmmm128b32_imm8 = 1_685

VPSLLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_ymm_k1z_ymmm256b32_imm8 = 1_686

VPSLLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_zmm_k1z_zmmm512b32_imm8 = 1_687

VPSLLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /6 ib

AVX512F

16/32/64-bit

§

Psrlq_mm_imm8 = 1_688

PSRLQ mm, imm8

NP 0F 73 /2 ib

MMX

16/32/64-bit

§

Psrlq_xmm_imm8 = 1_689

PSRLQ xmm1, imm8

66 0F 73 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrlq_xmm_xmm_imm8 = 1_690

VPSRLQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrlq_ymm_ymm_imm8 = 1_691

VPSRLQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrlq_xmm_k1z_xmmm128b64_imm8 = 1_692

VPSRLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 73 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_ymm_k1z_ymmm256b64_imm8 = 1_693

VPSRLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 73 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_zmm_k1z_zmmm512b64_imm8 = 1_694

VPSRLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 73 /2 ib

AVX512F

16/32/64-bit

§

Psrldq_xmm_imm8 = 1_695

PSRLDQ xmm1, imm8

66 0F 73 /3 ib

SSE2

16/32/64-bit

§

VEX_Vpsrldq_xmm_xmm_imm8 = 1_696

VPSRLDQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /3 ib

AVX

16/32/64-bit

§

VEX_Vpsrldq_ymm_ymm_imm8 = 1_697

VPSRLDQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /3 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrldq_xmm_xmmm128_imm8 = 1_698

VPSRLDQ xmm1, xmm2/m128, imm8

EVEX.128.66.0F.WIG 73 /3 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrldq_ymm_ymmm256_imm8 = 1_699

VPSRLDQ ymm1, ymm2/m256, imm8

EVEX.256.66.0F.WIG 73 /3 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrldq_zmm_zmmm512_imm8 = 1_700

VPSRLDQ zmm1, zmm2/m512, imm8

EVEX.512.66.0F.WIG 73 /3 ib

AVX512BW

16/32/64-bit

§

Psllq_mm_imm8 = 1_701

PSLLQ mm, imm8

NP 0F 73 /6 ib

MMX

16/32/64-bit

§

Psllq_xmm_imm8 = 1_702

PSLLQ xmm1, imm8

66 0F 73 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpsllq_xmm_xmm_imm8 = 1_703

VPSLLQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /6 ib

AVX

16/32/64-bit

§

VEX_Vpsllq_ymm_ymm_imm8 = 1_704

VPSLLQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpsllq_xmm_k1z_xmmm128b64_imm8 = 1_705

VPSLLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 73 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_ymm_k1z_ymmm256b64_imm8 = 1_706

VPSLLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 73 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_zmm_k1z_zmmm512b64_imm8 = 1_707

VPSLLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 73 /6 ib

AVX512F

16/32/64-bit

§

Pslldq_xmm_imm8 = 1_708

PSLLDQ xmm1, imm8

66 0F 73 /7 ib

SSE2

16/32/64-bit

§

VEX_Vpslldq_xmm_xmm_imm8 = 1_709

VPSLLDQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /7 ib

AVX

16/32/64-bit

§

VEX_Vpslldq_ymm_ymm_imm8 = 1_710

VPSLLDQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /7 ib

AVX2

16/32/64-bit

§

EVEX_Vpslldq_xmm_xmmm128_imm8 = 1_711

VPSLLDQ xmm1, xmm2/m128, imm8

EVEX.128.66.0F.WIG 73 /7 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpslldq_ymm_ymmm256_imm8 = 1_712

VPSLLDQ ymm1, ymm2/m256, imm8

EVEX.256.66.0F.WIG 73 /7 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpslldq_zmm_zmmm512_imm8 = 1_713

VPSLLDQ zmm1, zmm2/m512, imm8

EVEX.512.66.0F.WIG 73 /7 ib

AVX512BW

16/32/64-bit

§

Pcmpeqb_mm_mmm64 = 1_714

PCMPEQB mm, mm/m64

NP 0F 74 /r

MMX

16/32/64-bit

§

Pcmpeqb_xmm_xmmm128 = 1_715

PCMPEQB xmm1, xmm2/m128

66 0F 74 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqb_xmm_xmm_xmmm128 = 1_716

VPCMPEQB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 74 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqb_ymm_ymm_ymmm256 = 1_717

VPCMPEQB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 74 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_xmm_xmmm128 = 1_718

VPCMPEQB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 74 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_ymm_ymmm256 = 1_719

VPCMPEQB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 74 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_zmm_zmmm512 = 1_720

VPCMPEQB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 74 /r

AVX512BW

16/32/64-bit

§

Pcmpeqw_mm_mmm64 = 1_721

PCMPEQW mm, mm/m64

NP 0F 75 /r

MMX

16/32/64-bit

§

Pcmpeqw_xmm_xmmm128 = 1_722

PCMPEQW xmm1, xmm2/m128

66 0F 75 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqw_xmm_xmm_xmmm128 = 1_723

VPCMPEQW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 75 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqw_ymm_ymm_ymmm256 = 1_724

VPCMPEQW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 75 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_xmm_xmmm128 = 1_725

VPCMPEQW k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_ymm_ymmm256 = 1_726

VPCMPEQW k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_zmm_zmmm512 = 1_727

VPCMPEQW k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 75 /r

AVX512BW

16/32/64-bit

§

Pcmpeqd_mm_mmm64 = 1_728

PCMPEQD mm, mm/m64

NP 0F 76 /r

MMX

16/32/64-bit

§

Pcmpeqd_xmm_xmmm128 = 1_729

PCMPEQD xmm1, xmm2/m128

66 0F 76 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqd_xmm_xmm_xmmm128 = 1_730

VPCMPEQD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 76 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqd_ymm_ymm_ymmm256 = 1_731

VPCMPEQD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 76 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_xmm_xmmm128b32 = 1_732

VPCMPEQD k1 {k2}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_ymm_ymmm256b32 = 1_733

VPCMPEQD k1 {k2}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_zmm_zmmm512b32 = 1_734

VPCMPEQD k1 {k2}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 76 /r

AVX512F

16/32/64-bit

§

Emms = 1_735

EMMS

NP 0F 77

MMX

16/32/64-bit

§

VEX_Vzeroupper = 1_736

VZEROUPPER

VEX.128.0F.WIG 77

AVX

16/32/64-bit

§

VEX_Vzeroall = 1_737

VZEROALL

VEX.256.0F.WIG 77

AVX

16/32/64-bit

§

Vmread_rm32_r32 = 1_738

VMREAD r/m32, r32

NP 0F 78 /r

VMX

16/32-bit

§

Vmread_rm64_r64 = 1_739

VMREAD r/m64, r64

NP 0F 78 /r

VMX

64-bit

§

EVEX_Vcvttps2udq_xmm_k1z_xmmm128b32 = 1_740

VCVTTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2udq_ymm_k1z_ymmm256b32 = 1_741

VCVTTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2udq_zmm_k1z_zmmm512b32_sae = 1_742

VCVTTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_xmm_k1z_xmmm128b64 = 1_743

VCVTTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_xmm_k1z_ymmm256b64 = 1_744

VCVTTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_ymm_k1z_zmmm512b64_sae = 1_745

VCVTTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.0F.W1 78 /r

AVX512F

16/32/64-bit

§

Extrq_xmm_imm8_imm8 = 1_746

EXTRQ xmm1, imm8, imm8

66 0F 78 /0 ib ib

SSE4A

16/32/64-bit

§

EVEX_Vcvttps2uqq_xmm_k1z_xmmm64b32 = 1_747

VCVTTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2uqq_ymm_k1z_xmmm128b32 = 1_748

VCVTTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2uqq_zmm_k1z_ymmm256b32_sae = 1_749

VCVTTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.66.0F.W0 78 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_xmm_k1z_xmmm128b64 = 1_750

VCVTTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_ymm_k1z_ymmm256b64 = 1_751

VCVTTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_zmm_k1z_zmmm512b64_sae = 1_752

VCVTTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 78 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttss2usi_r32_xmmm32_sae = 1_753

VCVTTSS2USI r32, xmm1/m32{sae}

EVEX.LIG.F3.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttss2usi_r64_xmmm32_sae = 1_754

VCVTTSS2USI r64, xmm1/m32{sae}

EVEX.LIG.F3.0F.W1 78 /r

AVX512F

64-bit

§

Insertq_xmm_xmm_imm8_imm8 = 1_755

INSERTQ xmm1, xmm2, imm8, imm8

F2 0F 78 /r ib ib

SSE4A

16/32/64-bit

§

EVEX_Vcvttsd2usi_r32_xmmm64_sae = 1_756

VCVTTSD2USI r32, xmm1/m64{sae}

EVEX.LIG.F2.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttsd2usi_r64_xmmm64_sae = 1_757

VCVTTSD2USI r64, xmm1/m64{sae}

EVEX.LIG.F2.0F.W1 78 /r

AVX512F

64-bit

§

Vmwrite_r32_rm32 = 1_758

VMWRITE r32, r/m32

NP 0F 79 /r

VMX

16/32-bit

§

Vmwrite_r64_rm64 = 1_759

VMWRITE r64, r/m64

NP 0F 79 /r

VMX

64-bit

§

EVEX_Vcvtps2udq_xmm_k1z_xmmm128b32 = 1_760

VCVTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2udq_ymm_k1z_ymmm256b32 = 1_761

VCVTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2udq_zmm_k1z_zmmm512b32_er = 1_762

VCVTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_xmm_k1z_xmmm128b64 = 1_763

VCVTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_xmm_k1z_ymmm256b64 = 1_764

VCVTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_ymm_k1z_zmmm512b64_er = 1_765

VCVTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.0F.W1 79 /r

AVX512F

16/32/64-bit

§

Extrq_xmm_xmm = 1_766

EXTRQ xmm1, xmm2

66 0F 79 /r

SSE4A

16/32/64-bit

§

EVEX_Vcvtps2uqq_xmm_k1z_xmmm64b32 = 1_767

VCVTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2uqq_ymm_k1z_xmmm128b32 = 1_768

VCVTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2uqq_zmm_k1z_ymmm256b32_er = 1_769

VCVTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.66.0F.W0 79 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_xmm_k1z_xmmm128b64 = 1_770

VCVTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_ymm_k1z_ymmm256b64 = 1_771

VCVTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_zmm_k1z_zmmm512b64_er = 1_772

VCVTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 79 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtss2usi_r32_xmmm32_er = 1_773

VCVTSS2USI r32, xmm1/m32{er}

EVEX.LIG.F3.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtss2usi_r64_xmmm32_er = 1_774

VCVTSS2USI r64, xmm1/m32{er}

EVEX.LIG.F3.0F.W1 79 /r

AVX512F

64-bit

§

Insertq_xmm_xmm = 1_775

INSERTQ xmm1, xmm2

F2 0F 79 /r

SSE4A

16/32/64-bit

§

EVEX_Vcvtsd2usi_r32_xmmm64_er = 1_776

VCVTSD2USI r32, xmm1/m64{er}

EVEX.LIG.F2.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsd2usi_r64_xmmm64_er = 1_777

VCVTSD2USI r64, xmm1/m64{er}

EVEX.LIG.F2.0F.W1 79 /r

AVX512F

64-bit

§

EVEX_Vcvttps2qq_xmm_k1z_xmmm64b32 = 1_778

VCVTTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2qq_ymm_k1z_xmmm128b32 = 1_779

VCVTTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2qq_zmm_k1z_ymmm256b32_sae = 1_780

VCVTTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.66.0F.W0 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_xmm_k1z_xmmm128b64 = 1_781

VCVTTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_ymm_k1z_ymmm256b64 = 1_782

VCVTTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_zmm_k1z_zmmm512b64_sae = 1_783

VCVTTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtudq2pd_xmm_k1z_xmmm64b32 = 1_784

VCVTUDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.F3.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2pd_ymm_k1z_xmmm128b32 = 1_785

VCVTUDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.F3.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2pd_zmm_k1z_ymmm256b32_er = 1_786

VCVTUDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.F3.0F.W0 7A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtuqq2pd_xmm_k1z_xmmm128b64 = 1_787

VCVTUQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F3.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2pd_ymm_k1z_ymmm256b64 = 1_788

VCVTUQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F3.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2pd_zmm_k1z_zmmm512b64_er = 1_789

VCVTUQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F3.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtudq2ps_xmm_k1z_xmmm128b32 = 1_790

VCVTUDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F2.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2ps_ymm_k1z_ymmm256b32 = 1_791

VCVTUDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F2.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2ps_zmm_k1z_zmmm512b32_er = 1_792

VCVTUDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.F2.0F.W0 7A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtuqq2ps_xmm_k1z_xmmm128b64 = 1_793

VCVTUQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2ps_xmm_k1z_ymmm256b64 = 1_794

VCVTUQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2ps_ymm_k1z_zmmm512b64_er = 1_795

VCVTUQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_xmm_k1z_xmmm64b32 = 1_796

VCVTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_ymm_k1z_xmmm128b32 = 1_797

VCVTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_zmm_k1z_ymmm256b32_er = 1_798

VCVTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.66.0F.W0 7B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_xmm_k1z_xmmm128b64 = 1_799

VCVTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_ymm_k1z_ymmm256b64 = 1_800

VCVTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_zmm_k1z_zmmm512b64_er = 1_801

VCVTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 7B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtusi2ss_xmm_xmm_rm32_er = 1_802

VCVTUSI2SS xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.0F.W0 7B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtusi2ss_xmm_xmm_rm64_er = 1_803

VCVTUSI2SS xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.0F.W1 7B /r

AVX512F

64-bit

§

EVEX_Vcvtusi2sd_xmm_xmm_rm32_er = 1_804

VCVTUSI2SD xmm1, xmm2, r/m32{er}

EVEX.LIG.F2.0F.W0 7B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtusi2sd_xmm_xmm_rm64_er = 1_805

VCVTUSI2SD xmm1, xmm2, r/m64{er}

EVEX.LIG.F2.0F.W1 7B /r

AVX512F

64-bit

§

Haddpd_xmm_xmmm128 = 1_806

HADDPD xmm1, xmm2/m128

66 0F 7C /r

SSE3

16/32/64-bit

§

VEX_Vhaddpd_xmm_xmm_xmmm128 = 1_807

VHADDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 7C /r

AVX

16/32/64-bit

§

VEX_Vhaddpd_ymm_ymm_ymmm256 = 1_808

VHADDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 7C /r

AVX

16/32/64-bit

§

Haddps_xmm_xmmm128 = 1_809

HADDPS xmm1, xmm2/m128

F2 0F 7C /r

SSE3

16/32/64-bit

§

VEX_Vhaddps_xmm_xmm_xmmm128 = 1_810

VHADDPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG 7C /r

AVX

16/32/64-bit

§

VEX_Vhaddps_ymm_ymm_ymmm256 = 1_811

VHADDPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG 7C /r

AVX

16/32/64-bit

§

Hsubpd_xmm_xmmm128 = 1_812

HSUBPD xmm1, xmm2/m128

66 0F 7D /r

SSE3

16/32/64-bit

§

VEX_Vhsubpd_xmm_xmm_xmmm128 = 1_813

VHSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 7D /r

AVX

16/32/64-bit

§

VEX_Vhsubpd_ymm_ymm_ymmm256 = 1_814

VHSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 7D /r

AVX

16/32/64-bit

§

Hsubps_xmm_xmmm128 = 1_815

HSUBPS xmm1, xmm2/m128

F2 0F 7D /r

SSE3

16/32/64-bit

§

VEX_Vhsubps_xmm_xmm_xmmm128 = 1_816

VHSUBPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG 7D /r

AVX

16/32/64-bit

§

VEX_Vhsubps_ymm_ymm_ymmm256 = 1_817

VHSUBPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG 7D /r

AVX

16/32/64-bit

§

Movd_rm32_mm = 1_818

MOVD r/m32, mm

NP 0F 7E /r

MMX

16/32/64-bit

§

Movq_rm64_mm = 1_819

MOVQ r/m64, mm

NP o64 0F 7E /r

MMX

64-bit

§

Movd_rm32_xmm = 1_820

MOVD r/m32, xmm

66 0F 7E /r

SSE2

16/32/64-bit

§

Movq_rm64_xmm = 1_821

MOVQ r/m64, xmm

66 o64 0F 7E /r

SSE2

64-bit

§

VEX_Vmovd_rm32_xmm = 1_822

VMOVD r/m32, xmm1

VEX.128.66.0F.W0 7E /r

AVX

16/32/64-bit

§

VEX_Vmovq_rm64_xmm = 1_823

VMOVQ r/m64, xmm1

VEX.128.66.0F.W1 7E /r

AVX

64-bit

§

EVEX_Vmovd_rm32_xmm = 1_824

VMOVD r/m32, xmm1

EVEX.128.66.0F.W0 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vmovq_rm64_xmm = 1_825

VMOVQ r/m64, xmm1

EVEX.128.66.0F.W1 7E /r

AVX512F

64-bit

§

Movq_xmm_xmmm64 = 1_826

MOVQ xmm1, xmm2/m64

F3 0F 7E /r

SSE2

16/32/64-bit

§

VEX_Vmovq_xmm_xmmm64 = 1_827

VMOVQ xmm1, xmm2/m64

VEX.128.F3.0F.WIG 7E /r

AVX

16/32/64-bit

§

EVEX_Vmovq_xmm_xmmm64 = 1_828

VMOVQ xmm1, xmm2/m64

EVEX.128.F3.0F.W1 7E /r

AVX512F

16/32/64-bit

§

Movq_mmm64_mm = 1_829

MOVQ mm/m64, mm

NP 0F 7F /r

MMX

16/32/64-bit

§

Movdqa_xmmm128_xmm = 1_830

MOVDQA xmm2/m128, xmm1

66 0F 7F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqa_xmmm128_xmm = 1_831

VMOVDQA xmm2/m128, xmm1

VEX.128.66.0F.WIG 7F /r

AVX

16/32/64-bit

§

VEX_Vmovdqa_ymmm256_ymm = 1_832

VMOVDQA ymm2/m256, ymm1

VEX.256.66.0F.WIG 7F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqa32_xmmm128_k1z_xmm = 1_833

VMOVDQA32 xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_ymmm256_k1z_ymm = 1_834

VMOVDQA32 ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_zmmm512_k1z_zmm = 1_835

VMOVDQA32 zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_xmmm128_k1z_xmm = 1_836

VMOVDQA64 xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_ymmm256_k1z_ymm = 1_837

VMOVDQA64 ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_zmmm512_k1z_zmm = 1_838

VMOVDQA64 zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 7F /r

AVX512F

16/32/64-bit

§

Movdqu_xmmm128_xmm = 1_839

MOVDQU xmm2/m128, xmm1

F3 0F 7F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqu_xmmm128_xmm = 1_840

VMOVDQU xmm2/m128, xmm1

VEX.128.F3.0F.WIG 7F /r

AVX

16/32/64-bit

§

VEX_Vmovdqu_ymmm256_ymm = 1_841

VMOVDQU ymm2/m256, ymm1

VEX.256.F3.0F.WIG 7F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqu32_xmmm128_k1z_xmm = 1_842

VMOVDQU32 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F3.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_ymmm256_k1z_ymm = 1_843

VMOVDQU32 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F3.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_zmmm512_k1z_zmm = 1_844

VMOVDQU32 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F3.0F.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_xmmm128_k1z_xmm = 1_845

VMOVDQU64 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F3.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_ymmm256_k1z_ymm = 1_846

VMOVDQU64 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F3.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_zmmm512_k1z_zmm = 1_847

VMOVDQU64 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F3.0F.W1 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu8_xmmm128_k1z_xmm = 1_848

VMOVDQU8 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F2.0F.W0 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_ymmm256_k1z_ymm = 1_849

VMOVDQU8 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F2.0F.W0 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_zmmm512_k1z_zmm = 1_850

VMOVDQU8 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F2.0F.W0 7F /r

AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_xmmm128_k1z_xmm = 1_851

VMOVDQU16 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F2.0F.W1 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_ymmm256_k1z_ymm = 1_852

VMOVDQU16 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F2.0F.W1 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_zmmm512_k1z_zmm = 1_853

VMOVDQU16 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F2.0F.W1 7F /r

AVX512BW

16/32/64-bit

§

Jo_rel16 = 1_854

JO rel16

o16 0F 80 cw

386+

16/32/64-bit

§

Jo_rel32_32 = 1_855

JO rel32

o32 0F 80 cd

386+

16/32-bit

§

Jo_rel32_64 = 1_856

JO rel32

o64 0F 80 cd

X64

64-bit

§

Jno_rel16 = 1_857

JNO rel16

o16 0F 81 cw

386+

16/32/64-bit

§

Jno_rel32_32 = 1_858

JNO rel32

o32 0F 81 cd

386+

16/32-bit

§

Jno_rel32_64 = 1_859

JNO rel32

o64 0F 81 cd

X64

64-bit

§

Jb_rel16 = 1_860

JB rel16

o16 0F 82 cw

386+

16/32/64-bit

§

Jb_rel32_32 = 1_861

JB rel32

o32 0F 82 cd

386+

16/32-bit

§

Jb_rel32_64 = 1_862

JB rel32

o64 0F 82 cd

X64

64-bit

§

Jae_rel16 = 1_863

JAE rel16

o16 0F 83 cw

386+

16/32/64-bit

§

Jae_rel32_32 = 1_864

JAE rel32

o32 0F 83 cd

386+

16/32-bit

§

Jae_rel32_64 = 1_865

JAE rel32

o64 0F 83 cd

X64

64-bit

§

Je_rel16 = 1_866

JE rel16

o16 0F 84 cw

386+

16/32/64-bit

§

Je_rel32_32 = 1_867

JE rel32

o32 0F 84 cd

386+

16/32-bit

§

Je_rel32_64 = 1_868

JE rel32

o64 0F 84 cd

X64

64-bit

§

Jne_rel16 = 1_869

JNE rel16

o16 0F 85 cw

386+

16/32/64-bit

§

Jne_rel32_32 = 1_870

JNE rel32

o32 0F 85 cd

386+

16/32-bit

§

Jne_rel32_64 = 1_871

JNE rel32

o64 0F 85 cd

X64

64-bit

§

Jbe_rel16 = 1_872

JBE rel16

o16 0F 86 cw

386+

16/32/64-bit

§

Jbe_rel32_32 = 1_873

JBE rel32

o32 0F 86 cd

386+

16/32-bit

§

Jbe_rel32_64 = 1_874

JBE rel32

o64 0F 86 cd

X64

64-bit

§

Ja_rel16 = 1_875

JA rel16

o16 0F 87 cw

386+

16/32/64-bit

§

Ja_rel32_32 = 1_876

JA rel32

o32 0F 87 cd

386+

16/32-bit

§

Ja_rel32_64 = 1_877

JA rel32

o64 0F 87 cd

X64

64-bit

§

Js_rel16 = 1_878

JS rel16

o16 0F 88 cw

386+

16/32/64-bit

§

Js_rel32_32 = 1_879

JS rel32

o32 0F 88 cd

386+

16/32-bit

§

Js_rel32_64 = 1_880

JS rel32

o64 0F 88 cd

X64

64-bit

§

Jns_rel16 = 1_881

JNS rel16

o16 0F 89 cw

386+

16/32/64-bit

§

Jns_rel32_32 = 1_882

JNS rel32

o32 0F 89 cd

386+

16/32-bit

§

Jns_rel32_64 = 1_883

JNS rel32

o64 0F 89 cd

X64

64-bit

§

Jp_rel16 = 1_884

JP rel16

o16 0F 8A cw

386+

16/32/64-bit

§

Jp_rel32_32 = 1_885

JP rel32

o32 0F 8A cd

386+

16/32-bit

§

Jp_rel32_64 = 1_886

JP rel32

o64 0F 8A cd

X64

64-bit

§

Jnp_rel16 = 1_887

JNP rel16

o16 0F 8B cw

386+

16/32/64-bit

§

Jnp_rel32_32 = 1_888

JNP rel32

o32 0F 8B cd

386+

16/32-bit

§

Jnp_rel32_64 = 1_889

JNP rel32

o64 0F 8B cd

X64

64-bit

§

Jl_rel16 = 1_890

JL rel16

o16 0F 8C cw

386+

16/32/64-bit

§

Jl_rel32_32 = 1_891

JL rel32

o32 0F 8C cd

386+

16/32-bit

§

Jl_rel32_64 = 1_892

JL rel32

o64 0F 8C cd

X64

64-bit

§

Jge_rel16 = 1_893

JGE rel16

o16 0F 8D cw

386+

16/32/64-bit

§

Jge_rel32_32 = 1_894

JGE rel32

o32 0F 8D cd

386+

16/32-bit

§

Jge_rel32_64 = 1_895

JGE rel32

o64 0F 8D cd

X64

64-bit

§

Jle_rel16 = 1_896

JLE rel16

o16 0F 8E cw

386+

16/32/64-bit

§

Jle_rel32_32 = 1_897

JLE rel32

o32 0F 8E cd

386+

16/32-bit

§

Jle_rel32_64 = 1_898

JLE rel32

o64 0F 8E cd

X64

64-bit

§

Jg_rel16 = 1_899

JG rel16

o16 0F 8F cw

386+

16/32/64-bit

§

Jg_rel32_32 = 1_900

JG rel32

o32 0F 8F cd

386+

16/32-bit

§

Jg_rel32_64 = 1_901

JG rel32

o64 0F 8F cd

X64

64-bit
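
An illustrative sketch, assuming the iced_x86 1.x Decoder API (the bytes and instruction pointer are made up): the rel32 Jcc forms above carry a displacement that the decoder resolves to an absolute near branch target.

use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 0F 84 cd = JE rel32 with displacement 0x1000
    let bytes = [0x0F, 0x84, 0x00, 0x10, 0x00, 0x00];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    decoder.set_ip(0x7FFF_0000);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Je_rel32_64);
    // Target = address of the next instruction (0x7FFF_0006) + 0x1000
    assert_eq!(instr.near_branch_target(), 0x7FFF_1006);
}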

§

Seto_rm8 = 1_902

SETO r/m8

0F 90 /r

386+

16/32/64-bit

§

Setno_rm8 = 1_903

SETNO r/m8

0F 91 /r

386+

16/32/64-bit

§

Setb_rm8 = 1_904

SETB r/m8

0F 92 /r

386+

16/32/64-bit

§

Setae_rm8 = 1_905

SETAE r/m8

0F 93 /r

386+

16/32/64-bit

§

Sete_rm8 = 1_906

SETE r/m8

0F 94 /r

386+

16/32/64-bit

§

Setne_rm8 = 1_907

SETNE r/m8

0F 95 /r

386+

16/32/64-bit

§

Setbe_rm8 = 1_908

SETBE r/m8

0F 96 /r

386+

16/32/64-bit

§

Seta_rm8 = 1_909

SETA r/m8

0F 97 /r

386+

16/32/64-bit

§

Sets_rm8 = 1_910

SETS r/m8

0F 98 /r

386+

16/32/64-bit

§

Setns_rm8 = 1_911

SETNS r/m8

0F 99 /r

386+

16/32/64-bit

§

Setp_rm8 = 1_912

SETP r/m8

0F 9A /r

386+

16/32/64-bit

§

Setnp_rm8 = 1_913

SETNP r/m8

0F 9B /r

386+

16/32/64-bit

§

Setl_rm8 = 1_914

SETL r/m8

0F 9C /r

386+

16/32/64-bit

§

Setge_rm8 = 1_915

SETGE r/m8

0F 9D /r

386+

16/32/64-bit

§

Setle_rm8 = 1_916

SETLE r/m8

0F 9E /r

386+

16/32/64-bit

§

Setg_rm8 = 1_917

SETG r/m8

0F 9F /r

386+

16/32/64-bit
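
A minimal sketch, assuming the iced_x86 1.x Decoder API (example bytes chosen by hand): the SETcc encodings above take an r/m8 operand, so a ModRM byte with mod=11 selects a byte register directly.

use iced_x86::{Code, Decoder, DecoderOptions, Mnemonic, Register};

fn main() {
    // 0F 9F C0 = SETG AL (0F 9F /r, ModRM mod=11, rm=AL)
    let bytes = [0x0F, 0x9F, 0xC0];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Setg_rm8);
    assert_eq!(instr.mnemonic(), Mnemonic::Setg);
    assert_eq!(instr.op0_register(), Register::AL);
}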

§

VEX_Kmovw_kr_km16 = 1_918

KMOVW k1, k2/m16

VEX.L0.0F.W0 90 /r

AVX512F

16/32/64-bit

§

VEX_Kmovq_kr_km64 = 1_919

KMOVQ k1, k2/m64

VEX.L0.0F.W1 90 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovb_kr_km8 = 1_920

KMOVB k1, k2/m8

VEX.L0.66.0F.W0 90 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_kr_km32 = 1_921

KMOVD k1, k2/m32

VEX.L0.66.0F.W1 90 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovw_m16_kr = 1_922

KMOVW m16, k1

VEX.L0.0F.W0 91 /r

AVX512F

16/32/64-bit

§

VEX_Kmovq_m64_kr = 1_923

KMOVQ m64, k1

VEX.L0.0F.W1 91 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovb_m8_kr = 1_924

KMOVB m8, k1

VEX.L0.66.0F.W0 91 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_m32_kr = 1_925

KMOVD m32, k1

VEX.L0.66.0F.W1 91 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovw_kr_r32 = 1_926

KMOVW k1, r32

VEX.L0.0F.W0 92 /r

AVX512F

16/32/64-bit

§

VEX_Kmovb_kr_r32 = 1_927

KMOVB k1, r32

VEX.L0.66.0F.W0 92 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_kr_r32 = 1_928

KMOVD k1, r32

VEX.L0.F2.0F.W0 92 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovq_kr_r64 = 1_929

KMOVQ k1, r64

VEX.L0.F2.0F.W1 92 /r

AVX512BW

64-bit

§

VEX_Kmovw_r32_kr = 1_930

KMOVW r32, k1

VEX.L0.0F.W0 93 /r

AVX512F

16/32/64-bit

§

VEX_Kmovb_r32_kr = 1_931

KMOVB r32, k1

VEX.L0.66.0F.W0 93 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_r32_kr = 1_932

KMOVD r32, k1

VEX.L0.F2.0F.W0 93 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovq_r64_kr = 1_933

KMOVQ r64, k1

VEX.L0.F2.0F.W1 93 /r

AVX512BW

64-bit
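
A hedged sketch, assuming the iced_x86 1.x Decoder API; the VEX bytes below were assembled by hand for illustration and are the register-register form of the VEX.L0.0F.W0 90 /r encoding listed above.

use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // C5 F8 90 CA = KMOVW k1, k2 (2-byte VEX, L=0, pp=NP, 0F map, opcode 90 /r)
    let bytes = [0xC5, 0xF8, 0x90, 0xCA];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::VEX_Kmovw_kr_km16);
}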

§

VEX_Kortestw_kr_kr = 1_934

KORTESTW k1, k2

VEX.L0.0F.W0 98 /r

AVX512F

16/32/64-bit

§

VEX_Kortestq_kr_kr = 1_935

KORTESTQ k1, k2

VEX.L0.0F.W1 98 /r

AVX512BW

16/32/64-bit

§

VEX_Kortestb_kr_kr = 1_936

KORTESTB k1, k2

VEX.L0.66.0F.W0 98 /r

AVX512DQ

16/32/64-bit

§

VEX_Kortestd_kr_kr = 1_937

KORTESTD k1, k2

VEX.L0.66.0F.W1 98 /r

AVX512BW

16/32/64-bit

§

VEX_Ktestw_kr_kr = 1_938

KTESTW k1, k2

VEX.L0.0F.W0 99 /r

AVX512DQ

16/32/64-bit

§

VEX_Ktestq_kr_kr = 1_939

KTESTQ k1, k2

VEX.L0.0F.W1 99 /r

AVX512BW

16/32/64-bit

§

VEX_Ktestb_kr_kr = 1_940

KTESTB k1, k2

VEX.L0.66.0F.W0 99 /r

AVX512DQ

16/32/64-bit

§

VEX_Ktestd_kr_kr = 1_941

KTESTD k1, k2

VEX.L0.66.0F.W1 99 /r

AVX512BW

16/32/64-bit

§

Pushw_FS = 1_942

PUSH FS

o16 0F A0

386+

16/32/64-bit

§

Pushd_FS = 1_943

PUSH FS

o32 0F A0

386+

16/32-bit

§

Pushq_FS = 1_944

PUSH FS

o64 0F A0

X64

64-bit

§

Popw_FS = 1_945

POP FS

o16 0F A1

386+

16/32/64-bit

§

Popd_FS = 1_946

POP FS

o32 0F A1

386+

16/32-bit

§

Popq_FS = 1_947

POP FS

o64 0F A1

X64

64-bit

§

Cpuid = 1_948

CPUID

0F A2

CPUID

16/32/64-bit

§

Bt_rm16_r16 = 1_949

BT r/m16, r16

o16 0F A3 /r

386+

16/32/64-bit

§

Bt_rm32_r32 = 1_950

BT r/m32, r32

o32 0F A3 /r

386+

16/32/64-bit

§

Bt_rm64_r64 = 1_951

BT r/m64, r64

o64 0F A3 /r

X64

64-bit

§

Shld_rm16_r16_imm8 = 1_952

SHLD r/m16, r16, imm8

o16 0F A4 /r ib

386+

16/32/64-bit

§

Shld_rm32_r32_imm8 = 1_953

SHLD r/m32, r32, imm8

o32 0F A4 /r ib

386+

16/32/64-bit

§

Shld_rm64_r64_imm8 = 1_954

SHLD r/m64, r64, imm8

o64 0F A4 /r ib

X64

64-bit

§

Shld_rm16_r16_CL = 1_955

SHLD r/m16, r16, CL

o16 0F A5 /r

386+

16/32/64-bit

§

Shld_rm32_r32_CL = 1_956

SHLD r/m32, r32, CL

o32 0F A5 /r

386+

16/32/64-bit

§

Shld_rm64_r64_CL = 1_957

SHLD r/m64, r64, CL

o64 0F A5 /r

X64

64-bit

§

Montmul_16 = 1_958

MONTMUL

a16 F3 0F A6 C0

PADLOCK_PMM

16/32-bit

§

Montmul_32 = 1_959

MONTMUL

a32 F3 0F A6 C0

PADLOCK_PMM

16/32/64-bit

§

Montmul_64 = 1_960

MONTMUL

a64 F3 0F A6 C0

PADLOCK_PMM

64-bit

§

Xsha1_16 = 1_961

XSHA1

a16 F3 0F A6 C8

PADLOCK_PHE

16/32-bit

§

Xsha1_32 = 1_962

XSHA1

a32 F3 0F A6 C8

PADLOCK_PHE

16/32/64-bit

§

Xsha1_64 = 1_963

XSHA1

a64 F3 0F A6 C8

PADLOCK_PHE

64-bit

§

Xsha256_16 = 1_964

XSHA256

a16 F3 0F A6 D0

PADLOCK_PHE

16/32-bit

§

Xsha256_32 = 1_965

XSHA256

a32 F3 0F A6 D0

PADLOCK_PHE

16/32/64-bit

§

Xsha256_64 = 1_966

XSHA256

a64 F3 0F A6 D0

PADLOCK_PHE

64-bit

§

Xbts_r16_rm16 = 1_967

XBTS r16, r/m16

o16 0F A6 /r

386 A0

16/32-bit

§

Xbts_r32_rm32 = 1_968

XBTS r32, r/m32

o32 0F A6 /r

386 A0

16/32-bit

§

Xstore_16 = 1_969

XSTORE

a16 0F A7 C0

PADLOCK_RNG

16/32-bit

§

Xstore_32 = 1_970

XSTORE

a32 0F A7 C0

PADLOCK_RNG

16/32/64-bit

§

Xstore_64 = 1_971

XSTORE

a64 0F A7 C0

PADLOCK_RNG

64-bit

§

Xcryptecb_16 = 1_972

XCRYPTECB

a16 F3 0F A7 C8

PADLOCK_ACE

16/32-bit

§

Xcryptecb_32 = 1_973

XCRYPTECB

a32 F3 0F A7 C8

PADLOCK_ACE

16/32/64-bit

§

Xcryptecb_64 = 1_974

XCRYPTECB

a64 F3 0F A7 C8

PADLOCK_ACE

64-bit

§

Xcryptcbc_16 = 1_975

XCRYPTCBC

a16 F3 0F A7 D0

PADLOCK_ACE

16/32-bit

§

Xcryptcbc_32 = 1_976

XCRYPTCBC

a32 F3 0F A7 D0

PADLOCK_ACE

16/32/64-bit

§

Xcryptcbc_64 = 1_977

XCRYPTCBC

a64 F3 0F A7 D0

PADLOCK_ACE

64-bit

§

Xcryptctr_16 = 1_978

XCRYPTCTR

a16 F3 0F A7 D8

PADLOCK_ACE

16/32-bit

§

Xcryptctr_32 = 1_979

XCRYPTCTR

a32 F3 0F A7 D8

PADLOCK_ACE

16/32/64-bit

§

Xcryptctr_64 = 1_980

XCRYPTCTR

a64 F3 0F A7 D8

PADLOCK_ACE

64-bit

§

Xcryptcfb_16 = 1_981

XCRYPTCFB

a16 F3 0F A7 E0

PADLOCK_ACE

16/32-bit

§

Xcryptcfb_32 = 1_982

XCRYPTCFB

a32 F3 0F A7 E0

PADLOCK_ACE

16/32/64-bit

§

Xcryptcfb_64 = 1_983

XCRYPTCFB

a64 F3 0F A7 E0

PADLOCK_ACE

64-bit

§

Xcryptofb_16 = 1_984

XCRYPTOFB

a16 F3 0F A7 E8

PADLOCK_ACE

16/32-bit

§

Xcryptofb_32 = 1_985

XCRYPTOFB

a32 F3 0F A7 E8

PADLOCK_ACE

16/32/64-bit

§

Xcryptofb_64 = 1_986

XCRYPTOFB

a64 F3 0F A7 E8

PADLOCK_ACE

64-bit

§

Ibts_rm16_r16 = 1_987

IBTS r/m16, r16

o16 0F A7 /r

386 A0

16/32-bit

§

Ibts_rm32_r32 = 1_988

IBTS r/m32, r32

o32 0F A7 /r

386 A0

16/32-bit

§

Cmpxchg486_rm8_r8 = 1_989

CMPXCHG r/m8, r8

0F A6 /r

486 A

16/32-bit

§

Cmpxchg486_rm16_r16 = 1_990

CMPXCHG r/m16, r16

o16 0F A7 /r

486 A

16/32-bit

§

Cmpxchg486_rm32_r32 = 1_991

CMPXCHG r/m32, r32

o32 0F A7 /r

486 A

16/32-bit

§

Pushw_GS = 1_992

PUSH GS

o16 0F A8

386+

16/32/64-bit

§

Pushd_GS = 1_993

PUSH GS

o32 0F A8

386+

16/32-bit

§

Pushq_GS = 1_994

PUSH GS

o64 0F A8

X64

64-bit

§

Popw_GS = 1_995

POP GS

o16 0F A9

386+

16/32/64-bit

§

Popd_GS = 1_996

POP GS

o32 0F A9

386+

16/32-bit

§

Popq_GS = 1_997

POP GS

o64 0F A9

X64

64-bit

§

Rsm = 1_998

RSM

0F AA

386+

16/32/64-bit

§

Bts_rm16_r16 = 1_999

BTS r/m16, r16

o16 0F AB /r

386+

16/32/64-bit

§

Bts_rm32_r32 = 2_000

BTS r/m32, r32

o32 0F AB /r

386+

16/32/64-bit

§

Bts_rm64_r64 = 2_001

BTS r/m64, r64

o64 0F AB /r

X64

64-bit

§

Shrd_rm16_r16_imm8 = 2_002

SHRD r/m16, r16, imm8

o16 0F AC /r ib

386+

16/32/64-bit

§

Shrd_rm32_r32_imm8 = 2_003

SHRD r/m32, r32, imm8

o32 0F AC /r ib

386+

16/32/64-bit

§

Shrd_rm64_r64_imm8 = 2_004

SHRD r/m64, r64, imm8

o64 0F AC /r ib

X64

64-bit

§

Shrd_rm16_r16_CL = 2_005

SHRD r/m16, r16, CL

o16 0F AD /r

386+

16/32/64-bit

§

Shrd_rm32_r32_CL = 2_006

SHRD r/m32, r32, CL

o32 0F AD /r

386+

16/32/64-bit

§

Shrd_rm64_r64_CL = 2_007

SHRD r/m64, r64, CL

o64 0F AD /r

X64

64-bit

§

Fxsave_m512byte = 2_008

FXSAVE m512byte

NP 0F AE /0

FXSR

16/32/64-bit

§

Fxsave64_m512byte = 2_009

FXSAVE64 m512byte

NP o64 0F AE /0

FXSR

64-bit

§

Rdfsbase_r32 = 2_010

RDFSBASE r32

F3 0F AE /0

FSGSBASE

64-bit

§

Rdfsbase_r64 = 2_011

RDFSBASE r64

F3 o64 0F AE /0

FSGSBASE

64-bit

§

Fxrstor_m512byte = 2_012

FXRSTOR m512byte

NP 0F AE /1

FXSR

16/32/64-bit

§

Fxrstor64_m512byte = 2_013

FXRSTOR64 m512byte

NP o64 0F AE /1

FXSR

64-bit

§

Rdgsbase_r32 = 2_014

RDGSBASE r32

F3 0F AE /1

FSGSBASE

64-bit

§

Rdgsbase_r64 = 2_015

RDGSBASE r64

F3 o64 0F AE /1

FSGSBASE

64-bit

§

Ldmxcsr_m32 = 2_016

LDMXCSR m32

NP 0F AE /2

SSE

16/32/64-bit

§

Wrfsbase_r32 = 2_017

WRFSBASE r32

F3 0F AE /2

FSGSBASE

64-bit

§

Wrfsbase_r64 = 2_018

WRFSBASE r64

F3 o64 0F AE /2

FSGSBASE

64-bit

§

VEX_Vldmxcsr_m32 = 2_019

VLDMXCSR m32

VEX.LZ.0F.WIG AE /2

AVX

16/32/64-bit

§

Stmxcsr_m32 = 2_020

STMXCSR m32

NP 0F AE /3

SSE

16/32/64-bit

§

Wrgsbase_r32 = 2_021

WRGSBASE r32

F3 0F AE /3

FSGSBASE

64-bit

§

Wrgsbase_r64 = 2_022

WRGSBASE r64

F3 o64 0F AE /3

FSGSBASE

64-bit

§

VEX_Vstmxcsr_m32 = 2_023

VSTMXCSR m32

VEX.LZ.0F.WIG AE /3

AVX

16/32/64-bit

§

Xsave_mem = 2_024

XSAVE mem

NP 0F AE /4

XSAVE

16/32/64-bit

§

Xsave64_mem = 2_025

XSAVE64 mem

NP o64 0F AE /4

XSAVE

64-bit

§

Ptwrite_rm32 = 2_026

PTWRITE r/m32

F3 0F AE /4

PTWRITE

16/32/64-bit

§

Ptwrite_rm64 = 2_027

PTWRITE r/m64

F3 o64 0F AE /4

PTWRITE

64-bit

§

Xrstor_mem = 2_028

XRSTOR mem

NP 0F AE /5

XSAVE

16/32/64-bit

§

Xrstor64_mem = 2_029

XRSTOR64 mem

NP o64 0F AE /5

XSAVE

64-bit

§

Incsspd_r32 = 2_030

INCSSPD r32

F3 0F AE /5

CET_SS

16/32/64-bit

§

Incsspq_r64 = 2_031

INCSSPQ r64

F3 o64 0F AE /5

CET_SS

64-bit

§

Xsaveopt_mem = 2_032

XSAVEOPT mem

NP 0F AE /6

XSAVEOPT

16/32/64-bit

§

Xsaveopt64_mem = 2_033

XSAVEOPT64 mem

NP o64 0F AE /6

XSAVEOPT

64-bit

§

Clwb_m8 = 2_034

CLWB m8

66 0F AE /6

CLWB

16/32/64-bit

§

Tpause_r32 = 2_035

TPAUSE r32, <edx>, <eax>

66 0F AE /6

WAITPKG

16/32/64-bit

§

Tpause_r64 = 2_036

TPAUSE r64, <edx>, <eax>

66 o64 0F AE /6

WAITPKG

64-bit

§

Clrssbsy_m64 = 2_037

CLRSSBSY m64

F3 0F AE /6

CET_SS

16/32/64-bit

§

Umonitor_r16 = 2_038

UMONITOR r16

a16 F3 0F AE /6

WAITPKG

16/32-bit

§

Umonitor_r32 = 2_039

UMONITOR r32

a32 F3 0F AE /6

WAITPKG

16/32/64-bit

§

Umonitor_r64 = 2_040

UMONITOR r64

a64 F3 0F AE /6

WAITPKG

64-bit

§

Umwait_r32 = 2_041

UMWAIT r32, <edx>, <eax>

F2 0F AE /6

WAITPKG

16/32/64-bit

§

Umwait_r64 = 2_042

UMWAIT r64, <edx>, <eax>

F2 o64 0F AE /6

WAITPKG

64-bit

§

Clflush_m8 = 2_043

CLFLUSH m8

NP 0F AE /7

CLFSH

16/32/64-bit

§

Clflushopt_m8 = 2_044

CLFLUSHOPT m8

66 0F AE /7

CLFLUSHOPT

16/32/64-bit

§

Lfence = 2_045

LFENCE

NP 0F AE E8

SSE2

16/32/64-bit

§

Lfence_E9 = 2_046

LFENCE

NP 0F AE E9

SSE2

16/32/64-bit

§

Lfence_EA = 2_047

LFENCE

NP 0F AE EA

SSE2

16/32/64-bit

§

Lfence_EB = 2_048

LFENCE

NP 0F AE EB

SSE2

16/32/64-bit

§

Lfence_EC = 2_049

LFENCE

NP 0F AE EC

SSE2

16/32/64-bit

§

Lfence_ED = 2_050

LFENCE

NP 0F AE ED

SSE2

16/32/64-bit

§

Lfence_EE = 2_051

LFENCE

NP 0F AE EE

SSE2

16/32/64-bit

§

Lfence_EF = 2_052

LFENCE

NP 0F AE EF

SSE2

16/32/64-bit
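
An illustrative sketch, assuming the iced_x86 1.x Decoder API: the eight NP 0F AE E8..EF encodings above decode to distinct Code values but report the same LFENCE mnemonic, so mnemonic-level checks treat them alike.

use iced_x86::{Code, Decoder, DecoderOptions, Mnemonic};

fn main() {
    // Two different LFENCE encodings back to back: 0F AE E8 then 0F AE E9
    let bytes = [0x0F, 0xAE, 0xE8, 0x0F, 0xAE, 0xE9];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let a = decoder.decode();
    let b = decoder.decode();
    assert_eq!(a.code(), Code::Lfence);
    assert_eq!(b.code(), Code::Lfence_E9);
    assert_eq!(a.mnemonic(), Mnemonic::Lfence);
    assert_eq!(b.mnemonic(), Mnemonic::Lfence);
}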

§

Mfence = 2_053

MFENCE

NP 0F AE F0

SSE2

16/32/64-bit

§

Mfence_F1 = 2_054

MFENCE

NP 0F AE F1

SSE2

16/32/64-bit

§

Mfence_F2 = 2_055

MFENCE

NP 0F AE F2

SSE2

16/32/64-bit

§

Mfence_F3 = 2_056

MFENCE

NP 0F AE F3

SSE2

16/32/64-bit

§

Mfence_F4 = 2_057

MFENCE

NP 0F AE F4

SSE2

16/32/64-bit

§

Mfence_F5 = 2_058

MFENCE

NP 0F AE F5

SSE2

16/32/64-bit

§

Mfence_F6 = 2_059

MFENCE

NP 0F AE F6

SSE2

16/32/64-bit

§

Mfence_F7 = 2_060

MFENCE

NP 0F AE F7

SSE2

16/32/64-bit

§

Sfence = 2_061

SFENCE

NP 0F AE F8

SSE

16/32/64-bit

§

Sfence_F9 = 2_062

SFENCE

NP 0F AE F9

SSE

16/32/64-bit

§

Sfence_FA = 2_063

SFENCE

NP 0F AE FA

SSE

16/32/64-bit

§

Sfence_FB = 2_064

SFENCE

NP 0F AE FB

SSE

16/32/64-bit

§

Sfence_FC = 2_065

SFENCE

NP 0F AE FC

SSE

16/32/64-bit

§

Sfence_FD = 2_066

SFENCE

NP 0F AE FD

SSE

16/32/64-bit

§

Sfence_FE = 2_067

SFENCE

NP 0F AE FE

SSE

16/32/64-bit

§

Sfence_FF = 2_068

SFENCE

NP 0F AE FF

SSE

16/32/64-bit
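
The E8-EF, F0-F7 and F8-FF bytes above are the eight register forms of 0F AE with reg = 5, 6 and 7; each byte gets its own Code variant, but all of them decode to the LFENCE, MFENCE and SFENCE mnemonics respectively. A minimal decoding sketch using the iced_x86 Decoder (the byte string and 64-bit mode are illustrative):

    use iced_x86::{Code, Decoder, DecoderOptions, Mnemonic};

    fn main() {
        // NP 0F AE E8 is the canonical LFENCE encoding; E9..=EF decode to the
        // Lfence_E9..Lfence_EF variants listed above with the same mnemonic.
        let bytes = [0x0F, 0xAE, 0xE8];
        let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
        let instr = decoder.decode();

        assert_eq!(instr.code(), Code::Lfence);
        assert_eq!(instr.mnemonic(), Mnemonic::Lfence);
    }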

§

Pcommit = 2_069

PCOMMIT

66 0F AE F8

PCOMMIT

16/32/64-bit

§

Imul_r16_rm16 = 2_070

IMUL r16, r/m16

o16 0F AF /r

386+

16/32/64-bit

§

Imul_r32_rm32 = 2_071

IMUL r32, r/m32

o32 0F AF /r

386+

16/32/64-bit

§

Imul_r64_rm64 = 2_072

IMUL r64, r/m64

o64 0F AF /r

X64

64-bit

§

Cmpxchg_rm8_r8 = 2_073

CMPXCHG r/m8, r8

0F B0 /r

486+

16/32/64-bit

§

Cmpxchg_rm16_r16 = 2_074

CMPXCHG r/m16, r16

o16 0F B1 /r

486+

16/32/64-bit

§

Cmpxchg_rm32_r32 = 2_075

CMPXCHG r/m32, r32

o32 0F B1 /r

486+

16/32/64-bit

§

Cmpxchg_rm64_r64 = 2_076

CMPXCHG r/m64, r64

o64 0F B1 /r

X64

64-bit

§

Lss_r16_m1616 = 2_077

LSS r16, m16:16

o16 0F B2 /r

386+

16/32/64-bit

§

Lss_r32_m1632 = 2_078

LSS r32, m16:32

o32 0F B2 /r

386+

16/32/64-bit

§

Lss_r64_m1664 = 2_079

LSS r64, m16:64

o64 0F B2 /r

X64

64-bit

§

Btr_rm16_r16 = 2_080

BTR r/m16, r16

o16 0F B3 /r

386+

16/32/64-bit

§

Btr_rm32_r32 = 2_081

BTR r/m32, r32

o32 0F B3 /r

386+

16/32/64-bit

§

Btr_rm64_r64 = 2_082

BTR r/m64, r64

o64 0F B3 /r

X64

64-bit

§

Lfs_r16_m1616 = 2_083

LFS r16, m16:16

o16 0F B4 /r

386+

16/32/64-bit

§

Lfs_r32_m1632 = 2_084

LFS r32, m16:32

o32 0F B4 /r

386+

16/32/64-bit

§

Lfs_r64_m1664 = 2_085

LFS r64, m16:64

o64 0F B4 /r

X64

64-bit

§

Lgs_r16_m1616 = 2_086

LGS r16, m16:16

o16 0F B5 /r

386+

16/32/64-bit

§

Lgs_r32_m1632 = 2_087

LGS r32, m16:32

o32 0F B5 /r

386+

16/32/64-bit

§

Lgs_r64_m1664 = 2_088

LGS r64, m16:64

o64 0F B5 /r

X64

64-bit

§

Movzx_r16_rm8 = 2_089

MOVZX r16, r/m8

o16 0F B6 /r

386+

16/32/64-bit

§

Movzx_r32_rm8 = 2_090

MOVZX r32, r/m8

o32 0F B6 /r

386+

16/32/64-bit

§

Movzx_r64_rm8 = 2_091

MOVZX r64, r/m8

o64 0F B6 /r

X64

64-bit

§

Movzx_r16_rm16 = 2_092

MOVZX r16, r/m16

o16 0F B7 /r

386+

16/32/64-bit

§

Movzx_r32_rm16 = 2_093

MOVZX r32, r/m16

o32 0F B7 /r

386+

16/32/64-bit

§

Movzx_r64_rm16 = 2_094

MOVZX r64, r/m16

o64 0F B7 /r

X64

64-bit
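
The MOVZX variants above differ only in the operand sizes that the decoder derives from the prefixes. A short sketch that decodes one form and formats it; the byte string is illustrative and the NASM formatter assumes iced_x86's default crate features:

    use iced_x86::{Code, Decoder, DecoderOptions, Formatter, NasmFormatter};

    fn main() {
        // 48 0F B6 C3 = REX.W + 0F B6 /r, i.e. MOVZX RAX, BL.
        let bytes = [0x48, 0x0F, 0xB6, 0xC3];
        let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
        let instr = decoder.decode();
        assert_eq!(instr.code(), Code::Movzx_r64_rm8);

        let mut formatter = NasmFormatter::new();
        let mut output = String::new();
        formatter.format(&instr, &mut output);
        println!("{}", output); // e.g. "movzx rax,bl"
    }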

§

Jmpe_disp16 = 2_095

JMPE disp16

o16 0F B8 cw

IA-64

16/32-bit

§

Jmpe_disp32 = 2_096

JMPE disp32

o32 0F B8 cd

IA-64

16/32-bit

§

Popcnt_r16_rm16 = 2_097

POPCNT r16, r/m16

o16 F3 0F B8 /r

POPCNT

16/32/64-bit

§

Popcnt_r32_rm32 = 2_098

POPCNT r32, r/m32

o32 F3 0F B8 /r

POPCNT

16/32/64-bit

§

Popcnt_r64_rm64 = 2_099

POPCNT r64, r/m64

F3 o64 0F B8 /r

POPCNT

64-bit

§

Ud1_r16_rm16 = 2_100

UD1 r16, r/m16

o16 0F B9 /r

286+

16/32/64-bit

§

Ud1_r32_rm32 = 2_101

UD1 r32, r/m32

o32 0F B9 /r

386+

16/32/64-bit

§

Ud1_r64_rm64 = 2_102

UD1 r64, r/m64

o64 0F B9 /r

X64

64-bit

§

Bt_rm16_imm8 = 2_103

BT r/m16, imm8

o16 0F BA /4 ib

386+

16/32/64-bit

§

Bt_rm32_imm8 = 2_104

BT r/m32, imm8

o32 0F BA /4 ib

386+

16/32/64-bit

§

Bt_rm64_imm8 = 2_105

BT r/m64, imm8

o64 0F BA /4 ib

X64

64-bit

§

Bts_rm16_imm8 = 2_106

BTS r/m16, imm8

o16 0F BA /5 ib

386+

16/32/64-bit

§

Bts_rm32_imm8 = 2_107

BTS r/m32, imm8

o32 0F BA /5 ib

386+

16/32/64-bit

§

Bts_rm64_imm8 = 2_108

BTS r/m64, imm8

o64 0F BA /5 ib

X64

64-bit

§

Btr_rm16_imm8 = 2_109

BTR r/m16, imm8

o16 0F BA /6 ib

386+

16/32/64-bit

§

Btr_rm32_imm8 = 2_110

BTR r/m32, imm8

o32 0F BA /6 ib

386+

16/32/64-bit

§

Btr_rm64_imm8 = 2_111

BTR r/m64, imm8

o64 0F BA /6 ib

X64

64-bit

§

Btc_rm16_imm8 = 2_112

BTC r/m16, imm8

o16 0F BA /7 ib

386+

16/32/64-bit

§

Btc_rm32_imm8 = 2_113

BTC r/m32, imm8

o32 0F BA /7 ib

386+

16/32/64-bit

§

Btc_rm64_imm8 = 2_114

BTC r/m64, imm8

o64 0F BA /7 ib

X64

64-bit

§

Btc_rm16_r16 = 2_115

BTC r/m16, r16

o16 0F BB /r

386+

16/32/64-bit

§

Btc_rm32_r32 = 2_116

BTC r/m32, r32

o32 0F BB /r

386+

16/32/64-bit

§

Btc_rm64_r64 = 2_117

BTC r/m64, r64

o64 0F BB /r

X64

64-bit

§

Bsf_r16_rm16 = 2_118

BSF r16, r/m16

o16 0F BC /r

386+

16/32/64-bit

§

Bsf_r32_rm32 = 2_119

BSF r32, r/m32

o32 0F BC /r

386+

16/32/64-bit

§

Bsf_r64_rm64 = 2_120

BSF r64, r/m64

o64 0F BC /r

X64

64-bit

§

Tzcnt_r16_rm16 = 2_121

TZCNT r16, r/m16

o16 F3 0F BC /r

BMI1

16/32/64-bit

§

Tzcnt_r32_rm32 = 2_122

TZCNT r32, r/m32

o32 F3 0F BC /r

BMI1

16/32/64-bit

§

Tzcnt_r64_rm64 = 2_123

TZCNT r64, r/m64

F3 o64 0F BC /r

BMI1

64-bit

§

Bsr_r16_rm16 = 2_124

BSR r16, r/m16

o16 0F BD /r

386+

16/32/64-bit

§

Bsr_r32_rm32 = 2_125

BSR r32, r/m32

o32 0F BD /r

386+

16/32/64-bit

§

Bsr_r64_rm64 = 2_126

BSR r64, r/m64

o64 0F BD /r

X64

64-bit

§

Lzcnt_r16_rm16 = 2_127

LZCNT r16, r/m16

o16 F3 0F BD /r

LZCNT

16/32/64-bit

§

Lzcnt_r32_rm32 = 2_128

LZCNT r32, r/m32

o32 F3 0F BD /r

LZCNT

16/32/64-bit

§

Lzcnt_r64_rm64 = 2_129

LZCNT r64, r/m64

F3 o64 0F BD /r

LZCNT

64-bit

§

Movsx_r16_rm8 = 2_130

MOVSX r16, r/m8

o16 0F BE /r

386+

16/32/64-bit

§

Movsx_r32_rm8 = 2_131

MOVSX r32, r/m8

o32 0F BE /r

386+

16/32/64-bit

§

Movsx_r64_rm8 = 2_132

MOVSX r64, r/m8

o64 0F BE /r

X64

64-bit

§

Movsx_r16_rm16 = 2_133

MOVSX r16, r/m16

o16 0F BF /r

386+

16/32/64-bit

§

Movsx_r32_rm16 = 2_134

MOVSX r32, r/m16

o32 0F BF /r

386+

16/32/64-bit

§

Movsx_r64_rm16 = 2_135

MOVSX r64, r/m16

o64 0F BF /r

X64

64-bit

§

Xadd_rm8_r8 = 2_136

XADD r/m8, r8

0F C0 /r

486+

16/32/64-bit

§

Xadd_rm16_r16 = 2_137

XADD r/m16, r16

o16 0F C1 /r

486+

16/32/64-bit

§

Xadd_rm32_r32 = 2_138

XADD r/m32, r32

o32 0F C1 /r

486+

16/32/64-bit

§

Xadd_rm64_r64 = 2_139

XADD r/m64, r64

o64 0F C1 /r

X64

64-bit

§

Cmpps_xmm_xmmm128_imm8 = 2_140

CMPPS xmm1, xmm2/m128, imm8

NP 0F C2 /r ib

SSE

16/32/64-bit

§

VEX_Vcmpps_xmm_xmm_xmmm128_imm8 = 2_141

VCMPPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

VEX_Vcmpps_ymm_ymm_ymmm256_imm8 = 2_142

VCMPPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_xmm_xmmm128b32_imm8 = 2_143

VCMPPS k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.0F.W0 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_ymm_ymmm256b32_imm8 = 2_144

VCMPPS k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.0F.W0 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_zmm_zmmm512b32_imm8_sae = 2_145

VCMPPS k1 {k2}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.0F.W0 C2 /r ib

AVX512F

16/32/64-bit

§

Cmppd_xmm_xmmm128_imm8 = 2_146

CMPPD xmm1, xmm2/m128, imm8

66 0F C2 /r ib

SSE2

16/32/64-bit

§

VEX_Vcmppd_xmm_xmm_xmmm128_imm8 = 2_147

VCMPPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

VEX_Vcmppd_ymm_ymm_ymmm256_imm8 = 2_148

VCMPPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_xmm_xmmm128b64_imm8 = 2_149

VCMPPD k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F.W1 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_ymm_ymmm256b64_imm8 = 2_150

VCMPPD k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F.W1 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_zmm_zmmm512b64_imm8_sae = 2_151

VCMPPD k1 {k2}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F.W1 C2 /r ib

AVX512F

16/32/64-bit

§

Cmpss_xmm_xmmm32_imm8 = 2_152

CMPSS xmm1, xmm2/m32, imm8

F3 0F C2 /r ib

SSE

16/32/64-bit

§

VEX_Vcmpss_xmm_xmm_xmmm32_imm8 = 2_153

VCMPSS xmm1, xmm2, xmm3/m32, imm8

VEX.LIG.F3.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpss_kr_k1_xmm_xmmm32_imm8_sae = 2_154

VCMPSS k1 {k2}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.F3.0F.W0 C2 /r ib

AVX512F

16/32/64-bit

§

Cmpsd_xmm_xmmm64_imm8 = 2_155

CMPSD xmm1, xmm2/m64, imm8

F2 0F C2 /r ib

SSE2

16/32/64-bit

§

VEX_Vcmpsd_xmm_xmm_xmmm64_imm8 = 2_156

VCMPSD xmm1, xmm2, xmm3/m64, imm8

VEX.LIG.F2.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpsd_kr_k1_xmm_xmmm64_imm8_sae = 2_157

VCMPSD k1 {k2}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.F2.0F.W1 C2 /r ib

AVX512F

16/32/64-bit

§

Movnti_m32_r32 = 2_158

MOVNTI m32, r32

NP 0F C3 /r

SSE2

16/32/64-bit

§

Movnti_m64_r64 = 2_159

MOVNTI m64, r64

NP o64 0F C3 /r

SSE2

64-bit

§

Pinsrw_mm_r32m16_imm8 = 2_160

PINSRW mm, r32/m16, imm8

NP 0F C4 /r ib

SSE

16/32/64-bit

§

Pinsrw_mm_r64m16_imm8 = 2_161

PINSRW mm, r64/m16, imm8

NP o64 0F C4 /r ib

SSE

64-bit

§

Pinsrw_xmm_r32m16_imm8 = 2_162

PINSRW xmm, r32/m16, imm8

66 0F C4 /r ib

SSE2

16/32/64-bit

§

Pinsrw_xmm_r64m16_imm8 = 2_163

PINSRW xmm, r64/m16, imm8

66 o64 0F C4 /r ib

SSE2

64-bit

§

VEX_Vpinsrw_xmm_xmm_r32m16_imm8 = 2_164

VPINSRW xmm1, xmm2, r32/m16, imm8

VEX.128.66.0F.W0 C4 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrw_xmm_xmm_r64m16_imm8 = 2_165

VPINSRW xmm1, xmm2, r64/m16, imm8

VEX.128.66.0F.W1 C4 /r ib

AVX

64-bit

§

EVEX_Vpinsrw_xmm_xmm_r32m16_imm8 = 2_166

VPINSRW xmm1, xmm2, r32/m16, imm8

EVEX.128.66.0F.W0 C4 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpinsrw_xmm_xmm_r64m16_imm8 = 2_167

VPINSRW xmm1, xmm2, r64/m16, imm8

EVEX.128.66.0F.W1 C4 /r ib

AVX512BW

64-bit

§

Pextrw_r32_mm_imm8 = 2_168

PEXTRW r32, mm, imm8

NP 0F C5 /r ib

SSE

16/32/64-bit

§

Pextrw_r64_mm_imm8 = 2_169

PEXTRW r64, mm, imm8

NP o64 0F C5 /r ib

SSE

64-bit

§

Pextrw_r32_xmm_imm8 = 2_170

PEXTRW r32, xmm, imm8

66 0F C5 /r ib

SSE2

16/32/64-bit

§

Pextrw_r64_xmm_imm8 = 2_171

PEXTRW r64, xmm, imm8

66 o64 0F C5 /r ib

SSE2

64-bit

§

VEX_Vpextrw_r32_xmm_imm8 = 2_172

VPEXTRW r32, xmm1, imm8

VEX.128.66.0F.W0 C5 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrw_r64_xmm_imm8 = 2_173

VPEXTRW r64, xmm1, imm8

VEX.128.66.0F.W1 C5 /r ib

AVX

64-bit

§

EVEX_Vpextrw_r32_xmm_imm8 = 2_174

VPEXTRW r32, xmm1, imm8

EVEX.128.66.0F.W0 C5 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrw_r64_xmm_imm8 = 2_175

VPEXTRW r64, xmm1, imm8

EVEX.128.66.0F.W1 C5 /r ib

AVX512BW

64-bit

§

Shufps_xmm_xmmm128_imm8 = 2_176

SHUFPS xmm1, xmm2/m128, imm8

NP 0F C6 /r ib

SSE

16/32/64-bit

§

VEX_Vshufps_xmm_xmm_xmmm128_imm8 = 2_177

VSHUFPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

VEX_Vshufps_ymm_ymm_ymmm256_imm8 = 2_178

VSHUFPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

EVEX_Vshufps_xmm_k1z_xmm_xmmm128b32_imm8 = 2_179

VSHUFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.0F.W0 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufps_ymm_k1z_ymm_ymmm256b32_imm8 = 2_180

VSHUFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.0F.W0 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufps_zmm_k1z_zmm_zmmm512b32_imm8 = 2_181

VSHUFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.0F.W0 C6 /r ib

AVX512F

16/32/64-bit

§

Shufpd_xmm_xmmm128_imm8 = 2_182

SHUFPD xmm1, xmm2/m128, imm8

66 0F C6 /r ib

SSE2

16/32/64-bit

§

VEX_Vshufpd_xmm_xmm_xmmm128_imm8 = 2_183

VSHUFPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

VEX_Vshufpd_ymm_ymm_ymmm256_imm8 = 2_184

VSHUFPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

EVEX_Vshufpd_xmm_k1z_xmm_xmmm128b64_imm8 = 2_185

VSHUFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F.W1 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufpd_ymm_k1z_ymm_ymmm256b64_imm8 = 2_186

VSHUFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F.W1 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufpd_zmm_k1z_zmm_zmmm512b64_imm8 = 2_187

VSHUFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F.W1 C6 /r ib

AVX512F

16/32/64-bit

§

Cmpxchg8b_m64 = 2_188

CMPXCHG8B m64

0F C7 /1

CX8

16/32/64-bit

§

Cmpxchg16b_m128 = 2_189

CMPXCHG16B m128

o64 0F C7 /1

CMPXCHG16B

64-bit
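
The opcode and instruction strings shown in each entry are also available on the variant itself. A minimal sketch, assuming Code::op_code() and the op_code_string()/instruction_string() accessors of iced_x86's OpCodeInfo (default crate features):

    use iced_x86::Code;

    fn main() {
        let info = Code::Cmpxchg16b_m128.op_code();
        // Mirrors the "o64 0F C7 /1" and "CMPXCHG16B m128" rows above.
        println!("opcode:      {}", info.op_code_string());
        println!("instruction: {}", info.instruction_string());
    }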

§

Xrstors_mem = 2_190

XRSTORS mem

NP 0F C7 /3

XSAVES

16/32/64-bit

§

Xrstors64_mem = 2_191

XRSTORS64 mem

NP o64 0F C7 /3

XSAVES

64-bit

§

Xsavec_mem = 2_192

XSAVEC mem

NP 0F C7 /4

XSAVEC

16/32/64-bit

§

Xsavec64_mem = 2_193

XSAVEC64 mem

NP o64 0F C7 /4

XSAVEC

64-bit

§

Xsaves_mem = 2_194

XSAVES mem

NP 0F C7 /5

XSAVES

16/32/64-bit

§

Xsaves64_mem = 2_195

XSAVES64 mem

NP o64 0F C7 /5

XSAVES

64-bit

§

Vmptrld_m64 = 2_196

VMPTRLD m64

NP 0F C7 /6

VMX

16/32/64-bit

§

Vmclear_m64 = 2_197

VMCLEAR m64

66 0F C7 /6

VMX

16/32/64-bit

§

Vmxon_m64 = 2_198

VMXON m64

F3 0F C7 /6

VMX

16/32/64-bit

§

Rdrand_r16 = 2_199

RDRAND r16

o16 0F C7 /6

RDRAND

16/32/64-bit

§

Rdrand_r32 = 2_200

RDRAND r32

o32 0F C7 /6

RDRAND

16/32/64-bit

§

Rdrand_r64 = 2_201

RDRAND r64

o64 0F C7 /6

RDRAND

64-bit

§

Vmptrst_m64 = 2_202

VMPTRST m64

NP 0F C7 /7

VMX

16/32/64-bit

§

Rdseed_r16 = 2_203

RDSEED r16

o16 0F C7 /7

RDSEED

16/32/64-bit

§

Rdseed_r32 = 2_204

RDSEED r32

o32 0F C7 /7

RDSEED

16/32/64-bit

§

Rdseed_r64 = 2_205

RDSEED r64

o64 0F C7 /7

RDSEED

64-bit

§

Rdpid_r32 = 2_206

RDPID r32

F3 0F C7 /7

RDPID

16/32-bit

§

Rdpid_r64 = 2_207

RDPID r64

F3 0F C7 /7

RDPID

64-bit
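
Rdpid_r32 and Rdpid_r64 share the F3 0F C7 /7 encoding; which variant is produced depends on the decoder's bitness, matching the 16/32-bit and 64-bit rows above. A sketch with an illustrative byte string:

    use iced_x86::{Code, Decoder, DecoderOptions};

    fn main() {
        // F3 0F C7 F8 = RDPID with a register operand (reg field = 7, rm = 0).
        let bytes = [0xF3, 0x0F, 0xC7, 0xF8];

        let code32 = Decoder::new(32, &bytes, DecoderOptions::NONE).decode().code();
        let code64 = Decoder::new(64, &bytes, DecoderOptions::NONE).decode().code();

        assert_eq!(code32, Code::Rdpid_r32); // RDPID EAX outside 64-bit mode
        assert_eq!(code64, Code::Rdpid_r64); // RDPID RAX in 64-bit mode
    }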

§

Bswap_r16 = 2_208

BSWAP r16

o16 0F C8+rw

486+

16/32/64-bit

§

Bswap_r32 = 2_209

BSWAP r32

o32 0F C8+rd

486+

16/32/64-bit

§

Bswap_r64 = 2_210

BSWAP r64

o64 0F C8+ro

X64

64-bit

§

Addsubpd_xmm_xmmm128 = 2_211

ADDSUBPD xmm1, xmm2/m128

66 0F D0 /r

SSE3

16/32/64-bit

§

VEX_Vaddsubpd_xmm_xmm_xmmm128 = 2_212

VADDSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D0 /r

AVX

16/32/64-bit

§

VEX_Vaddsubpd_ymm_ymm_ymmm256 = 2_213

VADDSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D0 /r

AVX

16/32/64-bit

§

Addsubps_xmm_xmmm128 = 2_214

ADDSUBPS xmm1, xmm2/m128

F2 0F D0 /r

SSE3

16/32/64-bit

§

VEX_Vaddsubps_xmm_xmm_xmmm128 = 2_215

VADDSUBPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG D0 /r

AVX

16/32/64-bit

§

VEX_Vaddsubps_ymm_ymm_ymmm256 = 2_216

VADDSUBPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG D0 /r

AVX

16/32/64-bit

§

Psrlw_mm_mmm64 = 2_217

PSRLW mm, mm/m64

NP 0F D1 /r

MMX

16/32/64-bit

§

Psrlw_xmm_xmmm128 = 2_218

PSRLW xmm1, xmm2/m128

66 0F D1 /r

SSE2

16/32/64-bit

§

VEX_Vpsrlw_xmm_xmm_xmmm128 = 2_219

VPSRLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D1 /r

AVX

16/32/64-bit

§

VEX_Vpsrlw_ymm_ymm_xmmm128 = 2_220

VPSRLW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlw_xmm_k1z_xmm_xmmm128 = 2_221

VPSRLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_ymm_k1z_ymm_xmmm128 = 2_222

VPSRLW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG D1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_zmm_k1z_zmm_xmmm128 = 2_223

VPSRLW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG D1 /r

AVX512BW

16/32/64-bit

§

Psrld_mm_mmm64 = 2_224

PSRLD mm, mm/m64

NP 0F D2 /r

MMX

16/32/64-bit

§

Psrld_xmm_xmmm128 = 2_225

PSRLD xmm1, xmm2/m128

66 0F D2 /r

SSE2

16/32/64-bit

§

VEX_Vpsrld_xmm_xmm_xmmm128 = 2_226

VPSRLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D2 /r

AVX

16/32/64-bit

§

VEX_Vpsrld_ymm_ymm_xmmm128 = 2_227

VPSRLD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D2 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrld_xmm_k1z_xmm_xmmm128 = 2_228

VPSRLD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 D2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_ymm_k1z_ymm_xmmm128 = 2_229

VPSRLD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 D2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_zmm_k1z_zmm_xmmm128 = 2_230

VPSRLD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 D2 /r

AVX512F

16/32/64-bit

§

Psrlq_mm_mmm64 = 2_231

PSRLQ mm, mm/m64

NP 0F D3 /r

MMX

16/32/64-bit

§

Psrlq_xmm_xmmm128 = 2_232

PSRLQ xmm1, xmm2/m128

66 0F D3 /r

SSE2

16/32/64-bit

§

VEX_Vpsrlq_xmm_xmm_xmmm128 = 2_233

VPSRLQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D3 /r

AVX

16/32/64-bit

§

VEX_Vpsrlq_ymm_ymm_xmmm128 = 2_234

VPSRLQ ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D3 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlq_xmm_k1z_xmm_xmmm128 = 2_235

VPSRLQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 D3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_ymm_k1z_ymm_xmmm128 = 2_236

VPSRLQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 D3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_zmm_k1z_zmm_xmmm128 = 2_237

VPSRLQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 D3 /r

AVX512F

16/32/64-bit

§

Paddq_mm_mmm64 = 2_238

PADDQ mm, mm/m64

NP 0F D4 /r

MMX

16/32/64-bit

§

Paddq_xmm_xmmm128 = 2_239

PADDQ xmm1, xmm2/m128

66 0F D4 /r

SSE2

16/32/64-bit

§

VEX_Vpaddq_xmm_xmm_xmmm128 = 2_240

VPADDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D4 /r

AVX

16/32/64-bit

§

VEX_Vpaddq_ymm_ymm_ymmm256 = 2_241

VPADDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D4 /r

AVX2

16/32/64-bit

§

EVEX_Vpaddq_xmm_k1z_xmm_xmmm128b64 = 2_242

VPADDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 D4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddq_ymm_k1z_ymm_ymmm256b64 = 2_243

VPADDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 D4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddq_zmm_k1z_zmm_zmmm512b64 = 2_244

VPADDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 D4 /r

AVX512F

16/32/64-bit

§

Pmullw_mm_mmm64 = 2_245

PMULLW mm, mm/m64

NP 0F D5 /r

MMX

16/32/64-bit

§

Pmullw_xmm_xmmm128 = 2_246

PMULLW xmm1, xmm2/m128

66 0F D5 /r

SSE2

16/32/64-bit

§

VEX_Vpmullw_xmm_xmm_xmmm128 = 2_247

VPMULLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D5 /r

AVX

16/32/64-bit

§

VEX_Vpmullw_ymm_ymm_ymmm256 = 2_248

VPMULLW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmullw_xmm_k1z_xmm_xmmm128 = 2_249

VPMULLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmullw_ymm_k1z_ymm_ymmm256 = 2_250

VPMULLW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmullw_zmm_k1z_zmm_zmmm512 = 2_251

VPMULLW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D5 /r

AVX512BW

16/32/64-bit

§

Movq_xmmm64_xmm = 2_252

MOVQ xmm2/m64, xmm1

66 0F D6 /r

SSE2

16/32/64-bit

§

VEX_Vmovq_xmmm64_xmm = 2_253

VMOVQ xmm1/m64, xmm2

VEX.128.66.0F.WIG D6 /r

AVX

16/32/64-bit

§

EVEX_Vmovq_xmmm64_xmm = 2_254

VMOVQ xmm1/m64, xmm2

EVEX.128.66.0F.W1 D6 /r

AVX512F

16/32/64-bit

§

Movq2dq_xmm_mm = 2_255

MOVQ2DQ xmm, mm

F3 0F D6 /r

SSE2

16/32/64-bit

§

Movdq2q_mm_xmm = 2_256

MOVDQ2Q mm, xmm

F2 0F D6 /r

SSE2

16/32/64-bit

§

Pmovmskb_r32_mm = 2_257

PMOVMSKB r32, mm

NP 0F D7 /r

SSE

16/32/64-bit

§

Pmovmskb_r64_mm = 2_258

PMOVMSKB r64, mm

NP o64 0F D7 /r

SSE

64-bit

§

Pmovmskb_r32_xmm = 2_259

PMOVMSKB r32, xmm

66 0F D7 /r

SSE2

16/32/64-bit

§

Pmovmskb_r64_xmm = 2_260

PMOVMSKB r64, xmm

66 o64 0F D7 /r

SSE2

64-bit

§

VEX_Vpmovmskb_r32_xmm = 2_261

VPMOVMSKB r32, xmm1

VEX.128.66.0F.W0 D7 /r

AVX

16/32/64-bit

§

VEX_Vpmovmskb_r64_xmm = 2_262

VPMOVMSKB r64, xmm1

VEX.128.66.0F.W1 D7 /r

AVX

64-bit

§

VEX_Vpmovmskb_r32_ymm = 2_263

VPMOVMSKB r32, ymm1

VEX.256.66.0F.W0 D7 /r

AVX2

16/32/64-bit

§

VEX_Vpmovmskb_r64_ymm = 2_264

VPMOVMSKB r64, ymm1

VEX.256.66.0F.W1 D7 /r

AVX2

64-bit

§

Psubusb_mm_mmm64 = 2_265

PSUBUSB mm, mm/m64

NP 0F D8 /r

MMX

16/32/64-bit

§

Psubusb_xmm_xmmm128 = 2_266

PSUBUSB xmm1, xmm2/m128

66 0F D8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubusb_xmm_xmm_xmmm128 = 2_267

VPSUBUSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D8 /r

AVX

16/32/64-bit

§

VEX_Vpsubusb_ymm_ymm_ymmm256 = 2_268

VPSUBUSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubusb_xmm_k1z_xmm_xmmm128 = 2_269

VPSUBUSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusb_ymm_k1z_ymm_ymmm256 = 2_270

VPSUBUSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusb_zmm_k1z_zmm_zmmm512 = 2_271

VPSUBUSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D8 /r

AVX512BW

16/32/64-bit

§

Psubusw_mm_mmm64 = 2_272

PSUBUSW mm, mm/m64

NP 0F D9 /r

MMX

16/32/64-bit

§

Psubusw_xmm_xmmm128 = 2_273

PSUBUSW xmm1, xmm2/m128

66 0F D9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubusw_xmm_xmm_xmmm128 = 2_274

VPSUBUSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D9 /r

AVX

16/32/64-bit

§

VEX_Vpsubusw_ymm_ymm_ymmm256 = 2_275

VPSUBUSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubusw_xmm_k1z_xmm_xmmm128 = 2_276

VPSUBUSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusw_ymm_k1z_ymm_ymmm256 = 2_277

VPSUBUSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusw_zmm_k1z_zmm_zmmm512 = 2_278

VPSUBUSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D9 /r

AVX512BW

16/32/64-bit

§

Pminub_mm_mmm64 = 2_279

PMINUB mm1, mm2/m64

NP 0F DA /r

SSE

16/32/64-bit

§

Pminub_xmm_xmmm128 = 2_280

PMINUB xmm1, xmm2/m128

66 0F DA /r

SSE2

16/32/64-bit

§

VEX_Vpminub_xmm_xmm_xmmm128 = 2_281

VPMINUB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DA /r

AVX

16/32/64-bit

§

VEX_Vpminub_ymm_ymm_ymmm256 = 2_282

VPMINUB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DA /r

AVX2

16/32/64-bit

§

EVEX_Vpminub_xmm_k1z_xmm_xmmm128 = 2_283

VPMINUB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminub_ymm_k1z_ymm_ymmm256 = 2_284

VPMINUB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminub_zmm_k1z_zmm_zmmm512 = 2_285

VPMINUB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DA /r

AVX512BW

16/32/64-bit

§

Pand_mm_mmm64 = 2_286

PAND mm, mm/m64

NP 0F DB /r

MMX

16/32/64-bit

§

Pand_xmm_xmmm128 = 2_287

PAND xmm1, xmm2/m128

66 0F DB /r

SSE2

16/32/64-bit

§

VEX_Vpand_xmm_xmm_xmmm128 = 2_288

VPAND xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DB /r

AVX

16/32/64-bit

§

VEX_Vpand_ymm_ymm_ymmm256 = 2_289

VPAND ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DB /r

AVX2

16/32/64-bit

§

EVEX_Vpandd_xmm_k1z_xmm_xmmm128b32 = 2_290

VPANDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandd_ymm_k1z_ymm_ymmm256b32 = 2_291

VPANDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandd_zmm_k1z_zmm_zmmm512b32 = 2_292

VPANDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 DB /r

AVX512F

16/32/64-bit

§

EVEX_Vpandq_xmm_k1z_xmm_xmmm128b64 = 2_293

VPANDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandq_ymm_k1z_ymm_ymmm256b64 = 2_294

VPANDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandq_zmm_k1z_zmm_zmmm512b64 = 2_295

VPANDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 DB /r

AVX512F

16/32/64-bit
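
Under EVEX the untyped PAND/VPAND operation splits into the element-typed VPANDD and VPANDQ forms listed above, which is what enables per-element masking and broadcast. The encoding kind and mnemonic of a variant can be inspected directly; a minimal sketch, assuming Code::encoding() and Code::mnemonic() from iced_x86 (default crate features):

    use iced_x86::{Code, EncodingKind, Mnemonic};

    fn main() {
        let code = Code::EVEX_Vpandd_xmm_k1z_xmm_xmmm128b32;
        assert_eq!(code.encoding(), EncodingKind::EVEX);
        assert_eq!(code.mnemonic(), Mnemonic::Vpandd);

        // The legacy SSE2 form keeps the untyped mnemonic.
        assert_eq!(Code::Pand_xmm_xmmm128.mnemonic(), Mnemonic::Pand);
    }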

§

Paddusb_mm_mmm64 = 2_296

PADDUSB mm, mm/m64

NP 0F DC /r

MMX

16/32/64-bit

§

Paddusb_xmm_xmmm128 = 2_297

PADDUSB xmm1, xmm2/m128

66 0F DC /r

SSE2

16/32/64-bit

§

VEX_Vpaddusb_xmm_xmm_xmmm128 = 2_298

VPADDUSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DC /r

AVX

16/32/64-bit

§

VEX_Vpaddusb_ymm_ymm_ymmm256 = 2_299

VPADDUSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddusb_xmm_k1z_xmm_xmmm128 = 2_300

VPADDUSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusb_ymm_k1z_ymm_ymmm256 = 2_301

VPADDUSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusb_zmm_k1z_zmm_zmmm512 = 2_302

VPADDUSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DC /r

AVX512BW

16/32/64-bit

§

Paddusw_mm_mmm64 = 2_303

PADDUSW mm, mm/m64

NP 0F DD /r

MMX

16/32/64-bit

§

Paddusw_xmm_xmmm128 = 2_304

PADDUSW xmm1, xmm2/m128

66 0F DD /r

SSE2

16/32/64-bit

§

VEX_Vpaddusw_xmm_xmm_xmmm128 = 2_305

VPADDUSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DD /r

AVX

16/32/64-bit

§

VEX_Vpaddusw_ymm_ymm_ymmm256 = 2_306

VPADDUSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DD /r

AVX2

16/32/64-bit

§

EVEX_Vpaddusw_xmm_k1z_xmm_xmmm128 = 2_307

VPADDUSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusw_ymm_k1z_ymm_ymmm256 = 2_308

VPADDUSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusw_zmm_k1z_zmm_zmmm512 = 2_309

VPADDUSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DD /r

AVX512BW

16/32/64-bit

§

Pmaxub_mm_mmm64 = 2_310

PMAXUB mm1, mm2/m64

NP 0F DE /r

SSE

16/32/64-bit

§

Pmaxub_xmm_xmmm128 = 2_311

PMAXUB xmm1, xmm2/m128

66 0F DE /r

SSE2

16/32/64-bit

§

VEX_Vpmaxub_xmm_xmm_xmmm128 = 2_312

VPMAXUB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DE /r

AVX

16/32/64-bit

§

VEX_Vpmaxub_ymm_ymm_ymmm256 = 2_313

VPMAXUB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DE /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxub_xmm_k1z_xmm_xmmm128 = 2_314

VPMAXUB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxub_ymm_k1z_ymm_ymmm256 = 2_315

VPMAXUB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxub_zmm_k1z_zmm_zmmm512 = 2_316

VPMAXUB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DE /r

AVX512BW

16/32/64-bit

§

Pandn_mm_mmm64 = 2_317

PANDN mm, mm/m64

NP 0F DF /r

MMX

16/32/64-bit

§

Pandn_xmm_xmmm128 = 2_318

PANDN xmm1, xmm2/m128

66 0F DF /r

SSE2

16/32/64-bit

§

VEX_Vpandn_xmm_xmm_xmmm128 = 2_319

VPANDN xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DF /r

AVX

16/32/64-bit

§

VEX_Vpandn_ymm_ymm_ymmm256 = 2_320

VPANDN ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DF /r

AVX2

16/32/64-bit

§

EVEX_Vpandnd_xmm_k1z_xmm_xmmm128b32 = 2_321

VPANDND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnd_ymm_k1z_ymm_ymmm256b32 = 2_322

VPANDND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnd_zmm_k1z_zmm_zmmm512b32 = 2_323

VPANDND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 DF /r

AVX512F

16/32/64-bit

§

EVEX_Vpandnq_xmm_k1z_xmm_xmmm128b64 = 2_324

VPANDNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnq_ymm_k1z_ymm_ymmm256b64 = 2_325

VPANDNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnq_zmm_k1z_zmm_zmmm512b64 = 2_326

VPANDNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 DF /r

AVX512F

16/32/64-bit

§

Pavgb_mm_mmm64 = 2_327

PAVGB mm1, mm2/m64

NP 0F E0 /r

SSE

16/32/64-bit

§

Pavgb_xmm_xmmm128 = 2_328

PAVGB xmm1, xmm2/m128

66 0F E0 /r

SSE2

16/32/64-bit

§

VEX_Vpavgb_xmm_xmm_xmmm128 = 2_329

VPAVGB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E0 /r

AVX

16/32/64-bit

§

VEX_Vpavgb_ymm_ymm_ymmm256 = 2_330

VPAVGB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E0 /r

AVX2

16/32/64-bit

§

EVEX_Vpavgb_xmm_k1z_xmm_xmmm128 = 2_331

VPAVGB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E0 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgb_ymm_k1z_ymm_ymmm256 = 2_332

VPAVGB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E0 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgb_zmm_k1z_zmm_zmmm512 = 2_333

VPAVGB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E0 /r

AVX512BW

16/32/64-bit

§

Psraw_mm_mmm64 = 2_334

PSRAW mm, mm/m64

NP 0F E1 /r

MMX

16/32/64-bit

§

Psraw_xmm_xmmm128 = 2_335

PSRAW xmm1, xmm2/m128

66 0F E1 /r

SSE2

16/32/64-bit

§

VEX_Vpsraw_xmm_xmm_xmmm128 = 2_336

VPSRAW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E1 /r

AVX

16/32/64-bit

§

VEX_Vpsraw_ymm_ymm_xmmm128 = 2_337

VPSRAW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG E1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsraw_xmm_k1z_xmm_xmmm128 = 2_338

VPSRAW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_ymm_k1z_ymm_xmmm128 = 2_339

VPSRAW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG E1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_zmm_k1z_zmm_xmmm128 = 2_340

VPSRAW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG E1 /r

AVX512BW

16/32/64-bit

§

Psrad_mm_mmm64 = 2_341

PSRAD mm, mm/m64

NP 0F E2 /r

MMX

16/32/64-bit

§

Psrad_xmm_xmmm128 = 2_342

PSRAD xmm1, xmm2/m128

66 0F E2 /r

SSE2

16/32/64-bit

§

VEX_Vpsrad_xmm_xmm_xmmm128 = 2_343

VPSRAD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E2 /r

AVX

16/32/64-bit

§

VEX_Vpsrad_ymm_ymm_xmmm128 = 2_344

VPSRAD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG E2 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrad_xmm_k1z_xmm_xmmm128 = 2_345

VPSRAD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_ymm_k1z_ymm_xmmm128 = 2_346

VPSRAD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_zmm_k1z_zmm_xmmm128 = 2_347

VPSRAD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 E2 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsraq_xmm_k1z_xmm_xmmm128 = 2_348

VPSRAQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_ymm_k1z_ymm_xmmm128 = 2_349

VPSRAQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_zmm_k1z_zmm_xmmm128 = 2_350

VPSRAQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 E2 /r

AVX512F

16/32/64-bit

§

Pavgw_mm_mmm64 = 2_351

PAVGW mm1, mm2/m64

NP 0F E3 /r

SSE

16/32/64-bit

§

Pavgw_xmm_xmmm128 = 2_352

PAVGW xmm1, xmm2/m128

66 0F E3 /r

SSE2

16/32/64-bit

§

VEX_Vpavgw_xmm_xmm_xmmm128 = 2_353

VPAVGW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E3 /r

AVX

16/32/64-bit

§

VEX_Vpavgw_ymm_ymm_ymmm256 = 2_354

VPAVGW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E3 /r

AVX2

16/32/64-bit

§

EVEX_Vpavgw_xmm_k1z_xmm_xmmm128 = 2_355

VPAVGW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E3 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgw_ymm_k1z_ymm_ymmm256 = 2_356

VPAVGW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E3 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgw_zmm_k1z_zmm_zmmm512 = 2_357

VPAVGW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E3 /r

AVX512BW

16/32/64-bit

§

Pmulhuw_mm_mmm64 = 2_358

PMULHUW mm1, mm2/m64

NP 0F E4 /r

SSE

16/32/64-bit

§

Pmulhuw_xmm_xmmm128 = 2_359

PMULHUW xmm1, xmm2/m128

66 0F E4 /r

SSE2

16/32/64-bit

§

VEX_Vpmulhuw_xmm_xmm_xmmm128 = 2_360

VPMULHUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E4 /r

AVX

16/32/64-bit

§

VEX_Vpmulhuw_ymm_ymm_ymmm256 = 2_361

VPMULHUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E4 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhuw_xmm_k1z_xmm_xmmm128 = 2_362

VPMULHUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E4 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhuw_ymm_k1z_ymm_ymmm256 = 2_363

VPMULHUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E4 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhuw_zmm_k1z_zmm_zmmm512 = 2_364

VPMULHUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E4 /r

AVX512BW

16/32/64-bit

§

Pmulhw_mm_mmm64 = 2_365

PMULHW mm, mm/m64

NP 0F E5 /r

MMX

16/32/64-bit

§

Pmulhw_xmm_xmmm128 = 2_366

PMULHW xmm1, xmm2/m128

66 0F E5 /r

SSE2

16/32/64-bit

§

VEX_Vpmulhw_xmm_xmm_xmmm128 = 2_367

VPMULHW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E5 /r

AVX

16/32/64-bit

§

VEX_Vpmulhw_ymm_ymm_ymmm256 = 2_368

VPMULHW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhw_xmm_k1z_xmm_xmmm128 = 2_369

VPMULHW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhw_ymm_k1z_ymm_ymmm256 = 2_370

VPMULHW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhw_zmm_k1z_zmm_zmmm512 = 2_371

VPMULHW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E5 /r

AVX512BW

16/32/64-bit

§

Cvttpd2dq_xmm_xmmm128 = 2_372

CVTTPD2DQ xmm1, xmm2/m128

66 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvttpd2dq_xmm_xmmm128 = 2_373

VCVTTPD2DQ xmm1, xmm2/m128

VEX.128.66.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvttpd2dq_xmm_ymmm256 = 2_374

VCVTTPD2DQ xmm1, ymm2/m256

VEX.256.66.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvttpd2dq_xmm_k1z_xmmm128b64 = 2_375

VCVTTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2dq_xmm_k1z_ymmm256b64 = 2_376

VCVTTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2dq_ymm_k1z_zmmm512b64_sae = 2_377

VCVTTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 E6 /r

AVX512F

16/32/64-bit

§

Cvtdq2pd_xmm_xmmm64 = 2_378

CVTDQ2PD xmm1, xmm2/m64

F3 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvtdq2pd_xmm_xmmm64 = 2_379

VCVTDQ2PD xmm1, xmm2/m64

VEX.128.F3.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvtdq2pd_ymm_xmmm128 = 2_380

VCVTDQ2PD ymm1, xmm2/m128

VEX.256.F3.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvtdq2pd_xmm_k1z_xmmm64b32 = 2_381

VCVTDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.F3.0F.W0 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2pd_ymm_k1z_xmmm128b32 = 2_382

VCVTDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.F3.0F.W0 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2pd_zmm_k1z_ymmm256b32_er = 2_383

VCVTDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.F3.0F.W0 E6 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtqq2pd_xmm_k1z_xmmm128b64 = 2_384

VCVTQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F3.0F.W1 E6 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2pd_ymm_k1z_ymmm256b64 = 2_385

VCVTQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F3.0F.W1 E6 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2pd_zmm_k1z_zmmm512b64_er = 2_386

VCVTQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F3.0F.W1 E6 /r

AVX512DQ

16/32/64-bit

§

Cvtpd2dq_xmm_xmmm128 = 2_387

CVTPD2DQ xmm1, xmm2/m128

F2 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvtpd2dq_xmm_xmmm128 = 2_388

VCVTPD2DQ xmm1, xmm2/m128

VEX.128.F2.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvtpd2dq_xmm_ymmm256 = 2_389

VCVTPD2DQ xmm1, ymm2/m256

VEX.256.F2.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvtpd2dq_xmm_k1z_xmmm128b64 = 2_390

VCVTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2dq_xmm_k1z_ymmm256b64 = 2_391

VCVTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2dq_ymm_k1z_zmmm512b64_er = 2_392

VCVTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.0F.W1 E6 /r

AVX512F

16/32/64-bit

§

Movntq_m64_mm = 2_393

MOVNTQ m64, mm

NP 0F E7 /r

SSE

16/32/64-bit

§

Movntdq_m128_xmm = 2_394

MOVNTDQ m128, xmm1

66 0F E7 /r

SSE2

16/32/64-bit

§

VEX_Vmovntdq_m128_xmm = 2_395

VMOVNTDQ m128, xmm1

VEX.128.66.0F.WIG E7 /r

AVX

16/32/64-bit

§

VEX_Vmovntdq_m256_ymm = 2_396

VMOVNTDQ m256, ymm1

VEX.256.66.0F.WIG E7 /r

AVX

16/32/64-bit

§

EVEX_Vmovntdq_m128_xmm = 2_397

VMOVNTDQ m128, xmm1

EVEX.128.66.0F.W0 E7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdq_m256_ymm = 2_398

VMOVNTDQ m256, ymm1

EVEX.256.66.0F.W0 E7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdq_m512_zmm = 2_399

VMOVNTDQ m512, zmm1

EVEX.512.66.0F.W0 E7 /r

AVX512F

16/32/64-bit

§

Psubsb_mm_mmm64 = 2_400

PSUBSB mm, mm/m64

NP 0F E8 /r

MMX

16/32/64-bit

§

Psubsb_xmm_xmmm128 = 2_401

PSUBSB xmm1, xmm2/m128

66 0F E8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubsb_xmm_xmm_xmmm128 = 2_402

VPSUBSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E8 /r

AVX

16/32/64-bit

§

VEX_Vpsubsb_ymm_ymm_ymmm256 = 2_403

VPSUBSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubsb_xmm_k1z_xmm_xmmm128 = 2_404

VPSUBSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsb_ymm_k1z_ymm_ymmm256 = 2_405

VPSUBSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsb_zmm_k1z_zmm_zmmm512 = 2_406

VPSUBSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E8 /r

AVX512BW

16/32/64-bit

§

Psubsw_mm_mmm64 = 2_407

PSUBSW mm, mm/m64

NP 0F E9 /r

MMX

16/32/64-bit

§

Psubsw_xmm_xmmm128 = 2_408

PSUBSW xmm1, xmm2/m128

66 0F E9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubsw_xmm_xmm_xmmm128 = 2_409

VPSUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E9 /r

AVX

16/32/64-bit

§

VEX_Vpsubsw_ymm_ymm_ymmm256 = 2_410

VPSUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubsw_xmm_k1z_xmm_xmmm128 = 2_411

VPSUBSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsw_ymm_k1z_ymm_ymmm256 = 2_412

VPSUBSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsw_zmm_k1z_zmm_zmmm512 = 2_413

VPSUBSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E9 /r

AVX512BW

16/32/64-bit

§

Pminsw_mm_mmm64 = 2_414

PMINSW mm1, mm2/m64

NP 0F EA /r

SSE

16/32/64-bit

§

Pminsw_xmm_xmmm128 = 2_415

PMINSW xmm1, xmm2/m128

66 0F EA /r

SSE2

16/32/64-bit

§

VEX_Vpminsw_xmm_xmm_xmmm128 = 2_416

VPMINSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EA /r

AVX

16/32/64-bit

§

VEX_Vpminsw_ymm_ymm_ymmm256 = 2_417

VPMINSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EA /r

AVX2

16/32/64-bit

§

EVEX_Vpminsw_xmm_k1z_xmm_xmmm128 = 2_418

VPMINSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsw_ymm_k1z_ymm_ymmm256 = 2_419

VPMINSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsw_zmm_k1z_zmm_zmmm512 = 2_420

VPMINSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EA /r

AVX512BW

16/32/64-bit

§

Por_mm_mmm64 = 2_421

POR mm, mm/m64

NP 0F EB /r

MMX

16/32/64-bit

§

Por_xmm_xmmm128 = 2_422

POR xmm1, xmm2/m128

66 0F EB /r

SSE2

16/32/64-bit

§

VEX_Vpor_xmm_xmm_xmmm128 = 2_423

VPOR xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EB /r

AVX

16/32/64-bit

§

VEX_Vpor_ymm_ymm_ymmm256 = 2_424

VPOR ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EB /r

AVX2

16/32/64-bit

§

EVEX_Vpord_xmm_k1z_xmm_xmmm128b32 = 2_425

VPORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpord_ymm_k1z_ymm_ymmm256b32 = 2_426

VPORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpord_zmm_k1z_zmm_zmmm512b32 = 2_427

VPORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 EB /r

AVX512F

16/32/64-bit

§

EVEX_Vporq_xmm_k1z_xmm_xmmm128b64 = 2_428

VPORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vporq_ymm_k1z_ymm_ymmm256b64 = 2_429

VPORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vporq_zmm_k1z_zmm_zmmm512b64 = 2_430

VPORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 EB /r

AVX512F

16/32/64-bit

§

Paddsb_mm_mmm64 = 2_431

PADDSB mm, mm/m64

NP 0F EC /r

MMX

16/32/64-bit

§

Paddsb_xmm_xmmm128 = 2_432

PADDSB xmm1, xmm2/m128

66 0F EC /r

SSE2

16/32/64-bit

§

VEX_Vpaddsb_xmm_xmm_xmmm128 = 2_433

VPADDSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EC /r

AVX

16/32/64-bit

§

VEX_Vpaddsb_ymm_ymm_ymmm256 = 2_434

VPADDSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddsb_xmm_k1z_xmm_xmmm128 = 2_435

VPADDSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsb_ymm_k1z_ymm_ymmm256 = 2_436

VPADDSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsb_zmm_k1z_zmm_zmmm512 = 2_437

VPADDSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EC /r

AVX512BW

16/32/64-bit

§

Paddsw_mm_mmm64 = 2_438

PADDSW mm, mm/m64

NP 0F ED /r

MMX

16/32/64-bit

§

Paddsw_xmm_xmmm128 = 2_439

PADDSW xmm1, xmm2/m128

66 0F ED /r

SSE2

16/32/64-bit

§

VEX_Vpaddsw_xmm_xmm_xmmm128 = 2_440

VPADDSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG ED /r

AVX

16/32/64-bit

§

VEX_Vpaddsw_ymm_ymm_ymmm256 = 2_441

VPADDSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG ED /r

AVX2

16/32/64-bit

§

EVEX_Vpaddsw_xmm_k1z_xmm_xmmm128 = 2_442

VPADDSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG ED /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsw_ymm_k1z_ymm_ymmm256 = 2_443

VPADDSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG ED /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsw_zmm_k1z_zmm_zmmm512 = 2_444

VPADDSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG ED /r

AVX512BW

16/32/64-bit

§

Pmaxsw_mm_mmm64 = 2_445

PMAXSW mm1, mm2/m64

NP 0F EE /r

SSE

16/32/64-bit

§

Pmaxsw_xmm_xmmm128 = 2_446

PMAXSW xmm1, xmm2/m128

66 0F EE /r

SSE2

16/32/64-bit

§

VEX_Vpmaxsw_xmm_xmm_xmmm128 = 2_447

VPMAXSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EE /r

AVX

16/32/64-bit

§

VEX_Vpmaxsw_ymm_ymm_ymmm256 = 2_448

VPMAXSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EE /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsw_xmm_k1z_xmm_xmmm128 = 2_449

VPMAXSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsw_ymm_k1z_ymm_ymmm256 = 2_450

VPMAXSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsw_zmm_k1z_zmm_zmmm512 = 2_451

VPMAXSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EE /r

AVX512BW

16/32/64-bit

§

Pxor_mm_mmm64 = 2_452

PXOR mm, mm/m64

NP 0F EF /r

MMX

16/32/64-bit

§

Pxor_xmm_xmmm128 = 2_453

PXOR xmm1, xmm2/m128

66 0F EF /r

SSE2

16/32/64-bit

§

VEX_Vpxor_xmm_xmm_xmmm128 = 2_454

VPXOR xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EF /r

AVX

16/32/64-bit

§

VEX_Vpxor_ymm_ymm_ymmm256 = 2_455

VPXOR ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EF /r

AVX2

16/32/64-bit

§

EVEX_Vpxord_xmm_k1z_xmm_xmmm128b32 = 2_456

VPXORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxord_ymm_k1z_ymm_ymmm256b32 = 2_457

VPXORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxord_zmm_k1z_zmm_zmmm512b32 = 2_458

VPXORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 EF /r

AVX512F

16/32/64-bit

§

EVEX_Vpxorq_xmm_k1z_xmm_xmmm128b64 = 2_459

VPXORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxorq_ymm_k1z_ymm_ymmm256b64 = 2_460

VPXORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxorq_zmm_k1z_zmm_zmmm512b64 = 2_461

VPXORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 EF /r

AVX512F

16/32/64-bit
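
The same logical operation thus appears as separate legacy, VEX and EVEX variants. Decoding the SSE2 and AVX forms side by side makes the distinction visible; a minimal sketch with illustrative byte strings:

    use iced_x86::{Code, Decoder, DecoderOptions};

    fn main() {
        // 66 0F EF C1 = PXOR xmm0, xmm1 (legacy SSE2 encoding)
        // C5 F9 EF C1 = VPXOR xmm0, xmm0, xmm1 (VEX.128.66.0F.WIG EF /r)
        let sse2 = [0x66, 0x0F, 0xEF, 0xC1];
        let avx = [0xC5, 0xF9, 0xEF, 0xC1];

        let c1 = Decoder::new(64, &sse2, DecoderOptions::NONE).decode().code();
        let c2 = Decoder::new(64, &avx, DecoderOptions::NONE).decode().code();

        assert_eq!(c1, Code::Pxor_xmm_xmmm128);
        assert_eq!(c2, Code::VEX_Vpxor_xmm_xmm_xmmm128);
    }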

§

Lddqu_xmm_m128 = 2_462

LDDQU xmm1, m128

F2 0F F0 /r

SSE3

16/32/64-bit

§

VEX_Vlddqu_xmm_m128 = 2_463

VLDDQU xmm1, m128

VEX.128.F2.0F.WIG F0 /r

AVX

16/32/64-bit

§

VEX_Vlddqu_ymm_m256 = 2_464

VLDDQU ymm1, m256

VEX.256.F2.0F.WIG F0 /r

AVX

16/32/64-bit

§

Psllw_mm_mmm64 = 2_465

PSLLW mm, mm/m64

NP 0F F1 /r

MMX

16/32/64-bit

§

Psllw_xmm_xmmm128 = 2_466

PSLLW xmm1, xmm2/m128

66 0F F1 /r

SSE2

16/32/64-bit

§

VEX_Vpsllw_xmm_xmm_xmmm128 = 2_467

VPSLLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F1 /r

AVX

16/32/64-bit

§

VEX_Vpsllw_ymm_ymm_xmmm128 = 2_468

VPSLLW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllw_xmm_k1z_xmm_xmmm128 = 2_469

VPSLLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_ymm_k1z_ymm_xmmm128 = 2_470

VPSLLW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG F1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_zmm_k1z_zmm_xmmm128 = 2_471

VPSLLW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG F1 /r

AVX512BW

16/32/64-bit

§

Pslld_mm_mmm64 = 2_472

PSLLD mm, mm/m64

NP 0F F2 /r

MMX

16/32/64-bit

§

Pslld_xmm_xmmm128 = 2_473

PSLLD xmm1, xmm2/m128

66 0F F2 /r

SSE2

16/32/64-bit

§

VEX_Vpslld_xmm_xmm_xmmm128 = 2_474

VPSLLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F2 /r

AVX

16/32/64-bit

§

VEX_Vpslld_ymm_ymm_xmmm128 = 2_475

VPSLLD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F2 /r

AVX2

16/32/64-bit

§

EVEX_Vpslld_xmm_k1z_xmm_xmmm128 = 2_476

VPSLLD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 F2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_ymm_k1z_ymm_xmmm128 = 2_477

VPSLLD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 F2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_zmm_k1z_zmm_xmmm128 = 2_478

VPSLLD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 F2 /r

AVX512F

16/32/64-bit

§

Psllq_mm_mmm64 = 2_479

PSLLQ mm, mm/m64

NP 0F F3 /r

MMX

16/32/64-bit

§

Psllq_xmm_xmmm128 = 2_480

PSLLQ xmm1, xmm2/m128

66 0F F3 /r

SSE2

16/32/64-bit

§

VEX_Vpsllq_xmm_xmm_xmmm128 = 2_481

VPSLLQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F3 /r

AVX

16/32/64-bit

§

VEX_Vpsllq_ymm_ymm_xmmm128 = 2_482

VPSLLQ ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F3 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllq_xmm_k1z_xmm_xmmm128 = 2_483

VPSLLQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 F3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_ymm_k1z_ymm_xmmm128 = 2_484

VPSLLQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 F3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_zmm_k1z_zmm_xmmm128 = 2_485

VPSLLQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 F3 /r

AVX512F

16/32/64-bit

§

Pmuludq_mm_mmm64 = 2_486

PMULUDQ mm1, mm2/m64

NP 0F F4 /r

SSE2

16/32/64-bit

§

Pmuludq_xmm_xmmm128 = 2_487

PMULUDQ xmm1, xmm2/m128

66 0F F4 /r

SSE2

16/32/64-bit

§

VEX_Vpmuludq_xmm_xmm_xmmm128 = 2_488

VPMULUDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F4 /r

AVX

16/32/64-bit

§

VEX_Vpmuludq_ymm_ymm_ymmm256 = 2_489

VPMULUDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F4 /r

AVX2

16/32/64-bit

§

EVEX_Vpmuludq_xmm_k1z_xmm_xmmm128b64 = 2_490

VPMULUDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 F4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuludq_ymm_k1z_ymm_ymmm256b64 = 2_491

VPMULUDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 F4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuludq_zmm_k1z_zmm_zmmm512b64 = 2_492

VPMULUDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 F4 /r

AVX512F

16/32/64-bit

§

Pmaddwd_mm_mmm64 = 2_493

PMADDWD mm, mm/m64

NP 0F F5 /r

MMX

16/32/64-bit

§

Pmaddwd_xmm_xmmm128 = 2_494

PMADDWD xmm1, xmm2/m128

66 0F F5 /r

SSE2

16/32/64-bit

§

VEX_Vpmaddwd_xmm_xmm_xmmm128 = 2_495

VPMADDWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F5 /r

AVX

16/32/64-bit

§

VEX_Vpmaddwd_ymm_ymm_ymmm256 = 2_496

VPMADDWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmaddwd_xmm_k1z_xmm_xmmm128 = 2_497

VPMADDWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddwd_ymm_k1z_ymm_ymmm256 = 2_498

VPMADDWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddwd_zmm_k1z_zmm_zmmm512 = 2_499

VPMADDWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F5 /r

AVX512BW

16/32/64-bit

§

Psadbw_mm_mmm64 = 2_500

PSADBW mm1, mm2/m64

NP 0F F6 /r

SSE

16/32/64-bit

§

Psadbw_xmm_xmmm128 = 2_501

PSADBW xmm1, xmm2/m128

66 0F F6 /r

SSE2

16/32/64-bit

§

VEX_Vpsadbw_xmm_xmm_xmmm128 = 2_502

VPSADBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F6 /r

AVX

16/32/64-bit

§

VEX_Vpsadbw_ymm_ymm_ymmm256 = 2_503

VPSADBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F6 /r

AVX2

16/32/64-bit

§

EVEX_Vpsadbw_xmm_xmm_xmmm128 = 2_504

VPSADBW xmm1, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F6 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsadbw_ymm_ymm_ymmm256 = 2_505

VPSADBW ymm1, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F6 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsadbw_zmm_zmm_zmmm512 = 2_506

VPSADBW zmm1, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F6 /r

AVX512BW

16/32/64-bit

§

Maskmovq_rDI_mm_mm = 2_507

MASKMOVQ mm1, mm2

NP 0F F7 /r

SSE

16/32/64-bit

§

Maskmovdqu_rDI_xmm_xmm = 2_508

MASKMOVDQU xmm1, xmm2

66 0F F7 /r

SSE2

16/32/64-bit

§

VEX_Vmaskmovdqu_rDI_xmm_xmm = 2_509

VMASKMOVDQU xmm1, xmm2

VEX.128.66.0F.WIG F7 /r

AVX

16/32/64-bit

§

Psubb_mm_mmm64 = 2_510

PSUBB mm, mm/m64

NP 0F F8 /r

MMX

16/32/64-bit

§

Psubb_xmm_xmmm128 = 2_511

PSUBB xmm1, xmm2/m128

66 0F F8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubb_xmm_xmm_xmmm128 = 2_512

VPSUBB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F8 /r

AVX

16/32/64-bit

§

VEX_Vpsubb_ymm_ymm_ymmm256 = 2_513

VPSUBB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubb_xmm_k1z_xmm_xmmm128 = 2_514

VPSUBB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubb_ymm_k1z_ymm_ymmm256 = 2_515

VPSUBB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubb_zmm_k1z_zmm_zmmm512 = 2_516

VPSUBB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F8 /r

AVX512BW

16/32/64-bit

§

Psubw_mm_mmm64 = 2_517

PSUBW mm, mm/m64

NP 0F F9 /r

MMX

16/32/64-bit

§

Psubw_xmm_xmmm128 = 2_518

PSUBW xmm1, xmm2/m128

66 0F F9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubw_xmm_xmm_xmmm128 = 2_519

VPSUBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F9 /r

AVX

16/32/64-bit

§

VEX_Vpsubw_ymm_ymm_ymmm256 = 2_520

VPSUBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubw_xmm_k1z_xmm_xmmm128 = 2_521

VPSUBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubw_ymm_k1z_ymm_ymmm256 = 2_522

VPSUBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubw_zmm_k1z_zmm_zmmm512 = 2_523

VPSUBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F9 /r

AVX512BW

16/32/64-bit

§

Psubd_mm_mmm64 = 2_524

PSUBD mm, mm/m64

NP 0F FA /r

MMX

16/32/64-bit

§

Psubd_xmm_xmmm128 = 2_525

PSUBD xmm1, xmm2/m128

66 0F FA /r

SSE2

16/32/64-bit

§

VEX_Vpsubd_xmm_xmm_xmmm128 = 2_526

VPSUBD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FA /r

AVX

16/32/64-bit

§

VEX_Vpsubd_ymm_ymm_ymmm256 = 2_527

VPSUBD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FA /r

AVX2

16/32/64-bit

§

EVEX_Vpsubd_xmm_k1z_xmm_xmmm128b32 = 2_528

VPSUBD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 FA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubd_ymm_k1z_ymm_ymmm256b32 = 2_529

VPSUBD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 FA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubd_zmm_k1z_zmm_zmmm512b32 = 2_530

VPSUBD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 FA /r

AVX512F

16/32/64-bit

§

Psubq_mm_mmm64 = 2_531

PSUBQ mm1, mm2/m64

NP 0F FB /r

SSE2

16/32/64-bit

§

Psubq_xmm_xmmm128 = 2_532

PSUBQ xmm1, xmm2/m128

66 0F FB /r

SSE2

16/32/64-bit

§

VEX_Vpsubq_xmm_xmm_xmmm128 = 2_533

VPSUBQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FB /r

AVX

16/32/64-bit

§

VEX_Vpsubq_ymm_ymm_ymmm256 = 2_534

VPSUBQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FB /r

AVX2

16/32/64-bit

§

EVEX_Vpsubq_xmm_k1z_xmm_xmmm128b64 = 2_535

VPSUBQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 FB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubq_ymm_k1z_ymm_ymmm256b64 = 2_536

VPSUBQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 FB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubq_zmm_k1z_zmm_zmmm512b64 = 2_537

VPSUBQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 FB /r

AVX512F

16/32/64-bit

§

Paddb_mm_mmm64 = 2_538

PADDB mm, mm/m64

NP 0F FC /r

MMX

16/32/64-bit

§

Paddb_xmm_xmmm128 = 2_539

PADDB xmm1, xmm2/m128

66 0F FC /r

SSE2

16/32/64-bit

§

VEX_Vpaddb_xmm_xmm_xmmm128 = 2_540

VPADDB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FC /r

AVX

16/32/64-bit

§

VEX_Vpaddb_ymm_ymm_ymmm256 = 2_541

VPADDB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddb_xmm_k1z_xmm_xmmm128 = 2_542

VPADDB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG FC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddb_ymm_k1z_ymm_ymmm256 = 2_543

VPADDB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG FC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddb_zmm_k1z_zmm_zmmm512 = 2_544

VPADDB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG FC /r

AVX512BW

16/32/64-bit

§

Paddw_mm_mmm64 = 2_545

PADDW mm, mm/m64

NP 0F FD /r

MMX

16/32/64-bit

§

Paddw_xmm_xmmm128 = 2_546

PADDW xmm1, xmm2/m128

66 0F FD /r

SSE2

16/32/64-bit

§

VEX_Vpaddw_xmm_xmm_xmmm128 = 2_547

VPADDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FD /r

AVX

16/32/64-bit

§

VEX_Vpaddw_ymm_ymm_ymmm256 = 2_548

VPADDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FD /r

AVX2

16/32/64-bit

§

EVEX_Vpaddw_xmm_k1z_xmm_xmmm128 = 2_549

VPADDW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG FD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddw_ymm_k1z_ymm_ymmm256 = 2_550

VPADDW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG FD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddw_zmm_k1z_zmm_zmmm512 = 2_551

VPADDW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG FD /r

AVX512BW

16/32/64-bit

§

Paddd_mm_mmm64 = 2_552

PADDD mm, mm/m64

NP 0F FE /r

MMX

16/32/64-bit

§

Paddd_xmm_xmmm128 = 2_553

PADDD xmm1, xmm2/m128

66 0F FE /r

SSE2

16/32/64-bit

§

VEX_Vpaddd_xmm_xmm_xmmm128 = 2_554

VPADDD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FE /r

AVX

16/32/64-bit

§

VEX_Vpaddd_ymm_ymm_ymmm256 = 2_555

VPADDD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FE /r

AVX2

16/32/64-bit

§

EVEX_Vpaddd_xmm_k1z_xmm_xmmm128b32 = 2_556

VPADDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 FE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddd_ymm_k1z_ymm_ymmm256b32 = 2_557

VPADDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 FE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddd_zmm_k1z_zmm_zmmm512b32 = 2_558

VPADDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 FE /r

AVX512F

16/32/64-bit

§

Ud0_r16_rm16 = 2_559

UD0 r16, r/m16

o16 0F FF /r

286+

16/32/64-bit

§

Ud0_r32_rm32 = 2_560

UD0 r32, r/m32

o32 0F FF /r

386+

16/32/64-bit

§

Ud0_r64_rm64 = 2_561

UD0 r64, r/m64

o64 0F FF /r

X64

64-bit

§

Pshufb_mm_mmm64 = 2_562

PSHUFB mm1, mm2/m64

NP 0F 38 00 /r

SSSE3

16/32/64-bit

§

Pshufb_xmm_xmmm128 = 2_563

PSHUFB xmm1, xmm2/m128

66 0F 38 00 /r

SSSE3

16/32/64-bit

§

VEX_Vpshufb_xmm_xmm_xmmm128 = 2_564

VPSHUFB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 00 /r

AVX

16/32/64-bit

§

VEX_Vpshufb_ymm_ymm_ymmm256 = 2_565

VPSHUFB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 00 /r

AVX2

16/32/64-bit

§

EVEX_Vpshufb_xmm_k1z_xmm_xmmm128 = 2_566

VPSHUFB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 00 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufb_ymm_k1z_ymm_ymmm256 = 2_567

VPSHUFB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 00 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufb_zmm_k1z_zmm_zmmm512 = 2_568

VPSHUFB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 00 /r

AVX512BW

16/32/64-bit

§

Phaddw_mm_mmm64 = 2_569

PHADDW mm1, mm2/m64

NP 0F 38 01 /r

SSSE3

16/32/64-bit

§

Phaddw_xmm_xmmm128 = 2_570

PHADDW xmm1, xmm2/m128

66 0F 38 01 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddw_xmm_xmm_xmmm128 = 2_571

VPHADDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 01 /r

AVX

16/32/64-bit

§

VEX_Vphaddw_ymm_ymm_ymmm256 = 2_572

VPHADDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 01 /r

AVX2

16/32/64-bit

§

Phaddd_mm_mmm64 = 2_573

PHADDD mm1, mm2/m64

NP 0F 38 02 /r

SSSE3

16/32/64-bit

§

Phaddd_xmm_xmmm128 = 2_574

PHADDD xmm1, xmm2/m128

66 0F 38 02 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddd_xmm_xmm_xmmm128 = 2_575

VPHADDD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 02 /r

AVX

16/32/64-bit

§

VEX_Vphaddd_ymm_ymm_ymmm256 = 2_576

VPHADDD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 02 /r

AVX2

16/32/64-bit

§

Phaddsw_mm_mmm64 = 2_577

PHADDSW mm1, mm2/m64

NP 0F 38 03 /r

SSSE3

16/32/64-bit

§

Phaddsw_xmm_xmmm128 = 2_578

PHADDSW xmm1, xmm2/m128

66 0F 38 03 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddsw_xmm_xmm_xmmm128 = 2_579

VPHADDSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 03 /r

AVX

16/32/64-bit

§

VEX_Vphaddsw_ymm_ymm_ymmm256 = 2_580

VPHADDSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 03 /r

AVX2

16/32/64-bit

§

Pmaddubsw_mm_mmm64 = 2_581

PMADDUBSW mm1, mm2/m64

NP 0F 38 04 /r

SSSE3

16/32/64-bit

§

Pmaddubsw_xmm_xmmm128 = 2_582

PMADDUBSW xmm1, xmm2/m128

66 0F 38 04 /r

SSSE3

16/32/64-bit

§

VEX_Vpmaddubsw_xmm_xmm_xmmm128 = 2_583

VPMADDUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 04 /r

AVX

16/32/64-bit

§

VEX_Vpmaddubsw_ymm_ymm_ymmm256 = 2_584

VPMADDUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 04 /r

AVX2

16/32/64-bit

§

EVEX_Vpmaddubsw_xmm_k1z_xmm_xmmm128 = 2_585

VPMADDUBSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 04 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddubsw_ymm_k1z_ymm_ymmm256 = 2_586

VPMADDUBSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 04 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddubsw_zmm_k1z_zmm_zmmm512 = 2_587

VPMADDUBSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 04 /r

AVX512BW

16/32/64-bit

§

Phsubw_mm_mmm64 = 2_588

PHSUBW mm1, mm2/m64

NP 0F 38 05 /r

SSSE3

16/32/64-bit

§

Phsubw_xmm_xmmm128 = 2_589

PHSUBW xmm1, xmm2/m128

66 0F 38 05 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubw_xmm_xmm_xmmm128 = 2_590

VPHSUBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 05 /r

AVX

16/32/64-bit

§

VEX_Vphsubw_ymm_ymm_ymmm256 = 2_591

VPHSUBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 05 /r

AVX2

16/32/64-bit

§

Phsubd_mm_mmm64 = 2_592

PHSUBD mm1, mm2/m64

NP 0F 38 06 /r

SSSE3

16/32/64-bit

§

Phsubd_xmm_xmmm128 = 2_593

PHSUBD xmm1, xmm2/m128

66 0F 38 06 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubd_xmm_xmm_xmmm128 = 2_594

VPHSUBD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 06 /r

AVX

16/32/64-bit

§

VEX_Vphsubd_ymm_ymm_ymmm256 = 2_595

VPHSUBD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 06 /r

AVX2

16/32/64-bit

§

Phsubsw_mm_mmm64 = 2_596

PHSUBSW mm1, mm2/m64

NP 0F 38 07 /r

SSSE3

16/32/64-bit

§

Phsubsw_xmm_xmmm128 = 2_597

PHSUBSW xmm1, xmm2/m128

66 0F 38 07 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubsw_xmm_xmm_xmmm128 = 2_598

VPHSUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 07 /r

AVX

16/32/64-bit

§

VEX_Vphsubsw_ymm_ymm_ymmm256 = 2_599

VPHSUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 07 /r

AVX2

16/32/64-bit

§

Psignb_mm_mmm64 = 2_600

PSIGNB mm1, mm2/m64

NP 0F 38 08 /r

SSSE3

16/32/64-bit

§

Psignb_xmm_xmmm128 = 2_601

PSIGNB xmm1, xmm2/m128

66 0F 38 08 /r

SSSE3

16/32/64-bit

§

VEX_Vpsignb_xmm_xmm_xmmm128 = 2_602

VPSIGNB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 08 /r

AVX

16/32/64-bit

§

VEX_Vpsignb_ymm_ymm_ymmm256 = 2_603

VPSIGNB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 08 /r

AVX2

16/32/64-bit

§

Psignw_mm_mmm64 = 2_604

PSIGNW mm1, mm2/m64

NP 0F 38 09 /r

SSSE3

16/32/64-bit

§

Psignw_xmm_xmmm128 = 2_605

PSIGNW xmm1, xmm2/m128

66 0F 38 09 /r

SSSE3

16/32/64-bit

§

VEX_Vpsignw_xmm_xmm_xmmm128 = 2_606

VPSIGNW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 09 /r

AVX

16/32/64-bit

§

VEX_Vpsignw_ymm_ymm_ymmm256 = 2_607

VPSIGNW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 09 /r

AVX2

16/32/64-bit

§

Psignd_mm_mmm64 = 2_608

PSIGND mm1, mm2/m64

NP 0F 38 0A /r

SSSE3

16/32/64-bit

§

Psignd_xmm_xmmm128 = 2_609

PSIGND xmm1, xmm2/m128

66 0F 38 0A /r

SSSE3

16/32/64-bit

§

VEX_Vpsignd_xmm_xmm_xmmm128 = 2_610

VPSIGND xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 0A /r

AVX

16/32/64-bit

§

VEX_Vpsignd_ymm_ymm_ymmm256 = 2_611

VPSIGND ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 0A /r

AVX2

16/32/64-bit

§

Pmulhrsw_mm_mmm64 = 2_612

PMULHRSW mm1, mm2/m64

NP 0F 38 0B /r

SSSE3

16/32/64-bit

§

Pmulhrsw_xmm_xmmm128 = 2_613

PMULHRSW xmm1, xmm2/m128

66 0F 38 0B /r

SSSE3

16/32/64-bit

§

VEX_Vpmulhrsw_xmm_xmm_xmmm128 = 2_614

VPMULHRSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 0B /r

AVX

16/32/64-bit

§

VEX_Vpmulhrsw_ymm_ymm_ymmm256 = 2_615

VPMULHRSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 0B /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhrsw_xmm_k1z_xmm_xmmm128 = 2_616

VPMULHRSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 0B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhrsw_ymm_k1z_ymm_ymmm256 = 2_617

VPMULHRSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 0B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhrsw_zmm_k1z_zmm_zmmm512 = 2_618

VPMULHRSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 0B /r

AVX512BW

16/32/64-bit

§

VEX_Vpermilps_xmm_xmm_xmmm128 = 2_619

VPERMILPS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 0C /r

AVX

16/32/64-bit

§

VEX_Vpermilps_ymm_ymm_ymmm256 = 2_620

VPERMILPS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 0C /r

AVX

16/32/64-bit

§

EVEX_Vpermilps_xmm_k1z_xmm_xmmm128b32 = 2_621

VPERMILPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 0C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_ymm_k1z_ymm_ymmm256b32 = 2_622

VPERMILPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 0C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_zmm_k1z_zmm_zmmm512b32 = 2_623

VPERMILPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 0C /r

AVX512F

16/32/64-bit

§

VEX_Vpermilpd_xmm_xmm_xmmm128 = 2_624

VPERMILPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 0D /r

AVX

16/32/64-bit

§

VEX_Vpermilpd_ymm_ymm_ymmm256 = 2_625

VPERMILPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 0D /r

AVX

16/32/64-bit

§

EVEX_Vpermilpd_xmm_k1z_xmm_xmmm128b64 = 2_626

VPERMILPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 0D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_ymm_k1z_ymm_ymmm256b64 = 2_627

VPERMILPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 0D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_zmm_k1z_zmm_zmmm512b64 = 2_628

VPERMILPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 0D /r

AVX512F

16/32/64-bit

§

VEX_Vtestps_xmm_xmmm128 = 2_629

VTESTPS xmm1, xmm2/m128

VEX.128.66.0F38.W0 0E /r

AVX

16/32/64-bit

§

VEX_Vtestps_ymm_ymmm256 = 2_630

VTESTPS ymm1, ymm2/m256

VEX.256.66.0F38.W0 0E /r

AVX

16/32/64-bit

§

VEX_Vtestpd_xmm_xmmm128 = 2_631

VTESTPD xmm1, xmm2/m128

VEX.128.66.0F38.W0 0F /r

AVX

16/32/64-bit

§

VEX_Vtestpd_ymm_ymmm256 = 2_632

VTESTPD ymm1, ymm2/m256

VEX.256.66.0F38.W0 0F /r

AVX

16/32/64-bit

§

Pblendvb_xmm_xmmm128 = 2_633

PBLENDVB xmm1, xmm2/m128, <XMM0>

66 0F 38 10 /r

SSE4.1

16/32/64-bit

§

EVEX_Vpsrlvw_xmm_k1z_xmm_xmmm128 = 2_634

VPSRLVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlvw_ymm_k1z_ymm_ymmm256 = 2_635

VPSRLVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlvw_zmm_k1z_zmm_zmmm512 = 2_636

VPSRLVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 10 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_xmmm64_k1z_xmm = 2_637

VPMOVUSWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_xmmm128_k1z_ymm = 2_638

VPMOVUSWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_ymmm256_k1z_zmm = 2_639

VPMOVUSWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 10 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_xmm_k1z_xmm_xmmm128 = 2_640

VPSRAVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 11 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_ymm_k1z_ymm_ymmm256 = 2_641

VPSRAVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 11 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_zmm_k1z_zmm_zmmm512 = 2_642

VPSRAVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 11 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm32_k1z_xmm = 2_643

VPMOVUSDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm64_k1z_ymm = 2_644

VPMOVUSDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm128_k1z_zmm = 2_645

VPMOVUSDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsllvw_xmm_k1z_xmm_xmmm128 = 2_646

VPSLLVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 12 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllvw_ymm_k1z_ymm_ymmm256 = 2_647

VPSLLVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 12 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllvw_zmm_k1z_zmm_zmmm512 = 2_648

VPSLLVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 12 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm16_k1z_xmm = 2_649

VPMOVUSQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm32_k1z_ymm = 2_650

VPMOVUSQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm64_k1z_zmm = 2_651

VPMOVUSQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 12 /r

AVX512F

16/32/64-bit

§

VEX_Vcvtph2ps_xmm_xmmm64 = 2_652

VCVTPH2PS xmm1, xmm2/m64

VEX.128.66.0F38.W0 13 /r

F16C

16/32/64-bit

§

VEX_Vcvtph2ps_ymm_xmmm128 = 2_653

VCVTPH2PS ymm1, xmm2/m128

VEX.256.66.0F38.W0 13 /r

F16C

16/32/64-bit

§

EVEX_Vcvtph2ps_xmm_k1z_xmmm64 = 2_654

VCVTPH2PS xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtph2ps_ymm_k1z_xmmm128 = 2_655

VCVTPH2PS ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtph2ps_zmm_k1z_ymmm256_sae = 2_656

VCVTPH2PS zmm1 {k1}{z}, ymm2/m256{sae}

EVEX.512.66.0F38.W0 13 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_xmmm64_k1z_xmm = 2_657

VPMOVUSDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_xmmm128_k1z_ymm = 2_658

VPMOVUSDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_ymmm256_k1z_zmm = 2_659

VPMOVUSDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 13 /r

AVX512F

16/32/64-bit

§

Blendvps_xmm_xmmm128 = 2_660

BLENDVPS xmm1, xmm2/m128, <XMM0>

66 0F 38 14 /r

SSE4.1

16/32/64-bit

§

EVEX_Vprorvd_xmm_k1z_xmm_xmmm128b32 = 2_661

VPRORVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvd_ymm_k1z_ymm_ymmm256b32 = 2_662

VPRORVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvd_zmm_k1z_zmm_zmmm512b32 = 2_663

VPRORVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 14 /r

AVX512F

16/32/64-bit

§

EVEX_Vprorvq_xmm_k1z_xmm_xmmm128b64 = 2_664

VPRORVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvq_ymm_k1z_ymm_ymmm256b64 = 2_665

VPRORVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvq_zmm_k1z_zmm_zmmm512b64 = 2_666

VPRORVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 14 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm32_k1z_xmm = 2_667

VPMOVUSQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm64_k1z_ymm = 2_668

VPMOVUSQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm128_k1z_zmm = 2_669

VPMOVUSQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 14 /r

AVX512F

16/32/64-bit

§

Blendvpd_xmm_xmmm128 = 2_670

BLENDVPD xmm1, xmm2/m128, <XMM0>

66 0F 38 15 /r

SSE4.1

16/32/64-bit

§

EVEX_Vprolvd_xmm_k1z_xmm_xmmm128b32 = 2_671

VPROLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvd_ymm_k1z_ymm_ymmm256b32 = 2_672

VPROLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvd_zmm_k1z_zmm_zmmm512b32 = 2_673

VPROLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 15 /r

AVX512F

16/32/64-bit

§

EVEX_Vprolvq_xmm_k1z_xmm_xmmm128b64 = 2_674

VPROLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvq_ymm_k1z_ymm_ymmm256b64 = 2_675

VPROLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvq_zmm_k1z_zmm_zmmm512b64 = 2_676

VPROLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 15 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_xmmm64_k1z_xmm = 2_677

VPMOVUSQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_xmmm128_k1z_ymm = 2_678

VPMOVUSQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_ymmm256_k1z_zmm = 2_679

VPMOVUSQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 15 /r

AVX512F

16/32/64-bit

§

VEX_Vpermps_ymm_ymm_ymmm256 = 2_680

VPERMPS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 16 /r

AVX2

16/32/64-bit

§

EVEX_Vpermps_ymm_k1z_ymm_ymmm256b32 = 2_681

VPERMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermps_zmm_k1z_zmm_zmmm512b32 = 2_682

VPERMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 16 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermpd_ymm_k1z_ymm_ymmm256b64 = 2_683

VPERMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermpd_zmm_k1z_zmm_zmmm512b64 = 2_684

VPERMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 16 /r

AVX512F

16/32/64-bit

§

Ptest_xmm_xmmm128 = 2_685

PTEST xmm1, xmm2/m128

66 0F 38 17 /r

SSE4.1

16/32/64-bit

§

VEX_Vptest_xmm_xmmm128 = 2_686

VPTEST xmm1, xmm2/m128

VEX.128.66.0F38.WIG 17 /r

AVX

16/32/64-bit

§

VEX_Vptest_ymm_ymmm256 = 2_687

VPTEST ymm1, ymm2/m256

VEX.256.66.0F38.WIG 17 /r

AVX

16/32/64-bit

§

VEX_Vbroadcastss_xmm_m32 = 2_688

VBROADCASTSS xmm1, m32

VEX.128.66.0F38.W0 18 /r

AVX

16/32/64-bit

§

VEX_Vbroadcastss_ymm_m32 = 2_689

VBROADCASTSS ymm1, m32

VEX.256.66.0F38.W0 18 /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastss_xmm_k1z_xmmm32 = 2_690

VBROADCASTSS xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.W0 18 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastss_ymm_k1z_xmmm32 = 2_691

VBROADCASTSS ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.W0 18 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastss_zmm_k1z_xmmm32 = 2_692

VBROADCASTSS zmm1 {k1}{z}, xmm2/m32

EVEX.512.66.0F38.W0 18 /r

AVX512F

16/32/64-bit

§
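
Each `Code` variant also carries static metadata that mirrors the columns of this listing. A small sketch, assuming the crate's `instr_info` feature (enabled by default) so that `encoding()` and `cpuid_features()` are available:

```rust
use iced_x86::{Code, Mnemonic};

fn main() {
    // Static metadata for the 512-bit broadcast form listed above.
    let code = Code::EVEX_Vbroadcastss_zmm_k1z_xmmm32;
    assert_eq!(code.mnemonic(), Mnemonic::Vbroadcastss);
    println!("{:?}", code.encoding());       // EVEX
    println!("{:?}", code.cpuid_features()); // [AVX512F]
}
```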

VEX_Vbroadcastsd_ymm_m64 = 2_693

VBROADCASTSD ymm1, m64

VEX.256.66.0F38.W0 19 /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastf32x2_ymm_k1z_xmmm64 = 2_694

VBROADCASTF32X2 ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W0 19 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf32x2_zmm_k1z_xmmm64 = 2_695

VBROADCASTF32X2 zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W0 19 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastsd_ymm_k1z_xmmm64 = 2_696

VBROADCASTSD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W1 19 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastsd_zmm_k1z_xmmm64 = 2_697

VBROADCASTSD zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W1 19 /r

AVX512F

16/32/64-bit

§

VEX_Vbroadcastf128_ymm_m128 = 2_698

VBROADCASTF128 ymm1, m128

VEX.256.66.0F38.W0 1A /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastf32x4_ymm_k1z_m128 = 2_699

VBROADCASTF32X4 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W0 1A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastf32x4_zmm_k1z_m128 = 2_700

VBROADCASTF32X4 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W0 1A /r

AVX512F

16/32/64-bit

§

EVEX_Vbroadcastf64x2_ymm_k1z_m128 = 2_701

VBROADCASTF64X2 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W1 1A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf64x2_zmm_k1z_m128 = 2_702

VBROADCASTF64X2 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W1 1A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf32x8_zmm_k1z_m256 = 2_703

VBROADCASTF32X8 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W0 1B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf64x4_zmm_k1z_m256 = 2_704

VBROADCASTF64X4 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W1 1B /r

AVX512F

16/32/64-bit

§

Pabsb_mm_mmm64 = 2_705

PABSB mm1, mm2/m64

NP 0F 38 1C /r

SSSE3

16/32/64-bit

§

Pabsb_xmm_xmmm128 = 2_706

PABSB xmm1, xmm2/m128

66 0F 38 1C /r

SSSE3

16/32/64-bit

§

VEX_Vpabsb_xmm_xmmm128 = 2_707

VPABSB xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1C /r

AVX

16/32/64-bit

§

VEX_Vpabsb_ymm_ymmm256 = 2_708

VPABSB ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1C /r

AVX2

16/32/64-bit

§

EVEX_Vpabsb_xmm_k1z_xmmm128 = 2_709

VPABSB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.WIG 1C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsb_ymm_k1z_ymmm256 = 2_710

VPABSB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.WIG 1C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsb_zmm_k1z_zmmm512 = 2_711

VPABSB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.WIG 1C /r

AVX512BW

16/32/64-bit

§

Pabsw_mm_mmm64 = 2_712

PABSW mm1, mm2/m64

NP 0F 38 1D /r

SSSE3

16/32/64-bit

§

Pabsw_xmm_xmmm128 = 2_713

PABSW xmm1, xmm2/m128

66 0F 38 1D /r

SSSE3

16/32/64-bit

§

VEX_Vpabsw_xmm_xmmm128 = 2_714

VPABSW xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1D /r

AVX

16/32/64-bit

§

VEX_Vpabsw_ymm_ymmm256 = 2_715

VPABSW ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1D /r

AVX2

16/32/64-bit

§

EVEX_Vpabsw_xmm_k1z_xmmm128 = 2_716

VPABSW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.WIG 1D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsw_ymm_k1z_ymmm256 = 2_717

VPABSW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.WIG 1D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsw_zmm_k1z_zmmm512 = 2_718

VPABSW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.WIG 1D /r

AVX512BW

16/32/64-bit

§

Pabsd_mm_mmm64 = 2_719

PABSD mm1, mm2/m64

NP 0F 38 1E /r

SSSE3

16/32/64-bit

§

Pabsd_xmm_xmmm128 = 2_720

PABSD xmm1, xmm2/m128

66 0F 38 1E /r

SSSE3

16/32/64-bit

§

VEX_Vpabsd_xmm_xmmm128 = 2_721

VPABSD xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1E /r

AVX

16/32/64-bit

§

VEX_Vpabsd_ymm_ymmm256 = 2_722

VPABSD ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1E /r

AVX2

16/32/64-bit

§

EVEX_Vpabsd_xmm_k1z_xmmm128b32 = 2_723

VPABSD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 1E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsd_ymm_k1z_ymmm256b32 = 2_724

VPABSD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 1E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsd_zmm_k1z_zmmm512b32 = 2_725

VPABSD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 1E /r

AVX512F

16/32/64-bit

§

EVEX_Vpabsq_xmm_k1z_xmmm128b64 = 2_726

VPABSQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 1F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsq_ymm_k1z_ymmm256b64 = 2_727

VPABSQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 1F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsq_zmm_k1z_zmmm512b64 = 2_728

VPABSQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 1F /r

AVX512F

16/32/64-bit

§

Pmovsxbw_xmm_xmmm64 = 2_729

PMOVSXBW xmm1, xmm2/m64

66 0F 38 20 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbw_xmm_xmmm64 = 2_730

VPMOVSXBW xmm1, xmm2/m64

VEX.128.66.0F38.WIG 20 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbw_ymm_xmmm128 = 2_731

VPMOVSXBW ymm1, xmm2/m128

VEX.256.66.0F38.WIG 20 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbw_xmm_k1z_xmmm64 = 2_732

VPMOVSXBW xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovsxbw_ymm_k1z_xmmm128 = 2_733

VPMOVSXBW ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovsxbw_zmm_k1z_ymmm256 = 2_734

VPMOVSXBW zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 20 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_xmmm64_k1z_xmm = 2_735

VPMOVSWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_xmmm128_k1z_ymm = 2_736

VPMOVSWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_ymmm256_k1z_zmm = 2_737

VPMOVSWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 20 /r

AVX512BW

16/32/64-bit

§

Pmovsxbd_xmm_xmmm32 = 2_738

PMOVSXBD xmm1, xmm2/m32

66 0F 38 21 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbd_xmm_xmmm32 = 2_739

VPMOVSXBD xmm1, xmm2/m32

VEX.128.66.0F38.WIG 21 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbd_ymm_xmmm64 = 2_740

VPMOVSXBD ymm1, xmm2/m64

VEX.256.66.0F38.WIG 21 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbd_xmm_k1z_xmmm32 = 2_741

VPMOVSXBD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbd_ymm_k1z_xmmm64 = 2_742

VPMOVSXBD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbd_zmm_k1z_xmmm128 = 2_743

VPMOVSXBD zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 21 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm32_k1z_xmm = 2_744

VPMOVSDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm64_k1z_ymm = 2_745

VPMOVSDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm128_k1z_zmm = 2_746

VPMOVSDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 21 /r

AVX512F

16/32/64-bit

§

Pmovsxbq_xmm_xmmm16 = 2_747

PMOVSXBQ xmm1, xmm2/m16

66 0F 38 22 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbq_xmm_xmmm16 = 2_748

VPMOVSXBQ xmm1, xmm2/m16

VEX.128.66.0F38.WIG 22 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbq_ymm_xmmm32 = 2_749

VPMOVSXBQ ymm1, xmm2/m32

VEX.256.66.0F38.WIG 22 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbq_xmm_k1z_xmmm16 = 2_750

VPMOVSXBQ xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.WIG 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbq_ymm_k1z_xmmm32 = 2_751

VPMOVSXBQ ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.WIG 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbq_zmm_k1z_xmmm64 = 2_752

VPMOVSXBQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.WIG 22 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm16_k1z_xmm = 2_753

VPMOVSQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm32_k1z_ymm = 2_754

VPMOVSQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm64_k1z_zmm = 2_755

VPMOVSQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 22 /r

AVX512F

16/32/64-bit

§

Pmovsxwd_xmm_xmmm64 = 2_756

PMOVSXWD xmm1, xmm2/m64

66 0F 38 23 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxwd_xmm_xmmm64 = 2_757

VPMOVSXWD xmm1, xmm2/m64

VEX.128.66.0F38.WIG 23 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxwd_ymm_xmmm128 = 2_758

VPMOVSXWD ymm1, xmm2/m128

VEX.256.66.0F38.WIG 23 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxwd_xmm_k1z_xmmm64 = 2_759

VPMOVSXWD xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwd_ymm_k1z_xmmm128 = 2_760

VPMOVSXWD ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwd_zmm_k1z_ymmm256 = 2_761

VPMOVSXWD zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 23 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_xmmm64_k1z_xmm = 2_762

VPMOVSDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_xmmm128_k1z_ymm = 2_763

VPMOVSDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_ymmm256_k1z_zmm = 2_764

VPMOVSDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 23 /r

AVX512F

16/32/64-bit

§

Pmovsxwq_xmm_xmmm32 = 2_765

PMOVSXWQ xmm1, xmm2/m32

66 0F 38 24 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxwq_xmm_xmmm32 = 2_766

VPMOVSXWQ xmm1, xmm2/m32

VEX.128.66.0F38.WIG 24 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxwq_ymm_xmmm64 = 2_767

VPMOVSXWQ ymm1, xmm2/m64

VEX.256.66.0F38.WIG 24 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxwq_xmm_k1z_xmmm32 = 2_768

VPMOVSXWQ xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwq_ymm_k1z_xmmm64 = 2_769

VPMOVSXWQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwq_zmm_k1z_xmmm128 = 2_770

VPMOVSXWQ zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 24 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm32_k1z_xmm = 2_771

VPMOVSQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm64_k1z_ymm = 2_772

VPMOVSQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm128_k1z_zmm = 2_773

VPMOVSQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 24 /r

AVX512F

16/32/64-bit

§

Pmovsxdq_xmm_xmmm64 = 2_774

PMOVSXDQ xmm1, xmm2/m64

66 0F 38 25 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxdq_xmm_xmmm64 = 2_775

VPMOVSXDQ xmm1, xmm2/m64

VEX.128.66.0F38.WIG 25 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxdq_ymm_xmmm128 = 2_776

VPMOVSXDQ ymm1, xmm2/m128

VEX.256.66.0F38.WIG 25 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxdq_xmm_k1z_xmmm64 = 2_777

VPMOVSXDQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxdq_ymm_k1z_xmmm128 = 2_778

VPMOVSXDQ ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxdq_zmm_k1z_ymmm256 = 2_779

VPMOVSXDQ zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.W0 25 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_xmmm64_k1z_xmm = 2_780

VPMOVSQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_xmmm128_k1z_ymm = 2_781

VPMOVSQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_ymmm256_k1z_zmm = 2_782

VPMOVSQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 25 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_xmm_xmmm128 = 2_783

VPTESTMB k2 {k1}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_ymm_ymmm256 = 2_784

VPTESTMB k2 {k1}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_zmm_zmmm512 = 2_785

VPTESTMB k2 {k1}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_xmm_xmmm128 = 2_786

VPTESTMW k2 {k1}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_ymm_ymmm256 = 2_787

VPTESTMW k2 {k1}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_zmm_zmmm512 = 2_788

VPTESTMW k2 {k1}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_xmm_xmmm128 = 2_789

VPTESTNMB k2 {k1}, xmm2, xmm3/m128

EVEX.128.F3.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_ymm_ymmm256 = 2_790

VPTESTNMB k2 {k1}, ymm2, ymm3/m256

EVEX.256.F3.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_zmm_zmmm512 = 2_791

VPTESTNMB k2 {k1}, zmm2, zmm3/m512

EVEX.512.F3.0F38.W0 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_xmm_xmmm128 = 2_792

VPTESTNMW k2 {k1}, xmm2, xmm3/m128

EVEX.128.F3.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_ymm_ymmm256 = 2_793

VPTESTNMW k2 {k1}, ymm2, ymm3/m256

EVEX.256.F3.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_zmm_zmmm512 = 2_794

VPTESTNMW k2 {k1}, zmm2, zmm3/m512

EVEX.512.F3.0F38.W1 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_xmm_xmmm128b32 = 2_795

VPTESTMD k2 {k1}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_ymm_ymmm256b32 = 2_796

VPTESTMD k2 {k1}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_zmm_zmmm512b32 = 2_797

VPTESTMD k2 {k1}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_xmm_xmmm128b64 = 2_798

VPTESTMQ k2 {k1}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_ymm_ymmm256b64 = 2_799

VPTESTMQ k2 {k1}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_zmm_zmmm512b64 = 2_800

VPTESTMQ k2 {k1}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_xmm_xmmm128b32 = 2_801

VPTESTNMD k2 {k1}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_ymm_ymmm256b32 = 2_802

VPTESTNMD k2 {k1}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_zmm_zmmm512b32 = 2_803

VPTESTNMD k2 {k1}, zmm2, zmm3/m512/m32bcst

EVEX.512.F3.0F38.W0 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_xmm_xmmm128b64 = 2_804

VPTESTNMQ k2 {k1}, xmm2, xmm3/m128/m64bcst

EVEX.128.F3.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_ymm_ymmm256b64 = 2_805

VPTESTNMQ k2 {k1}, ymm2, ymm3/m256/m64bcst

EVEX.256.F3.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_zmm_zmmm512b64 = 2_806

VPTESTNMQ k2 {k1}, zmm2, zmm3/m512/m64bcst

EVEX.512.F3.0F38.W1 27 /r

AVX512F

16/32/64-bit

§
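
The `_kr_k1_` variants above write a mask register rather than a vector register. Below is a sketch of constructing and encoding one of them; it assumes `Instruction::set_op_mask` is available for attaching the {k1} write mask (otherwise that line can simply be omitted).

```rust
use iced_x86::{Code, Encoder, Instruction, Register};

fn main() -> Result<(), iced_x86::IcedError> {
    // VPTESTMB k2 {k1}, xmm2, xmm3 — the destination is a mask register.
    let mut instr = Instruction::with3(
        Code::EVEX_Vptestmb_kr_k1_xmm_xmmm128,
        Register::K2,
        Register::XMM2,
        Register::XMM3,
    )?;
    instr.set_op_mask(Register::K1); // {k1} write mask

    let mut encoder = Encoder::new(64);
    encoder.encode(&instr, 0)?;
    println!("{:02x?}", encoder.take_buffer());
    Ok(())
}
```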

Pmuldq_xmm_xmmm128 = 2_807

PMULDQ xmm1, xmm2/m128

66 0F 38 28 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmuldq_xmm_xmm_xmmm128 = 2_808

VPMULDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vpmuldq_ymm_ymm_ymmm256 = 2_809

VPMULDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 28 /r

AVX2

16/32/64-bit

§

EVEX_Vpmuldq_xmm_k1z_xmm_xmmm128b64 = 2_810

VPMULDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuldq_ymm_k1z_ymm_ymmm256b64 = 2_811

VPMULDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuldq_zmm_k1z_zmm_zmmm512b64 = 2_812

VPMULDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 28 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovm2b_xmm_kr = 2_813

VPMOVM2B xmm1, k1

EVEX.128.F3.0F38.W0 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2b_ymm_kr = 2_814

VPMOVM2B ymm1, k1

EVEX.256.F3.0F38.W0 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2b_zmm_kr = 2_815

VPMOVM2B zmm1, k1

EVEX.512.F3.0F38.W0 28 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_xmm_kr = 2_816

VPMOVM2W xmm1, k1

EVEX.128.F3.0F38.W1 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_ymm_kr = 2_817

VPMOVM2W ymm1, k1

EVEX.256.F3.0F38.W1 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_zmm_kr = 2_818

VPMOVM2W zmm1, k1

EVEX.512.F3.0F38.W1 28 /r

AVX512BW

16/32/64-bit

§

Pcmpeqq_xmm_xmmm128 = 2_819

PCMPEQQ xmm1, xmm2/m128

66 0F 38 29 /r

SSE4.1

16/32/64-bit

§

VEX_Vpcmpeqq_xmm_xmm_xmmm128 = 2_820

VPCMPEQQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqq_ymm_ymm_ymmm256 = 2_821

VPCMPEQQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 29 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_xmm_xmmm128b64 = 2_822

VPCMPEQQ k1 {k2}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_ymm_ymmm256b64 = 2_823

VPCMPEQQ k1 {k2}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_zmm_zmmm512b64 = 2_824

VPCMPEQQ k1 {k2}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 29 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovb2m_kr_xmm = 2_825

VPMOVB2M k1, xmm1

EVEX.128.F3.0F38.W0 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovb2m_kr_ymm = 2_826

VPMOVB2M k1, ymm1

EVEX.256.F3.0F38.W0 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovb2m_kr_zmm = 2_827

VPMOVB2M k1, zmm1

EVEX.512.F3.0F38.W0 29 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_xmm = 2_828

VPMOVW2M k1, xmm1

EVEX.128.F3.0F38.W1 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_ymm = 2_829

VPMOVW2M k1, ymm1

EVEX.256.F3.0F38.W1 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_zmm = 2_830

VPMOVW2M k1, zmm1

EVEX.512.F3.0F38.W1 29 /r

AVX512BW

16/32/64-bit

§

Movntdqa_xmm_m128 = 2_831

MOVNTDQA xmm1, m128

66 0F 38 2A /r

SSE4.1

16/32/64-bit

§

VEX_Vmovntdqa_xmm_m128 = 2_832

VMOVNTDQA xmm1, m128

VEX.128.66.0F38.WIG 2A /r

AVX

16/32/64-bit

§

VEX_Vmovntdqa_ymm_m256 = 2_833

VMOVNTDQA ymm1, m256

VEX.256.66.0F38.WIG 2A /r

AVX2

16/32/64-bit

§

EVEX_Vmovntdqa_xmm_m128 = 2_834

VMOVNTDQA xmm1, m128

EVEX.128.66.0F38.W0 2A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdqa_ymm_m256 = 2_835

VMOVNTDQA ymm1, m256

EVEX.256.66.0F38.W0 2A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdqa_zmm_m512 = 2_836

VMOVNTDQA zmm1, m512

EVEX.512.66.0F38.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_xmm_kr = 2_837

VPBROADCASTMB2Q xmm1, k1

EVEX.128.F3.0F38.W1 2A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_ymm_kr = 2_838

VPBROADCASTMB2Q ymm1, k1

EVEX.256.F3.0F38.W1 2A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_zmm_kr = 2_839

VPBROADCASTMB2Q zmm1, k1

EVEX.512.F3.0F38.W1 2A /r

AVX512CD

16/32/64-bit

§

Packusdw_xmm_xmmm128 = 2_840

PACKUSDW xmm1, xmm2/m128

66 0F 38 2B /r

SSE4.1

16/32/64-bit

§

VEX_Vpackusdw_xmm_xmm_xmmm128 = 2_841

VPACKUSDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vpackusdw_ymm_ymm_ymmm256 = 2_842

VPACKUSDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 2B /r

AVX2

16/32/64-bit

§

EVEX_Vpackusdw_xmm_k1z_xmm_xmmm128b32 = 2_843

VPACKUSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 2B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackusdw_ymm_k1z_ymm_ymmm256b32 = 2_844

VPACKUSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 2B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackusdw_zmm_k1z_zmm_zmmm512b32 = 2_845

VPACKUSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 2B /r

AVX512BW

16/32/64-bit

§

VEX_Vmaskmovps_xmm_xmm_m128 = 2_846

VMASKMOVPS xmm1, xmm2, m128

VEX.128.66.0F38.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vmaskmovps_ymm_ymm_m256 = 2_847

VMASKMOVPS ymm1, ymm2, m256

VEX.256.66.0F38.W0 2C /r

AVX

16/32/64-bit

§

EVEX_Vscalefps_xmm_k1z_xmm_xmmm128b32 = 2_848

VSCALEFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefps_ymm_k1z_ymm_ymmm256b32 = 2_849

VSCALEFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefps_zmm_k1z_zmm_zmmm512b32_er = 2_850

VSCALEFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_xmm_k1z_xmm_xmmm128b64 = 2_851

VSCALEFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_ymm_k1z_ymm_ymmm256b64 = 2_852

VSCALEFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_zmm_k1z_zmm_zmmm512b64_er = 2_853

VSCALEFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 2C /r

AVX512F

16/32/64-bit

§

VEX_Vmaskmovpd_xmm_xmm_m128 = 2_854

VMASKMOVPD xmm1, xmm2, m128

VEX.128.66.0F38.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_ymm_ymm_m256 = 2_855

VMASKMOVPD ymm1, ymm2, m256

VEX.256.66.0F38.W0 2D /r

AVX

16/32/64-bit

§

EVEX_Vscalefss_xmm_k1z_xmm_xmmm32_er = 2_856

VSCALEFSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vscalefsd_xmm_k1z_xmm_xmmm64_er = 2_857

VSCALEFSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 2D /r

AVX512F

16/32/64-bit

§

VEX_Vmaskmovps_m128_xmm_xmm = 2_858

VMASKMOVPS m128, xmm1, xmm2

VEX.128.66.0F38.W0 2E /r

AVX

16/32/64-bit

§

VEX_Vmaskmovps_m256_ymm_ymm = 2_859

VMASKMOVPS m256, ymm1, ymm2

VEX.256.66.0F38.W0 2E /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_m128_xmm_xmm = 2_860

VMASKMOVPD m128, xmm1, xmm2

VEX.128.66.0F38.W0 2F /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_m256_ymm_ymm = 2_861

VMASKMOVPD m256, ymm1, ymm2

VEX.256.66.0F38.W0 2F /r

AVX

16/32/64-bit

§
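
The store forms of VMASKMOVPS/VMASKMOVPD have their own `Code` variants with the memory operand first. The sketch below decodes a hand-assembled VEX.128.66.0F38.W0 2E /r store and, assuming the instruction-info API (`InstructionInfoFactory`, from the `instr_info` feature), lists the memory access, which should be reported as a conditional write because the store is masked.

```rust
use iced_x86::{Code, Decoder, DecoderOptions, InstructionInfoFactory};

fn main() {
    // Hand-assembled VEX.128.66.0F38.W0 2E /r: vmaskmovps [rdi], xmm0, xmm0.
    let bytes = b"\xC4\xE2\x79\x2E\x07";
    let mut decoder = Decoder::new(64, bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::VEX_Vmaskmovps_m128_xmm_xmm);

    // Inspect how the instruction touches memory.
    let mut factory = InstructionInfoFactory::new();
    let info = factory.info(&instr);
    for mem in info.used_memory() {
        println!("base={:?} access={:?}", mem.base(), mem.access());
    }
}
```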

Pmovzxbw_xmm_xmmm64 = 2_862

PMOVZXBW xmm1, xmm2/m64

66 0F 38 30 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbw_xmm_xmmm64 = 2_863

VPMOVZXBW xmm1, xmm2/m64

VEX.128.66.0F38.WIG 30 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbw_ymm_xmmm128 = 2_864

VPMOVZXBW ymm1, xmm2/m128

VEX.256.66.0F38.WIG 30 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbw_xmm_k1z_xmmm64 = 2_865

VPMOVZXBW xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovzxbw_ymm_k1z_xmmm128 = 2_866

VPMOVZXBW ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovzxbw_zmm_k1z_ymmm256 = 2_867

VPMOVZXBW zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 30 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_xmmm64_k1z_xmm = 2_868

VPMOVWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_xmmm128_k1z_ymm = 2_869

VPMOVWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_ymmm256_k1z_zmm = 2_870

VPMOVWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 30 /r

AVX512BW

16/32/64-bit

§

Pmovzxbd_xmm_xmmm32 = 2_871

PMOVZXBD xmm1, xmm2/m32

66 0F 38 31 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbd_xmm_xmmm32 = 2_872

VPMOVZXBD xmm1, xmm2/m32

VEX.128.66.0F38.WIG 31 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbd_ymm_xmmm64 = 2_873

VPMOVZXBD ymm1, xmm2/m64

VEX.256.66.0F38.WIG 31 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbd_xmm_k1z_xmmm32 = 2_874

VPMOVZXBD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbd_ymm_k1z_xmmm64 = 2_875

VPMOVZXBD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbd_zmm_k1z_xmmm128 = 2_876

VPMOVZXBD zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 31 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm32_k1z_xmm = 2_877

VPMOVDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm64_k1z_ymm = 2_878

VPMOVDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm128_k1z_zmm = 2_879

VPMOVDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 31 /r

AVX512F

16/32/64-bit

§

Pmovzxbq_xmm_xmmm16 = 2_880

PMOVZXBQ xmm1, xmm2/m16

66 0F 38 32 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbq_xmm_xmmm16 = 2_881

VPMOVZXBQ xmm1, xmm2/m16

VEX.128.66.0F38.WIG 32 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbq_ymm_xmmm32 = 2_882

VPMOVZXBQ ymm1, xmm2/m32

VEX.256.66.0F38.WIG 32 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbq_xmm_k1z_xmmm16 = 2_883

VPMOVZXBQ xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.WIG 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbq_ymm_k1z_xmmm32 = 2_884

VPMOVZXBQ ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.WIG 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbq_zmm_k1z_xmmm64 = 2_885

VPMOVZXBQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.WIG 32 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm16_k1z_xmm = 2_886

VPMOVQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm32_k1z_ymm = 2_887

VPMOVQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm64_k1z_zmm = 2_888

VPMOVQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 32 /r

AVX512F

16/32/64-bit

§

Pmovzxwd_xmm_xmmm64 = 2_889

PMOVZXWD xmm1, xmm2/m64

66 0F 38 33 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxwd_xmm_xmmm64 = 2_890

VPMOVZXWD xmm1, xmm2/m64

VEX.128.66.0F38.WIG 33 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxwd_ymm_xmmm128 = 2_891

VPMOVZXWD ymm1, xmm2/m128

VEX.256.66.0F38.WIG 33 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxwd_xmm_k1z_xmmm64 = 2_892

VPMOVZXWD xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwd_ymm_k1z_xmmm128 = 2_893

VPMOVZXWD ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwd_zmm_k1z_ymmm256 = 2_894

VPMOVZXWD zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 33 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_xmmm64_k1z_xmm = 2_895

VPMOVDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_xmmm128_k1z_ymm = 2_896

VPMOVDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_ymmm256_k1z_zmm = 2_897

VPMOVDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 33 /r

AVX512F

16/32/64-bit

§

Pmovzxwq_xmm_xmmm32 = 2_898

PMOVZXWQ xmm1, xmm2/m32

66 0F 38 34 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxwq_xmm_xmmm32 = 2_899

VPMOVZXWQ xmm1, xmm2/m32

VEX.128.66.0F38.WIG 34 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxwq_ymm_xmmm64 = 2_900

VPMOVZXWQ ymm1, xmm2/m64

VEX.256.66.0F38.WIG 34 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxwq_xmm_k1z_xmmm32 = 2_901

VPMOVZXWQ xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwq_ymm_k1z_xmmm64 = 2_902

VPMOVZXWQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwq_zmm_k1z_xmmm128 = 2_903

VPMOVZXWQ zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 34 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm32_k1z_xmm = 2_904

VPMOVQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm64_k1z_ymm = 2_905

VPMOVQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm128_k1z_zmm = 2_906

VPMOVQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 34 /r

AVX512F

16/32/64-bit

§

Pmovzxdq_xmm_xmmm64 = 2_907

PMOVZXDQ xmm1, xmm2/m64

66 0F 38 35 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxdq_xmm_xmmm64 = 2_908

VPMOVZXDQ xmm1, xmm2/m64

VEX.128.66.0F38.WIG 35 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxdq_ymm_xmmm128 = 2_909

VPMOVZXDQ ymm1, xmm2/m128

VEX.256.66.0F38.WIG 35 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxdq_xmm_k1z_xmmm64 = 2_910

VPMOVZXDQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxdq_ymm_k1z_xmmm128 = 2_911

VPMOVZXDQ ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxdq_zmm_k1z_ymmm256 = 2_912

VPMOVZXDQ zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.W0 35 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_xmmm64_k1z_xmm = 2_913

VPMOVQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_xmmm128_k1z_ymm = 2_914

VPMOVQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_ymmm256_k1z_zmm = 2_915

VPMOVQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 35 /r

AVX512F

16/32/64-bit

§

VEX_Vpermd_ymm_ymm_ymmm256 = 2_916

VPERMD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 36 /r

AVX2

16/32/64-bit

§

EVEX_Vpermd_ymm_k1z_ymm_ymmm256b32 = 2_917

VPERMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 36 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermd_zmm_k1z_zmm_zmmm512b32 = 2_918

VPERMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 36 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermq_ymm_k1z_ymm_ymmm256b64 = 2_919

VPERMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 36 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermq_zmm_k1z_zmm_zmmm512b64 = 2_920

VPERMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 36 /r

AVX512F

16/32/64-bit

§

Pcmpgtq_xmm_xmmm128 = 2_921

PCMPGTQ xmm1, xmm2/m128

66 0F 38 37 /r

SSE4.2

16/32/64-bit

§

VEX_Vpcmpgtq_xmm_xmm_xmmm128 = 2_922

VPCMPGTQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 37 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtq_ymm_ymm_ymmm256 = 2_923

VPCMPGTQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 37 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_xmm_xmmm128b64 = 2_924

VPCMPGTQ k1 {k2}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 37 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_ymm_ymmm256b64 = 2_925

VPCMPGTQ k1 {k2}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 37 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_zmm_zmmm512b64 = 2_926

VPCMPGTQ k1 {k2}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 37 /r

AVX512F

16/32/64-bit

§

Pminsb_xmm_xmmm128 = 2_927

PMINSB xmm1, xmm2/m128

66 0F 38 38 /r

SSE4.1

16/32/64-bit

§

VEX_Vpminsb_xmm_xmm_xmmm128 = 2_928

VPMINSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 38 /r

AVX

16/32/64-bit

§

VEX_Vpminsb_ymm_ymm_ymmm256 = 2_929

VPMINSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 38 /r

AVX2

16/32/64-bit

§

EVEX_Vpminsb_xmm_k1z_xmm_xmmm128 = 2_930

VPMINSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 38 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsb_ymm_k1z_ymm_ymmm256 = 2_931

VPMINSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 38 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsb_zmm_k1z_zmm_zmmm512 = 2_932

VPMINSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 38 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2d_xmm_kr = 2_933

VPMOVM2D xmm1, k1

EVEX.128.F3.0F38.W0 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2d_ymm_kr = 2_934

VPMOVM2D ymm1, k1

EVEX.256.F3.0F38.W0 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2d_zmm_kr = 2_935

VPMOVM2D zmm1, k1

EVEX.512.F3.0F38.W0 38 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_xmm_kr = 2_936

VPMOVM2Q xmm1, k1

EVEX.128.F3.0F38.W1 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_ymm_kr = 2_937

VPMOVM2Q ymm1, k1

EVEX.256.F3.0F38.W1 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_zmm_kr = 2_938

VPMOVM2Q zmm1, k1

EVEX.512.F3.0F38.W1 38 /r

AVX512DQ

16/32/64-bit

§

Pminsd_xmm_xmmm128 = 2_939

PMINSD xmm1, xmm2/m128

66 0F 38 39 /r

SSE4.1

16/32/64-bit

§

VEX_Vpminsd_xmm_xmm_xmmm128 = 2_940

VPMINSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 39 /r

AVX

16/32/64-bit

§

VEX_Vpminsd_ymm_ymm_ymmm256 = 2_941

VPMINSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 39 /r

AVX2

16/32/64-bit

§

EVEX_Vpminsd_xmm_k1z_xmm_xmmm128b32 = 2_942

VPMINSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsd_ymm_k1z_ymm_ymmm256b32 = 2_943

VPMINSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsd_zmm_k1z_zmm_zmmm512b32 = 2_944

VPMINSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 39 /r

AVX512F

16/32/64-bit

§

EVEX_Vpminsq_xmm_k1z_xmm_xmmm128b64 = 2_945

VPMINSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsq_ymm_k1z_ymm_ymmm256b64 = 2_946

VPMINSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsq_zmm_k1z_zmm_zmmm512b64 = 2_947

VPMINSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 39 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovd2m_kr_xmm = 2_948

VPMOVD2M k1, xmm1

EVEX.128.F3.0F38.W0 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovd2m_kr_ymm = 2_949

VPMOVD2M k1, ymm1

EVEX.256.F3.0F38.W0 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovd2m_kr_zmm = 2_950

VPMOVD2M k1, zmm1

EVEX.512.F3.0F38.W0 39 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_xmm = 2_951

VPMOVQ2M k1, xmm1

EVEX.128.F3.0F38.W1 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_ymm = 2_952

VPMOVQ2M k1, ymm1

EVEX.256.F3.0F38.W1 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_zmm = 2_953

VPMOVQ2M k1, zmm1

EVEX.512.F3.0F38.W1 39 /r

AVX512DQ

16/32/64-bit

§

Pminuw_xmm_xmmm128 = 2_954

PMINUW xmm1, xmm2/m128

66 0F 38 3A /r

SSE4.1

16/32/64-bit

§

VEX_Vpminuw_xmm_xmm_xmmm128 = 2_955

VPMINUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3A /r

AVX

16/32/64-bit

§

VEX_Vpminuw_ymm_ymm_ymmm256 = 2_956

VPMINUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3A /r

AVX2

16/32/64-bit

§

EVEX_Vpminuw_xmm_k1z_xmm_xmmm128 = 2_957

VPMINUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminuw_ymm_k1z_ymm_ymmm256 = 2_958

VPMINUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminuw_zmm_k1z_zmm_zmmm512 = 2_959

VPMINUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3A /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_xmm_kr = 2_960

VPBROADCASTMW2D xmm1, k1

EVEX.128.F3.0F38.W0 3A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_ymm_kr = 2_961

VPBROADCASTMW2D ymm1, k1

EVEX.256.F3.0F38.W0 3A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_zmm_kr = 2_962

VPBROADCASTMW2D zmm1, k1

EVEX.512.F3.0F38.W0 3A /r

AVX512CD

16/32/64-bit

§

Pminud_xmm_xmmm128 = 2_963

PMINUD xmm1, xmm2/m128

66 0F 38 3B /r

SSE4.1

16/32/64-bit

§

VEX_Vpminud_xmm_xmm_xmmm128 = 2_964

VPMINUD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3B /r

AVX

16/32/64-bit

§

VEX_Vpminud_ymm_ymm_ymmm256 = 2_965

VPMINUD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3B /r

AVX2

16/32/64-bit

§

EVEX_Vpminud_xmm_k1z_xmm_xmmm128b32 = 2_966

VPMINUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminud_ymm_k1z_ymm_ymmm256b32 = 2_967

VPMINUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminud_zmm_k1z_zmm_zmmm512b32 = 2_968

VPMINUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3B /r

AVX512F

16/32/64-bit

§

EVEX_Vpminuq_xmm_k1z_xmm_xmmm128b64 = 2_969

VPMINUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminuq_ymm_k1z_ymm_ymmm256b64 = 2_970

VPMINUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminuq_zmm_k1z_zmm_zmmm512b64 = 2_971

VPMINUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3B /r

AVX512F

16/32/64-bit

§

Pmaxsb_xmm_xmmm128 = 2_972

PMAXSB xmm1, xmm2/m128

66 0F 38 3C /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxsb_xmm_xmm_xmmm128 = 2_973

VPMAXSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3C /r

AVX

16/32/64-bit

§

VEX_Vpmaxsb_ymm_ymm_ymmm256 = 2_974

VPMAXSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3C /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsb_xmm_k1z_xmm_xmmm128 = 2_975

VPMAXSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsb_ymm_k1z_ymm_ymmm256 = 2_976

VPMAXSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsb_zmm_k1z_zmm_zmmm512 = 2_977

VPMAXSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3C /r

AVX512BW

16/32/64-bit

§

Pmaxsd_xmm_xmmm128 = 2_978

PMAXSD xmm1, xmm2/m128

66 0F 38 3D /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxsd_xmm_xmm_xmmm128 = 2_979

VPMAXSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3D /r

AVX

16/32/64-bit

§

VEX_Vpmaxsd_ymm_ymm_ymmm256 = 2_980

VPMAXSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3D /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsd_xmm_k1z_xmm_xmmm128b32 = 2_981

VPMAXSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsd_ymm_k1z_ymm_ymmm256b32 = 2_982

VPMAXSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsd_zmm_k1z_zmm_zmmm512b32 = 2_983

VPMAXSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3D /r

AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_xmm_k1z_xmm_xmmm128b64 = 2_984

VPMAXSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_ymm_k1z_ymm_ymmm256b64 = 2_985

VPMAXSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_zmm_k1z_zmm_zmmm512b64 = 2_986

VPMAXSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3D /r

AVX512F

16/32/64-bit

§

Pmaxuw_xmm_xmmm128 = 2_987

PMAXUW xmm1, xmm2/m128

66 0F 38 3E /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxuw_xmm_xmm_xmmm128 = 2_988

VPMAXUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3E /r

AVX

16/32/64-bit

§

VEX_Vpmaxuw_ymm_ymm_ymmm256 = 2_989

VPMAXUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3E /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxuw_xmm_k1z_xmm_xmmm128 = 2_990

VPMAXUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3E /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxuw_ymm_k1z_ymm_ymmm256 = 2_991

VPMAXUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3E /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxuw_zmm_k1z_zmm_zmmm512 = 2_992

VPMAXUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3E /r

AVX512BW

16/32/64-bit

§

Pmaxud_xmm_xmmm128 = 2_993

PMAXUD xmm1, xmm2/m128

66 0F 38 3F /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxud_xmm_xmm_xmmm128 = 2_994

VPMAXUD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3F /r

AVX

16/32/64-bit

§

VEX_Vpmaxud_ymm_ymm_ymmm256 = 2_995

VPMAXUD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3F /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxud_xmm_k1z_xmm_xmmm128b32 = 2_996

VPMAXUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxud_ymm_k1z_ymm_ymmm256b32 = 2_997

VPMAXUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxud_zmm_k1z_zmm_zmmm512b32 = 2_998

VPMAXUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3F /r

AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_xmm_k1z_xmm_xmmm128b64 = 2_999

VPMAXUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_ymm_k1z_ymm_ymmm256b64 = 3_000

VPMAXUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_zmm_k1z_zmm_zmmm512b64 = 3_001

VPMAXUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3F /r

AVX512F

16/32/64-bit

§

Pmulld_xmm_xmmm128 = 3_002

PMULLD xmm1, xmm2/m128

66 0F 38 40 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmulld_xmm_xmm_xmmm128 = 3_003

VPMULLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 40 /r

AVX

16/32/64-bit

§

VEX_Vpmulld_ymm_ymm_ymmm256 = 3_004

VPMULLD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 40 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulld_xmm_k1z_xmm_xmmm128b32 = 3_005

VPMULLD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 40 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmulld_ymm_k1z_ymm_ymmm256b32 = 3_006

VPMULLD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 40 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmulld_zmm_k1z_zmm_zmmm512b32 = 3_007

VPMULLD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 40 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmullq_xmm_k1z_xmm_xmmm128b64 = 3_008

VPMULLQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 40 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmullq_ymm_k1z_ymm_ymmm256b64 = 3_009

VPMULLQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 40 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmullq_zmm_k1z_zmm_zmmm512b64 = 3_010

VPMULLQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 40 /r

AVX512DQ

16/32/64-bit

§
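
With the `op_code_info` feature enabled, the instruction string and op-code string shown in this listing can be retrieved at runtime from the `Code` value itself. A brief sketch, with the method names assumed from the `OpCodeInfo` API:

```rust
use iced_x86::Code;

fn main() {
    // Metadata for one of the VPMULLQ forms listed above.
    let op = Code::EVEX_Vpmullq_xmm_k1z_xmm_xmmm128b64.op_code();
    println!("{}", op.instruction_string()); // VPMULLQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst
    println!("{}", op.op_code_string());     // EVEX.128.66.0F38.W1 40 /r
}
```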

Phminposuw_xmm_xmmm128 = 3_011

PHMINPOSUW xmm1, xmm2/m128

66 0F 38 41 /r

SSE4.1

16/32/64-bit

§

VEX_Vphminposuw_xmm_xmmm128 = 3_012

VPHMINPOSUW xmm1, xmm2/m128

VEX.128.66.0F38.WIG 41 /r

AVX

16/32/64-bit

§

EVEX_Vgetexpps_xmm_k1z_xmmm128b32 = 3_013

VGETEXPPS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexpps_ymm_k1z_ymmm256b32 = 3_014

VGETEXPPS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexpps_zmm_k1z_zmmm512b32_sae = 3_015

VGETEXPPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 42 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_xmm_k1z_xmmm128b64 = 3_016

VGETEXPPD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_ymm_k1z_ymmm256b64 = 3_017

VGETEXPPD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_zmm_k1z_zmmm512b64_sae = 3_018

VGETEXPPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 42 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexpss_xmm_k1z_xmm_xmmm32_sae = 3_019

VGETEXPSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 43 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexpsd_xmm_k1z_xmm_xmmm64_sae = 3_020

VGETEXPSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 43 /r

AVX512F

16/32/64-bit

§

EVEX_Vplzcntd_xmm_k1z_xmmm128b32 = 3_021

VPLZCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntd_ymm_k1z_ymmm256b32 = 3_022

VPLZCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntd_zmm_k1z_zmmm512b32 = 3_023

VPLZCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 44 /r

AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_xmm_k1z_xmmm128b64 = 3_024

VPLZCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_ymm_k1z_ymmm256b64 = 3_025

VPLZCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_zmm_k1z_zmmm512b64 = 3_026

VPLZCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 44 /r

AVX512CD

16/32/64-bit

§

VEX_Vpsrlvd_xmm_xmm_xmmm128 = 3_027

VPSRLVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvd_ymm_ymm_ymmm256 = 3_028

VPSRLVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvq_xmm_xmm_xmmm128 = 3_029

VPSRLVQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvq_ymm_ymm_ymmm256 = 3_030

VPSRLVQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 45 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlvd_xmm_k1z_xmm_xmmm128b32 = 3_031

VPSRLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvd_ymm_k1z_ymm_ymmm256b32 = 3_032

VPSRLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvd_zmm_k1z_zmm_zmmm512b32 = 3_033

VPSRLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 45 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_xmm_k1z_xmm_xmmm128b64 = 3_034

VPSRLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_ymm_k1z_ymm_ymmm256b64 = 3_035

VPSRLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_zmm_k1z_zmm_zmmm512b64 = 3_036

VPSRLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 45 /r

AVX512F

16/32/64-bit

§

VEX_Vpsravd_xmm_xmm_xmmm128 = 3_037

VPSRAVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 46 /r

AVX2

16/32/64-bit

§

VEX_Vpsravd_ymm_ymm_ymmm256 = 3_038

VPSRAVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 46 /r

AVX2

16/32/64-bit

§

EVEX_Vpsravd_xmm_k1z_xmm_xmmm128b32 = 3_039

VPSRAVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravd_ymm_k1z_ymm_ymmm256b32 = 3_040

VPSRAVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravd_zmm_k1z_zmm_zmmm512b32 = 3_041

VPSRAVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 46 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsravq_xmm_k1z_xmm_xmmm128b64 = 3_042

VPSRAVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravq_ymm_k1z_ymm_ymmm256b64 = 3_043

VPSRAVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravq_zmm_k1z_zmm_zmmm512b64 = 3_044

VPSRAVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 46 /r

AVX512F

16/32/64-bit

§

VEX_Vpsllvd_xmm_xmm_xmmm128 = 3_045

VPSLLVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvd_ymm_ymm_ymmm256 = 3_046

VPSLLVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvq_xmm_xmm_xmmm128 = 3_047

VPSLLVQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvq_ymm_ymm_ymmm256 = 3_048

VPSLLVQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 47 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllvd_xmm_k1z_xmm_xmmm128b32 = 3_049

VPSLLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvd_ymm_k1z_ymm_ymmm256b32 = 3_050

VPSLLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvd_zmm_k1z_zmm_zmmm512b32 = 3_051

VPSLLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 47 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_xmm_k1z_xmm_xmmm128b64 = 3_052

VPSLLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_ymm_k1z_ymm_ymmm256b64 = 3_053

VPSLLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_zmm_k1z_zmm_zmmm512b64 = 3_054

VPSLLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 47 /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_xmm_k1z_xmmm128b32 = 3_055

VRCP14PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_ymm_k1z_ymmm256b32 = 3_056

VRCP14PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_zmm_k1z_zmmm512b32 = 3_057

VRCP14PS zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 4C /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_xmm_k1z_xmmm128b64 = 3_058

VRCP14PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_ymm_k1z_ymmm256b64 = 3_059

VRCP14PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_zmm_k1z_zmmm512b64 = 3_060

VRCP14PD zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 4C /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14ss_xmm_k1z_xmm_xmmm32 = 3_061

VRCP14SS xmm1 {k1}{z}, xmm2, xmm3/m32

EVEX.LIG.66.0F38.W0 4D /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14sd_xmm_k1z_xmm_xmmm64 = 3_062

VRCP14SD xmm1 {k1}{z}, xmm2, xmm3/m64

EVEX.LIG.66.0F38.W1 4D /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_xmm_k1z_xmmm128b32 = 3_063

VRSQRT14PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_ymm_k1z_ymmm256b32 = 3_064

VRSQRT14PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_zmm_k1z_zmmm512b32 = 3_065

VRSQRT14PS zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 4E /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_xmm_k1z_xmmm128b64 = 3_066

VRSQRT14PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_ymm_k1z_ymmm256b64 = 3_067

VRSQRT14PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_zmm_k1z_zmmm512b64 = 3_068

VRSQRT14PD zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 4E /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ss_xmm_k1z_xmm_xmmm32 = 3_069

VRSQRT14SS xmm1 {k1}{z}, xmm2, xmm3/m32

EVEX.LIG.66.0F38.W0 4F /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14sd_xmm_k1z_xmm_xmmm64 = 3_070

VRSQRT14SD xmm1 {k1}{z}, xmm2, xmm3/m64

EVEX.LIG.66.0F38.W1 4F /r

AVX512F

16/32/64-bit

§

EVEX_Vpdpbusd_xmm_k1z_xmm_xmmm128b32 = 3_071

VPDPBUSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 50 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusd_ymm_k1z_ymm_ymmm256b32 = 3_072

VPDPBUSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 50 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusd_zmm_k1z_zmm_zmmm512b32 = 3_073

VPDPBUSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 50 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_xmm_k1z_xmm_xmmm128b32 = 3_074

VPDPBUSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 51 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_ymm_k1z_ymm_ymmm256b32 = 3_075

VPDPBUSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 51 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_zmm_k1z_zmm_zmmm512b32 = 3_076

VPDPBUSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 51 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_xmm_k1z_xmm_xmmm128b32 = 3_077

VPDPWSSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 52 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_ymm_k1z_ymm_ymmm256b32 = 3_078

VPDPWSSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 52 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_zmm_k1z_zmm_zmmm512b32 = 3_079

VPDPWSSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 52 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vdpbf16ps_xmm_k1z_xmm_xmmm128b32 = 3_080

VDPBF16PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.0F38.W0 52 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vdpbf16ps_ymm_k1z_ymm_ymmm256b32 = 3_081

VDPBF16PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.0F38.W0 52 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vdpbf16ps_zmm_k1z_zmm_zmmm512b32 = 3_082

VDPBF16PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.F3.0F38.W0 52 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vp4dpwssd_zmm_k1z_zmmp3_m128 = 3_083

VP4DPWSSD zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 52 /r

AVX512_4VNNIW

16/32/64-bit

§

EVEX_Vpdpwssds_xmm_k1z_xmm_xmmm128b32 = 3_084

VPDPWSSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 53 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssds_ymm_k1z_ymm_ymmm256b32 = 3_085

VPDPWSSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 53 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssds_zmm_k1z_zmm_zmmm512b32 = 3_086

VPDPWSSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 53 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vp4dpwssds_zmm_k1z_zmmp3_m128 = 3_087

VP4DPWSSDS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 53 /r

AVX512_4VNNIW

16/32/64-bit
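Opcode 0F38 52 is shared by three different EVEX instructions: the mandatory prefix (66, F3, or F2) selects between `VPDPWSSD`, `VDPBF16PS`, and `VP4DPWSSD`, and each decodes to its own `Code` value with its own CPUID feature set. A minimal sketch, assuming the crate's default `instr_info` feature and hand-assembled bytes for `VPDPWSSD zmm0, zmm1, zmm2` (EVEX.512.66.0F38.W0 52 /r):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // EVEX.512.66.0F38.W0 52 /r => VPDPWSSD zmm0, zmm1, zmm2 (hand-assembled example bytes)
    let bytes = [0x62, 0xF2, 0x75, 0x48, 0x52, 0xC2];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();

    assert_eq!(instr.code(), Code::EVEX_Vpdpwssd_zmm_k1z_zmm_zmmm512b32);
    // Print the CPUID feature flags gating this form (AVX512_VNNI here).
    println!("{:?}", instr.cpuid_features());
}
```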

§

EVEX_Vpopcntb_xmm_k1z_xmmm128 = 3_088

VPOPCNTB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntb_ymm_k1z_ymmm256 = 3_089

VPOPCNTB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntb_zmm_k1z_zmmm512 = 3_090

VPOPCNTB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 54 /r

AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_xmm_k1z_xmmm128 = 3_091

VPOPCNTW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_ymm_k1z_ymmm256 = 3_092

VPOPCNTW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_zmm_k1z_zmmm512 = 3_093

VPOPCNTW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 54 /r

AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntd_xmm_k1z_xmmm128b32 = 3_094

VPOPCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntd_ymm_k1z_ymmm256b32 = 3_095

VPOPCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntd_zmm_k1z_zmmm512b32 = 3_096

VPOPCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 55 /r

AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_xmm_k1z_xmmm128b64 = 3_097

VPOPCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_ymm_k1z_ymmm256b64 = 3_098

VPOPCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_zmm_k1z_zmmm512b64 = 3_099

VPOPCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 55 /r

AVX512_VPOPCNTDQ

16/32/64-bit

§

VEX_Vpbroadcastd_xmm_xmmm32 = 3_100

VPBROADCASTD xmm1, xmm2/m32

VEX.128.66.0F38.W0 58 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastd_ymm_xmmm32 = 3_101

VPBROADCASTD ymm1, xmm2/m32

VEX.256.66.0F38.W0 58 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastd_xmm_k1z_xmmm32 = 3_102

VPBROADCASTD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_ymm_k1z_xmmm32 = 3_103

VPBROADCASTD ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_zmm_k1z_xmmm32 = 3_104

VPBROADCASTD zmm1 {k1}{z}, xmm2/m32

EVEX.512.66.0F38.W0 58 /r

AVX512F

16/32/64-bit

§

VEX_Vpbroadcastq_xmm_xmmm64 = 3_105

VPBROADCASTQ xmm1, xmm2/m64

VEX.128.66.0F38.W0 59 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastq_ymm_xmmm64 = 3_106

VPBROADCASTQ ymm1, xmm2/m64

VEX.256.66.0F38.W0 59 /r

AVX2

16/32/64-bit

§

EVEX_Vbroadcasti32x2_xmm_k1z_xmmm64 = 3_107

VBROADCASTI32X2 xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 59 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x2_ymm_k1z_xmmm64 = 3_108

VBROADCASTI32X2 ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W0 59 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x2_zmm_k1z_xmmm64 = 3_109

VBROADCASTI32X2 zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W0 59 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpbroadcastq_xmm_k1z_xmmm64 = 3_110

VPBROADCASTQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_ymm_k1z_xmmm64 = 3_111

VPBROADCASTQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_zmm_k1z_xmmm64 = 3_112

VPBROADCASTQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W1 59 /r

AVX512F

16/32/64-bit

§

VEX_Vbroadcasti128_ymm_m128 = 3_113

VBROADCASTI128 ymm1, m128

VEX.256.66.0F38.W0 5A /r

AVX2

16/32/64-bit

§

EVEX_Vbroadcasti32x4_ymm_k1z_m128 = 3_114

VBROADCASTI32X4 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcasti32x4_zmm_k1z_m128 = 3_115

VBROADCASTI32X4 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W0 5A /r

AVX512F

16/32/64-bit

§

EVEX_Vbroadcasti64x2_ymm_k1z_m128 = 3_116

VBROADCASTI64X2 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W1 5A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti64x2_zmm_k1z_m128 = 3_117

VBROADCASTI64X2 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W1 5A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x8_zmm_k1z_m256 = 3_118

VBROADCASTI32X8 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W0 5B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti64x4_zmm_k1z_m256 = 3_119

VBROADCASTI64X4 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W1 5B /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandb_xmm_k1z_xmmm128 = 3_120

VPEXPANDB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandb_ymm_k1z_ymmm256 = 3_121

VPEXPANDB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandb_zmm_k1z_zmmm512 = 3_122

VPEXPANDB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 62 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_xmm_k1z_xmmm128 = 3_123

VPEXPANDW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_ymm_k1z_ymmm256 = 3_124

VPEXPANDW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_zmm_k1z_zmmm512 = 3_125

VPEXPANDW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 62 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_xmmm128_k1z_xmm = 3_126

VPCOMPRESSB xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_ymmm256_k1z_ymm = 3_127

VPCOMPRESSB ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_zmmm512_k1z_zmm = 3_128

VPCOMPRESSB zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 63 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_xmmm128_k1z_xmm = 3_129

VPCOMPRESSW xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_ymmm256_k1z_ymm = 3_130

VPCOMPRESSW ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_zmmm512_k1z_zmm = 3_131

VPCOMPRESSW zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 63 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpblendmd_xmm_k1z_xmm_xmmm128b32 = 3_132

VPBLENDMD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmd_ymm_k1z_ymm_ymmm256b32 = 3_133

VPBLENDMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmd_zmm_k1z_zmm_zmmm512b32 = 3_134

VPBLENDMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 64 /r

AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_xmm_k1z_xmm_xmmm128b64 = 3_135

VPBLENDMQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_ymm_k1z_ymm_ymmm256b64 = 3_136

VPBLENDMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_zmm_k1z_zmm_zmmm512b64 = 3_137

VPBLENDMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 64 /r

AVX512F

16/32/64-bit

§

EVEX_Vblendmps_xmm_k1z_xmm_xmmm128b32 = 3_138

VBLENDMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmps_ymm_k1z_ymm_ymmm256b32 = 3_139

VBLENDMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmps_zmm_k1z_zmm_zmmm512b32 = 3_140

VBLENDMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 65 /r

AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_xmm_k1z_xmm_xmmm128b64 = 3_141

VBLENDMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_ymm_k1z_ymm_ymmm256b64 = 3_142

VBLENDMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_zmm_k1z_zmm_zmmm512b64 = 3_143

VBLENDMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 65 /r

AVX512F

16/32/64-bit

§

EVEX_Vpblendmb_xmm_k1z_xmm_xmmm128 = 3_144

VPBLENDMB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmb_ymm_k1z_ymm_ymmm256 = 3_145

VPBLENDMB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmb_zmm_k1z_zmm_zmmm512 = 3_146

VPBLENDMB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 66 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_xmm_k1z_xmm_xmmm128 = 3_147

VPBLENDMW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_ymm_k1z_ymm_ymmm256 = 3_148

VPBLENDMW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_zmm_k1z_zmm_zmmm512 = 3_149

VPBLENDMW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 66 /r

AVX512BW

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_xmm_xmmm128b32 = 3_150

VP2INTERSECTD k1+1, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.0F38.W0 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_ymm_ymmm256b32 = 3_151

VP2INTERSECTD k1+1, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.0F38.W0 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_zmm_zmmm512b32 = 3_152

VP2INTERSECTD k1+1, zmm2, zmm3/m512/m32bcst

EVEX.512.F2.0F38.W0 68 /r

AVX512F and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_xmm_xmmm128b64 = 3_153

VP2INTERSECTQ k1+1, xmm2, xmm3/m128/m64bcst

EVEX.128.F2.0F38.W1 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_ymm_ymmm256b64 = 3_154

VP2INTERSECTQ k1+1, ymm2, ymm3/m256/m64bcst

EVEX.256.F2.0F38.W1 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_zmm_zmmm512b64 = 3_155

VP2INTERSECTQ k1+1, zmm2, zmm3/m512/m64bcst

EVEX.512.F2.0F38.W1 68 /r

AVX512F and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vpshldvw_xmm_k1z_xmm_xmmm128 = 3_156

VPSHLDVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 70 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvw_ymm_k1z_ymm_ymmm256 = 3_157

VPSHLDVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 70 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvw_zmm_k1z_zmm_zmmm512 = 3_158

VPSHLDVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 70 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_xmm_k1z_xmm_xmmm128b32 = 3_159

VPSHLDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_ymm_k1z_ymm_ymmm256b32 = 3_160

VPSHLDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_zmm_k1z_zmm_zmmm512b32 = 3_161

VPSHLDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 71 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_xmm_k1z_xmm_xmmm128b64 = 3_162

VPSHLDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_ymm_k1z_ymm_ymmm256b64 = 3_163

VPSHLDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_zmm_k1z_zmm_zmmm512b64 = 3_164

VPSHLDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 71 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_xmm_k1z_xmm_xmmm128 = 3_165

VPSHRDVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 72 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_ymm_k1z_ymm_ymmm256 = 3_166

VPSHRDVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 72 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_zmm_k1z_zmm_zmmm512 = 3_167

VPSHRDVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 72 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vcvtneps2bf16_xmm_k1z_xmmm128b32 = 3_168

VCVTNEPS2BF16 xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F3.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtneps2bf16_xmm_k1z_ymmm256b32 = 3_169

VCVTNEPS2BF16 xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F3.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtneps2bf16_ymm_k1z_zmmm512b32 = 3_170

VCVTNEPS2BF16 ymm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.F3.0F38.W0 72 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_xmm_k1z_xmm_xmmm128b32 = 3_171

VCVTNE2PS2BF16 xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_ymm_k1z_ymm_ymmm256b32 = 3_172

VCVTNE2PS2BF16 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_zmm_k1z_zmm_zmmm512b32 = 3_173

VCVTNE2PS2BF16 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.F2.0F38.W0 72 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vpshrdvd_xmm_k1z_xmm_xmmm128b32 = 3_174

VPSHRDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvd_ymm_k1z_ymm_ymmm256b32 = 3_175

VPSHRDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvd_zmm_k1z_zmm_zmmm512b32 = 3_176

VPSHRDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 73 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_xmm_k1z_xmm_xmmm128b64 = 3_177

VPSHRDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_ymm_k1z_ymm_ymmm256b64 = 3_178

VPSHRDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_zmm_k1z_zmm_zmmm512b64 = 3_179

VPSHRDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 73 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpermi2b_xmm_k1z_xmm_xmmm128 = 3_180

VPERMI2B xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 75 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2b_ymm_k1z_ymm_ymmm256 = 3_181

VPERMI2B ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 75 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2b_zmm_k1z_zmm_zmmm512 = 3_182

VPERMI2B zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 75 /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2w_xmm_k1z_xmm_xmmm128 = 3_183

VPERMI2W xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermi2w_ymm_k1z_ymm_ymmm256 = 3_184

VPERMI2W ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermi2w_zmm_k1z_zmm_zmmm512 = 3_185

VPERMI2W zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 75 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpermi2d_xmm_k1z_xmm_xmmm128b32 = 3_186

VPERMI2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2d_ymm_k1z_ymm_ymmm256b32 = 3_187

VPERMI2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2d_zmm_k1z_zmm_zmmm512b32 = 3_188

VPERMI2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 76 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_xmm_k1z_xmm_xmmm128b64 = 3_189

VPERMI2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_ymm_k1z_ymm_ymmm256b64 = 3_190

VPERMI2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_zmm_k1z_zmm_zmmm512b64 = 3_191

VPERMI2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 76 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_xmm_k1z_xmm_xmmm128b32 = 3_192

VPERMI2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_ymm_k1z_ymm_ymmm256b32 = 3_193

VPERMI2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_zmm_k1z_zmm_zmmm512b32 = 3_194

VPERMI2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 77 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_xmm_k1z_xmm_xmmm128b64 = 3_195

VPERMI2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_ymm_k1z_ymm_ymmm256b64 = 3_196

VPERMI2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_zmm_k1z_zmm_zmmm512b64 = 3_197

VPERMI2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 77 /r

AVX512F

16/32/64-bit

§

VEX_Vpbroadcastb_xmm_xmmm8 = 3_198

VPBROADCASTB xmm1, xmm2/m8

VEX.128.66.0F38.W0 78 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastb_ymm_xmmm8 = 3_199

VPBROADCASTB ymm1, xmm2/m8

VEX.256.66.0F38.W0 78 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastb_xmm_k1z_xmmm8 = 3_200

VPBROADCASTB xmm1 {k1}{z}, xmm2/m8

EVEX.128.66.0F38.W0 78 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_ymm_k1z_xmmm8 = 3_201

VPBROADCASTB ymm1 {k1}{z}, xmm2/m8

EVEX.256.66.0F38.W0 78 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_zmm_k1z_xmmm8 = 3_202

VPBROADCASTB zmm1 {k1}{z}, xmm2/m8

EVEX.512.66.0F38.W0 78 /r

AVX512BW

16/32/64-bit

§

VEX_Vpbroadcastw_xmm_xmmm16 = 3_203

VPBROADCASTW xmm1, xmm2/m16

VEX.128.66.0F38.W0 79 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastw_ymm_xmmm16 = 3_204

VPBROADCASTW ymm1, xmm2/m16

VEX.256.66.0F38.W0 79 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastw_xmm_k1z_xmmm16 = 3_205

VPBROADCASTW xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.W0 79 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_ymm_k1z_xmmm16 = 3_206

VPBROADCASTW ymm1 {k1}{z}, xmm2/m16

EVEX.256.66.0F38.W0 79 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_zmm_k1z_xmmm16 = 3_207

VPBROADCASTW zmm1 {k1}{z}, xmm2/m16

EVEX.512.66.0F38.W0 79 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_xmm_k1z_r32 = 3_208

VPBROADCASTB xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_ymm_k1z_r32 = 3_209

VPBROADCASTB ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_zmm_k1z_r32 = 3_210

VPBROADCASTB zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7A /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_xmm_k1z_r32 = 3_211

VPBROADCASTW xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_ymm_k1z_r32 = 3_212

VPBROADCASTW ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_zmm_k1z_r32 = 3_213

VPBROADCASTW zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7B /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastd_xmm_k1z_r32 = 3_214

VPBROADCASTD xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_ymm_k1z_r32 = 3_215

VPBROADCASTD ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_zmm_k1z_r32 = 3_216

VPBROADCASTD zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7C /r

AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_xmm_k1z_r64 = 3_217

VPBROADCASTQ xmm1 {k1}{z}, r64

EVEX.128.66.0F38.W1 7C /r

AVX512VL and AVX512F

64-bit

§

EVEX_Vpbroadcastq_ymm_k1z_r64 = 3_218

VPBROADCASTQ ymm1 {k1}{z}, r64

EVEX.256.66.0F38.W1 7C /r

AVX512VL and AVX512F

64-bit

§

EVEX_Vpbroadcastq_zmm_k1z_r64 = 3_219

VPBROADCASTQ zmm1 {k1}{z}, r64

EVEX.512.66.0F38.W1 7C /r

AVX512F

64-bit
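The 7A–7C forms above broadcast from a general-purpose register rather than from an XMM register or memory, and the `W1 7C` (r64 source) encodings exist only in 64-bit mode. A small sketch, assuming hand-assembled bytes for `VPBROADCASTB xmm0, eax` (EVEX.128.66.0F38.W0 7A /r):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, OpKind, Register};

fn main() {
    // EVEX.128.66.0F38.W0 7A /r => VPBROADCASTB xmm0, eax (hand-assembled example bytes)
    let bytes = [0x62, 0xF2, 0x7D, 0x08, 0x7A, 0xC0];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();

    assert_eq!(instr.code(), Code::EVEX_Vpbroadcastb_xmm_k1z_r32);
    // The source operand really is a GPR, not an XMM register or memory operand.
    assert_eq!(instr.op1_kind(), OpKind::Register);
    assert_eq!(instr.op1_register(), Register::EAX);
}
```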

§

EVEX_Vpermt2b_xmm_k1z_xmm_xmmm128 = 3_220

VPERMT2B xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 7D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2b_ymm_k1z_ymm_ymmm256 = 3_221

VPERMT2B ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 7D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2b_zmm_k1z_zmm_zmmm512 = 3_222

VPERMT2B zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 7D /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2w_xmm_k1z_xmm_xmmm128 = 3_223

VPERMT2W xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 7D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermt2w_ymm_k1z_ymm_ymmm256 = 3_224

VPERMT2W ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 7D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermt2w_zmm_k1z_zmm_zmmm512 = 3_225

VPERMT2W zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 7D /r

AVX512BW

16/32/64-bit

§

EVEX_Vpermt2d_xmm_k1z_xmm_xmmm128b32 = 3_226

VPERMT2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2d_ymm_k1z_ymm_ymmm256b32 = 3_227

VPERMT2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2d_zmm_k1z_zmm_zmmm512b32 = 3_228

VPERMT2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_xmm_k1z_xmm_xmmm128b64 = 3_229

VPERMT2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_ymm_k1z_ymm_ymmm256b64 = 3_230

VPERMT2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_zmm_k1z_zmm_zmmm512b64 = 3_231

VPERMT2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_xmm_k1z_xmm_xmmm128b32 = 3_232

VPERMT2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_ymm_k1z_ymm_ymmm256b32 = 3_233

VPERMT2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_zmm_k1z_zmm_zmmm512b32 = 3_234

VPERMT2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_xmm_k1z_xmm_xmmm128b64 = 3_235

VPERMT2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_ymm_k1z_ymm_ymmm256b64 = 3_236

VPERMT2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_zmm_k1z_zmm_zmmm512b64 = 3_237

VPERMT2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 7F /r

AVX512F

16/32/64-bit

§

Invept_r32_m128 = 3_238

INVEPT r32, m128

66 0F 38 80 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 20]

16/32-bit

§

Invept_r64_m128 = 3_239

INVEPT r64, m128

66 0F 38 80 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 20]

64-bit

§

Invvpid_r32_m128 = 3_240

INVVPID r32, m128

66 0F 38 81 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 32]

16/32-bit

§

Invvpid_r64_m128 = 3_241

INVVPID r64, m128

66 0F 38 81 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 32]

64-bit

§

Invpcid_r32_m128 = 3_242

INVPCID r32, m128

66 0F 38 82 /r

INVPCID

16/32-bit

§

Invpcid_r64_m128 = 3_243

INVPCID r64, m128

66 0F 38 82 /r

INVPCID

64-bit
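`Invpcid_r32_m128` and `Invpcid_r64_m128` share one encoding; which `Code` value you get depends only on the decoder's bitness. A short sketch, assuming hand-assembled bytes for `INVPCID rax/eax, [rcx/ecx]`:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 66 0F 38 82 /r with modrm = 01: INVPCID reg, [rCX/eCX]
    let bytes = [0x66, 0x0F, 0x38, 0x82, 0x01];

    // In 64-bit mode the register operand is a 64-bit register...
    let mut decoder64 = Decoder::new(64, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder64.decode().code(), Code::Invpcid_r64_m128);

    // ...while 16/32-bit mode selects the r32 form.
    let mut decoder32 = Decoder::new(32, &bytes, DecoderOptions::NONE);
    assert_eq!(decoder32.decode().code(), Code::Invpcid_r32_m128);
}
```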

§

EVEX_Vpmultishiftqb_xmm_k1z_xmm_xmmm128b64 = 3_244

VPMULTISHIFTQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 83 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpmultishiftqb_ymm_k1z_ymm_ymmm256b64 = 3_245

VPMULTISHIFTQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 83 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpmultishiftqb_zmm_k1z_zmm_zmmm512b64 = 3_246

VPMULTISHIFTQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 83 /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vexpandps_xmm_k1z_xmmm128 = 3_247

VEXPANDPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandps_ymm_k1z_ymmm256 = 3_248

VEXPANDPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandps_zmm_k1z_zmmm512 = 3_249

VEXPANDPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 88 /r

AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_xmm_k1z_xmmm128 = 3_250

VEXPANDPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_ymm_k1z_ymmm256 = 3_251

VEXPANDPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_zmm_k1z_zmmm512 = 3_252

VEXPANDPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 88 /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_xmm_k1z_xmmm128 = 3_253

VPEXPANDD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_ymm_k1z_ymmm256 = 3_254

VPEXPANDD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_zmm_k1z_zmmm512 = 3_255

VPEXPANDD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 89 /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_xmm_k1z_xmmm128 = 3_256

VPEXPANDQ xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_ymm_k1z_ymmm256 = 3_257

VPEXPANDQ ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_zmm_k1z_zmmm512 = 3_258

VPEXPANDQ zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 89 /r

AVX512F

16/32/64-bit

§

EVEX_Vcompressps_xmmm128_k1z_xmm = 3_259

VCOMPRESSPS xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompressps_ymmm256_k1z_ymm = 3_260

VCOMPRESSPS ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompressps_zmmm512_k1z_zmm = 3_261

VCOMPRESSPS zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 8A /r

AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_xmmm128_k1z_xmm = 3_262

VCOMPRESSPD xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_ymmm256_k1z_ymm = 3_263

VCOMPRESSPD ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_zmmm512_k1z_zmm = 3_264

VCOMPRESSPD zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 8A /r

AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_xmmm128_k1z_xmm = 3_265

VPCOMPRESSD xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_ymmm256_k1z_ymm = 3_266

VPCOMPRESSD ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_zmmm512_k1z_zmm = 3_267

VPCOMPRESSD zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 8B /r

AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_xmmm128_k1z_xmm = 3_268

VPCOMPRESSQ xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_ymmm256_k1z_ymm = 3_269

VPCOMPRESSQ ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_zmmm512_k1z_zmm = 3_270

VPCOMPRESSQ zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 8B /r

AVX512F

16/32/64-bit

§

VEX_Vpmaskmovd_xmm_xmm_m128 = 3_271

VPMASKMOVD xmm1, xmm2, m128

VEX.128.66.0F38.W0 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovd_ymm_ymm_m256 = 3_272

VPMASKMOVD ymm1, ymm2, m256

VEX.256.66.0F38.W0 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_xmm_xmm_m128 = 3_273

VPMASKMOVQ xmm1, xmm2, m128

VEX.128.66.0F38.W1 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_ymm_ymm_m256 = 3_274

VPMASKMOVQ ymm1, ymm2, m256

VEX.256.66.0F38.W1 8C /r

AVX2

16/32/64-bit

§

EVEX_Vpermb_xmm_k1z_xmm_xmmm128 = 3_275

VPERMB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 8D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermb_ymm_k1z_ymm_ymmm256 = 3_276

VPERMB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 8D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermb_zmm_k1z_zmm_zmmm512 = 3_277

VPERMB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 8D /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermw_xmm_k1z_xmm_xmmm128 = 3_278

VPERMW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 8D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermw_ymm_k1z_ymm_ymmm256 = 3_279

VPERMW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 8D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermw_zmm_k1z_zmm_zmmm512 = 3_280

VPERMW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 8D /r

AVX512BW

16/32/64-bit

§

VEX_Vpmaskmovd_m128_xmm_xmm = 3_281

VPMASKMOVD m128, xmm1, xmm2

VEX.128.66.0F38.W0 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovd_m256_ymm_ymm = 3_282

VPMASKMOVD m256, ymm1, ymm2

VEX.256.66.0F38.W0 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_m128_xmm_xmm = 3_283

VPMASKMOVQ m128, xmm1, xmm2

VEX.128.66.0F38.W1 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_m256_ymm_ymm = 3_284

VPMASKMOVQ m256, ymm1, ymm2

VEX.256.66.0F38.W1 8E /r

AVX2

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_xmm_xmmm128 = 3_285

VPSHUFBITQMB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 8F /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_ymm_ymmm256 = 3_286

VPSHUFBITQMB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 8F /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_zmm_zmmm512 = 3_287

VPSHUFBITQMB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 8F /r

AVX512_BITALG

16/32/64-bit

§

VEX_Vpgatherdd_xmm_vm32x_xmm = 3_288

VPGATHERDD xmm1, vm32x, xmm2

VEX.128.66.0F38.W0 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdd_ymm_vm32y_ymm = 3_289

VPGATHERDD ymm1, vm32y, ymm2

VEX.256.66.0F38.W0 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdq_xmm_vm32x_xmm = 3_290

VPGATHERDQ xmm1, vm32x, xmm2

VEX.128.66.0F38.W1 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdq_ymm_vm32x_ymm = 3_291

VPGATHERDQ ymm1, vm32x, ymm2

VEX.256.66.0F38.W1 90 /r

AVX2

16/32/64-bit

§

EVEX_Vpgatherdd_xmm_k1_vm32x = 3_292

VPGATHERDD xmm1 {k1}, vm32x

EVEX.128.66.0F38.W0 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdd_ymm_k1_vm32y = 3_293

VPGATHERDD ymm1 {k1}, vm32y

EVEX.256.66.0F38.W0 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdd_zmm_k1_vm32z = 3_294

VPGATHERDD zmm1 {k1}, vm32z

EVEX.512.66.0F38.W0 90 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_xmm_k1_vm32x = 3_295

VPGATHERDQ xmm1 {k1}, vm32x

EVEX.128.66.0F38.W1 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_ymm_k1_vm32x = 3_296

VPGATHERDQ ymm1 {k1}, vm32x

EVEX.256.66.0F38.W1 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_zmm_k1_vm32y = 3_297

VPGATHERDQ zmm1 {k1}, vm32y

EVEX.512.66.0F38.W1 90 /vsib

AVX512F

16/32/64-bit

§

VEX_Vpgatherqd_xmm_vm64x_xmm = 3_298

VPGATHERQD xmm1, vm64x, xmm2

VEX.128.66.0F38.W0 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqd_xmm_vm64y_xmm = 3_299

VPGATHERQD xmm1, vm64y, xmm2

VEX.256.66.0F38.W0 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqq_xmm_vm64x_xmm = 3_300

VPGATHERQQ xmm1, vm64x, xmm2

VEX.128.66.0F38.W1 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqq_ymm_vm64y_ymm = 3_301

VPGATHERQQ ymm1, vm64y, ymm2

VEX.256.66.0F38.W1 91 /r

AVX2

16/32/64-bit

§

EVEX_Vpgatherqd_xmm_k1_vm64x = 3_302

VPGATHERQD xmm1 {k1}, vm64x

EVEX.128.66.0F38.W0 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqd_xmm_k1_vm64y = 3_303

VPGATHERQD xmm1 {k1}, vm64y

EVEX.256.66.0F38.W0 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqd_ymm_k1_vm64z = 3_304

VPGATHERQD ymm1 {k1}, vm64z

EVEX.512.66.0F38.W0 91 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_xmm_k1_vm64x = 3_305

VPGATHERQQ xmm1 {k1}, vm64x

EVEX.128.66.0F38.W1 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_ymm_k1_vm64y = 3_306

VPGATHERQQ ymm1 {k1}, vm64y

EVEX.256.66.0F38.W1 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_zmm_k1_vm64z = 3_307

VPGATHERQQ zmm1 {k1}, vm64z

EVEX.512.66.0F38.W1 91 /vsib

AVX512F

16/32/64-bit

§

VEX_Vgatherdps_xmm_vm32x_xmm = 3_308

VGATHERDPS xmm1, vm32x, xmm2

VEX.128.66.0F38.W0 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdps_ymm_vm32y_ymm = 3_309

VGATHERDPS ymm1, vm32y, ymm2

VEX.256.66.0F38.W0 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdpd_xmm_vm32x_xmm = 3_310

VGATHERDPD xmm1, vm32x, xmm2

VEX.128.66.0F38.W1 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdpd_ymm_vm32x_ymm = 3_311

VGATHERDPD ymm1, vm32x, ymm2

VEX.256.66.0F38.W1 92 /r

AVX2

16/32/64-bit

§

EVEX_Vgatherdps_xmm_k1_vm32x = 3_312

VGATHERDPS xmm1 {k1}, vm32x

EVEX.128.66.0F38.W0 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdps_ymm_k1_vm32y = 3_313

VGATHERDPS ymm1 {k1}, vm32y

EVEX.256.66.0F38.W0 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdps_zmm_k1_vm32z = 3_314

VGATHERDPS zmm1 {k1}, vm32z

EVEX.512.66.0F38.W0 92 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_xmm_k1_vm32x = 3_315

VGATHERDPD xmm1 {k1}, vm32x

EVEX.128.66.0F38.W1 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_ymm_k1_vm32x = 3_316

VGATHERDPD ymm1 {k1}, vm32x

EVEX.256.66.0F38.W1 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_zmm_k1_vm32y = 3_317

VGATHERDPD zmm1 {k1}, vm32y

EVEX.512.66.0F38.W1 92 /vsib

AVX512F

16/32/64-bit

§

VEX_Vgatherqps_xmm_vm64x_xmm = 3_318

VGATHERQPS xmm1, vm64x, xmm2

VEX.128.66.0F38.W0 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqps_xmm_vm64y_xmm = 3_319

VGATHERQPS xmm1, vm64y, xmm2

VEX.256.66.0F38.W0 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqpd_xmm_vm64x_xmm = 3_320

VGATHERQPD xmm1, vm64x, xmm2

VEX.128.66.0F38.W1 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqpd_ymm_vm64y_ymm = 3_321

VGATHERQPD ymm1, vm64y, ymm2

VEX.256.66.0F38.W1 93 /r

AVX2

16/32/64-bit

§

EVEX_Vgatherqps_xmm_k1_vm64x = 3_322

VGATHERQPS xmm1 {k1}, vm64x

EVEX.128.66.0F38.W0 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqps_xmm_k1_vm64y = 3_323

VGATHERQPS xmm1 {k1}, vm64y

EVEX.256.66.0F38.W0 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqps_ymm_k1_vm64z = 3_324

VGATHERQPS ymm1 {k1}, vm64z

EVEX.512.66.0F38.W0 93 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_xmm_k1_vm64x = 3_325

VGATHERQPD xmm1 {k1}, vm64x

EVEX.128.66.0F38.W1 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_ymm_k1_vm64y = 3_326

VGATHERQPD ymm1 {k1}, vm64y

EVEX.256.66.0F38.W1 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_zmm_k1_vm64z = 3_327

VGATHERQPD zmm1 {k1}, vm64z

EVEX.512.66.0F38.W1 93 /vsib

AVX512F

16/32/64-bit
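The VEX gathers take three operands (destination, VSIB memory, and a vector mask), while the EVEX forms above drop the third operand and use an opmask register `{k1}` instead; the decoder exposes that difference through the distinct `Code` values and the operand count. A small sketch, assuming hand-assembled bytes for `VPGATHERDD xmm0, [rax+xmm1], xmm2` (VEX.128.66.0F38.W0 90 /r) and that `is_vsib32()` is available on `Instruction`:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // VEX.128.66.0F38.W0 90 /r with a VSIB memory operand:
    // VPGATHERDD xmm0, [rax+xmm1*1], xmm2 (hand-assembled example bytes)
    let bytes = [0xC4, 0xE2, 0x69, 0x90, 0x04, 0x08];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();

    assert_eq!(instr.code(), Code::VEX_Vpgatherdd_xmm_vm32x_xmm);
    assert_eq!(instr.op_count(), 3); // dst, vm32x, vector mask
    assert!(instr.is_vsib32());      // the memory operand uses a 32-bit vector index
}
```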

§

VEX_Vfmaddsub132ps_xmm_xmm_xmmm128 = 3_328

VFMADDSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132ps_ymm_ymm_ymmm256 = 3_329

VFMADDSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132pd_xmm_xmm_xmmm128 = 3_330

VFMADDSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132pd_ymm_ymm_ymmm256 = 3_331

VFMADDSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 96 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_332

VFMADDSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_333

VFMADDSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_334

VFMADDSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 96 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_335

VFMADDSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_336

VFMADDSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_337

VFMADDSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 96 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd132ps_xmm_xmm_xmmm128 = 3_338

VFMSUBADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132ps_ymm_ymm_ymmm256 = 3_339

VFMSUBADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132pd_xmm_xmm_xmmm128 = 3_340

VFMSUBADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132pd_ymm_ymm_ymmm256 = 3_341

VFMSUBADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 97 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_342

VFMSUBADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_343

VFMSUBADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_344

VFMSUBADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 97 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_345

VFMSUBADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_346

VFMSUBADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_347

VFMSUBADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 97 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd132ps_xmm_xmm_xmmm128 = 3_348

VFMADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132ps_ymm_ymm_ymmm256 = 3_349

VFMADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132pd_xmm_xmm_xmmm128 = 3_350

VFMADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132pd_ymm_ymm_ymmm256 = 3_351

VFMADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 98 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_352

VFMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_353

VFMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_354

VFMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 98 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_355

VFMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_356

VFMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_357

VFMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 98 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd132ss_xmm_xmm_xmmm32 = 3_358

VFMADD132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 99 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132sd_xmm_xmm_xmmm64 = 3_359

VFMADD132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 99 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd132ss_xmm_k1z_xmm_xmmm32_er = 3_360

VFMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 99 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd132sd_xmm_k1z_xmm_xmmm64_er = 3_361

VFMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 99 /r

AVX512F

16/32/64-bit
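The `VEX_` and `EVEX_` variants of the scalar FMA forms are separate `Code` values, and the `_er` suffix marks the EVEX encodings that can carry an `{er}` rounding override. A short sketch, assuming the default formatter features and hand-assembled bytes for the VEX form `VFMADD132SD xmm0, xmm1, xmm2` (VEX.LIG.66.0F38.W1 99 /r):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Formatter, NasmFormatter};

fn main() {
    // VEX.LIG.66.0F38.W1 99 /r => VFMADD132SD xmm0, xmm1, xmm2 (hand-assembled example bytes)
    let bytes = [0xC4, 0xE2, 0xF1, 0x99, 0xC2];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();

    assert_eq!(instr.code(), Code::VEX_Vfmadd132sd_xmm_xmm_xmmm64);

    // Format it back to assembly text; an EVEX `_er` form would additionally show
    // its rounding suffix (e.g. {rn-sae}) when EVEX.b is set with register operands.
    let mut formatter = NasmFormatter::new();
    let mut output = String::new();
    formatter.format(&instr, &mut output);
    println!("{}", output); // e.g. "vfmadd132sd xmm0,xmm1,xmm2"
}
```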

§

VEX_Vfmsub132ps_xmm_xmm_xmmm128 = 3_362

VFMSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132ps_ymm_ymm_ymmm256 = 3_363

VFMSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132pd_xmm_xmm_xmmm128 = 3_364

VFMSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132pd_ymm_ymm_ymmm256 = 3_365

VFMSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9A /r

FMA

16/32/64-bit

§

EVEX_Vfmsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_366

VFMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_367

VFMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_368

VFMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9A /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_369

VFMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_370

VFMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_371

VFMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9A /r

AVX512F

16/32/64-bit

§

EVEX_V4fmaddps_zmm_k1z_zmmp3_m128 = 3_372

V4FMADDPS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 9A /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfmsub132ss_xmm_xmm_xmmm32 = 3_373

VFMSUB132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9B /r

FMA

16/32/64-bit

§

VEX_Vfmsub132sd_xmm_xmm_xmmm64 = 3_374

VFMSUB132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9B /r

FMA

16/32/64-bit

§

EVEX_Vfmsub132ss_xmm_k1z_xmm_xmmm32_er = 3_375

VFMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9B /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub132sd_xmm_k1z_xmm_xmmm64_er = 3_376

VFMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9B /r

AVX512F

16/32/64-bit

§

EVEX_V4fmaddss_xmm_k1z_xmmp3_m128 = 3_377

V4FMADDSS xmm1 {k1}{z}, xmm2+3, m128

EVEX.LIG.F2.0F38.W0 9B /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfnmadd132ps_xmm_xmm_xmmm128 = 3_378

VFNMADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132ps_ymm_ymm_ymmm256 = 3_379

VFNMADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132pd_xmm_xmm_xmmm128 = 3_380

VFNMADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132pd_ymm_ymm_ymmm256 = 3_381

VFNMADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9C /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd132ps_xmm_k1z_xmm_xmmm128b32 = 3_382

VFNMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132ps_ymm_k1z_ymm_ymmm256b32 = 3_383

VFNMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132ps_zmm_k1z_zmm_zmmm512b32_er = 3_384

VFNMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9C /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_xmm_k1z_xmm_xmmm128b64 = 3_385

VFNMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_ymm_k1z_ymm_ymmm256b64 = 3_386

VFNMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_zmm_k1z_zmm_zmmm512b64_er = 3_387

VFNMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9C /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd132ss_xmm_xmm_xmmm32 = 3_388

VFNMADD132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9D /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132sd_xmm_xmm_xmmm64 = 3_389

VFNMADD132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9D /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd132ss_xmm_k1z_xmm_xmmm32_er = 3_390

VFNMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9D /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132sd_xmm_k1z_xmm_xmmm64_er = 3_391

VFNMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9D /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub132ps_xmm_xmm_xmmm128 = 3_392

VFNMSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132ps_ymm_ymm_ymmm256 = 3_393

VFNMSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132pd_xmm_xmm_xmmm128 = 3_394

VFNMSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132pd_ymm_ymm_ymmm256 = 3_395

VFNMSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9E /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub132ps_xmm_k1z_xmm_xmmm128b32 = 3_396

VFNMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132ps_ymm_k1z_ymm_ymmm256b32 = 3_397

VFNMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132ps_zmm_k1z_zmm_zmmm512b32_er = 3_398

VFNMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9E /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_xmm_k1z_xmm_xmmm128b64 = 3_399

VFNMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_ymm_k1z_ymm_ymmm256b64 = 3_400

VFNMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_zmm_k1z_zmm_zmmm512b64_er = 3_401

VFNMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9E /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub132ss_xmm_xmm_xmmm32 = 3_402

VFNMSUB132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9F /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132sd_xmm_xmm_xmmm64 = 3_403

VFNMSUB132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9F /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub132ss_xmm_k1z_xmm_xmmm32_er = 3_404

VFNMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9F /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132sd_xmm_k1z_xmm_xmmm64_er = 3_405

VFNMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9F /r

AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32x_k1_xmm = 3_406

VPSCATTERDD vm32x {k1}, xmm1

EVEX.128.66.0F38.W0 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32y_k1_ymm = 3_407

VPSCATTERDD vm32y {k1}, ymm1

EVEX.256.66.0F38.W0 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32z_k1_zmm = 3_408

VPSCATTERDD vm32z {k1}, zmm1

EVEX.512.66.0F38.W0 A0 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32x_k1_xmm = 3_409

VPSCATTERDQ vm32x {k1}, xmm1

EVEX.128.66.0F38.W1 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32x_k1_ymm = 3_410

VPSCATTERDQ vm32x {k1}, ymm1

EVEX.256.66.0F38.W1 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32y_k1_zmm = 3_411

VPSCATTERDQ vm32y {k1}, zmm1

EVEX.512.66.0F38.W1 A0 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64x_k1_xmm = 3_412

VPSCATTERQD vm64x {k1}, xmm1

EVEX.128.66.0F38.W0 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64y_k1_xmm = 3_413

VPSCATTERQD vm64y {k1}, xmm1

EVEX.256.66.0F38.W0 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64z_k1_ymm = 3_414

VPSCATTERQD vm64z {k1}, ymm1

EVEX.512.66.0F38.W0 A1 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64x_k1_xmm = 3_415

VPSCATTERQQ vm64x {k1}, xmm1

EVEX.128.66.0F38.W1 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64y_k1_ymm = 3_416

VPSCATTERQQ vm64y {k1}, ymm1

EVEX.256.66.0F38.W1 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64z_k1_zmm = 3_417

VPSCATTERQQ vm64z {k1}, zmm1

EVEX.512.66.0F38.W1 A1 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32x_k1_xmm = 3_418

VSCATTERDPS vm32x {k1}, xmm1

EVEX.128.66.0F38.W0 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32y_k1_ymm = 3_419

VSCATTERDPS vm32y {k1}, ymm1

EVEX.256.66.0F38.W0 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32z_k1_zmm = 3_420

VSCATTERDPS vm32z {k1}, zmm1

EVEX.512.66.0F38.W0 A2 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32x_k1_xmm = 3_421

VSCATTERDPD vm32x {k1}, xmm1

EVEX.128.66.0F38.W1 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32x_k1_ymm = 3_422

VSCATTERDPD vm32x {k1}, ymm1

EVEX.256.66.0F38.W1 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32y_k1_zmm = 3_423

VSCATTERDPD vm32y {k1}, zmm1

EVEX.512.66.0F38.W1 A2 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64x_k1_xmm = 3_424

VSCATTERQPS vm64x {k1}, xmm1

EVEX.128.66.0F38.W0 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64y_k1_xmm = 3_425

VSCATTERQPS vm64y {k1}, xmm1

EVEX.256.66.0F38.W0 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64z_k1_ymm = 3_426

VSCATTERQPS vm64z {k1}, ymm1

EVEX.512.66.0F38.W0 A3 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64x_k1_xmm = 3_427

VSCATTERQPD vm64x {k1}, xmm1

EVEX.128.66.0F38.W1 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64y_k1_ymm = 3_428

VSCATTERQPD vm64y {k1}, ymm1

EVEX.256.66.0F38.W1 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64z_k1_zmm = 3_429

VSCATTERQPD vm64z {k1}, zmm1

EVEX.512.66.0F38.W1 A3 /vsib

AVX512F

16/32/64-bit
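
In the scatter entries above, the vm32/vm64 prefix of the memory operand gives the index element width and the trailing x/y/z gives the index register class (XMM/YMM/ZMM), while {k1} is the completion mask; there is no register destination. A minimal sketch, assuming that classifying by `Code` variant is enough for the caller, that maps a few of the dword-indexed scatter stores to their data-element width:

```rust
use iced_x86::Code;

/// Data-element width in bits for a few of the dword-indexed scatter stores
/// listed above (the full family is much larger; unlisted codes return None).
fn scatter_data_bits(code: Code) -> Option<u32> {
    match code {
        Code::EVEX_Vpscatterdd_vm32x_k1_xmm
        | Code::EVEX_Vpscatterdd_vm32y_k1_ymm
        | Code::EVEX_Vpscatterdd_vm32z_k1_zmm
        | Code::EVEX_Vscatterdps_vm32z_k1_zmm => Some(32),
        Code::EVEX_Vpscatterdq_vm32x_k1_xmm
        | Code::EVEX_Vpscatterdq_vm32x_k1_ymm
        | Code::EVEX_Vpscatterdq_vm32y_k1_zmm
        | Code::EVEX_Vscatterdpd_vm32y_k1_zmm => Some(64),
        _ => None,
    }
}

fn main() {
    assert_eq!(scatter_data_bits(Code::EVEX_Vscatterdps_vm32z_k1_zmm), Some(32));
    assert_eq!(scatter_data_bits(Code::EVEX_Vpscatterqq_vm64z_k1_zmm), None);
}
```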

§

VEX_Vfmaddsub213ps_xmm_xmm_xmmm128 = 3_430

VFMADDSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213ps_ymm_ymm_ymmm256 = 3_431

VFMADDSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213pd_xmm_xmm_xmmm128 = 3_432

VFMADDSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213pd_ymm_ymm_ymmm256 = 3_433

VFMADDSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A6 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_434

VFMADDSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_435

VFMADDSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_436

VFMADDSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A6 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_437

VFMADDSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_438

VFMADDSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_439

VFMADDSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A6 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd213ps_xmm_xmm_xmmm128 = 3_440

VFMSUBADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213ps_ymm_ymm_ymmm256 = 3_441

VFMSUBADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213pd_xmm_xmm_xmmm128 = 3_442

VFMSUBADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213pd_ymm_ymm_ymmm256 = 3_443

VFMSUBADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A7 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_444

VFMSUBADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_445

VFMSUBADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_446

VFMSUBADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A7 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_447

VFMSUBADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_448

VFMSUBADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_449

VFMSUBADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A7 /r

AVX512F

16/32/64-bit
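
The FMADDSUB and FMSUBADD families above alternate the sign of the addend per lane: FMADDSUB subtracts in even lanes and adds in odd lanes, FMSUBADD does the opposite. A minimal scalar sketch of one pair of adjacent f64 lanes (a model of the documented behavior, not library code):

```rust
/// Scalar model of two adjacent lanes of VFMADDSUB213PD:
/// even lane = a*b - c, odd lane = a*b + c (VFMSUBADD swaps the two).
fn fmaddsub_pair(a: [f64; 2], b: [f64; 2], c: [f64; 2]) -> [f64; 2] {
    [a[0].mul_add(b[0], -c[0]), a[1].mul_add(b[1], c[1])]
}

fn main() {
    // even lane: 2*3 - 1 = 5, odd lane: 2*3 + 1 = 7
    assert_eq!(fmaddsub_pair([2.0, 2.0], [3.0, 3.0], [1.0, 1.0]), [5.0, 7.0]);
}
```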

§

VEX_Vfmadd213ps_xmm_xmm_xmmm128 = 3_450

VFMADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A8 /r

FMA

16/32/64-bit
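
In the 132/213/231 naming used by all of these FMA forms, the digits give the operand order of the multiply-add; 213 means dst = src2 * dst + src3. A hedged decoding sketch for the VEX.128 form above, with the byte string hand-assembled from its opcode column, so treat it as illustrative rather than authoritative:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // C4 E2 71 A8 C2 should be VFMADD213PS xmm0, xmm1, xmm2
    // (VEX.128.66.0F38.W0 A8 /r, hand-assembled for illustration).
    let bytes = [0xC4, 0xE2, 0x71, 0xA8, 0xC2];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::VEX_Vfmadd213ps_xmm_xmm_xmmm128);
}
```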

§

VEX_Vfmadd213ps_ymm_ymm_ymmm256 = 3_451

VFMADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213pd_xmm_xmm_xmmm128 = 3_452

VFMADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213pd_ymm_ymm_ymmm256 = 3_453

VFMADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A8 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_454

VFMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_455

VFMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_456

VFMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A8 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_457

VFMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_458

VFMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_459

VFMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A8 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd213ss_xmm_xmm_xmmm32 = 3_460

VFMADD213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 A9 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213sd_xmm_xmm_xmmm64 = 3_461

VFMADD213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 A9 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd213ss_xmm_k1z_xmm_xmmm32_er = 3_462

VFMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 A9 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd213sd_xmm_k1z_xmm_xmmm64_er = 3_463

VFMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 A9 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub213ps_xmm_xmm_xmmm128 = 3_464

VFMSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213ps_ymm_ymm_ymmm256 = 3_465

VFMSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213pd_xmm_xmm_xmmm128 = 3_466

VFMSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213pd_ymm_ymm_ymmm256 = 3_467

VFMSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AA /r

FMA

16/32/64-bit

§

EVEX_Vfmsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_468

VFMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_469

VFMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_470

VFMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AA /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_471

VFMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_472

VFMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_473

VFMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AA /r

AVX512F

16/32/64-bit

§

EVEX_V4fnmaddps_zmm_k1z_zmmp3_m128 = 3_474

V4FNMADDPS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 AA /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfmsub213ss_xmm_xmm_xmmm32 = 3_475

VFMSUB213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AB /r

FMA

16/32/64-bit

§

VEX_Vfmsub213sd_xmm_xmm_xmmm64 = 3_476

VFMSUB213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AB /r

FMA

16/32/64-bit

§

EVEX_Vfmsub213ss_xmm_k1z_xmm_xmmm32_er = 3_477

VFMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AB /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub213sd_xmm_k1z_xmm_xmmm64_er = 3_478

VFMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AB /r

AVX512F

16/32/64-bit

§

EVEX_V4fnmaddss_xmm_k1z_xmmp3_m128 = 3_479

V4FNMADDSS xmm1 {k1}{z}, xmm2+3, m128

EVEX.LIG.F2.0F38.W0 AB /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfnmadd213ps_xmm_xmm_xmmm128 = 3_480

VFNMADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213ps_ymm_ymm_ymmm256 = 3_481

VFNMADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213pd_xmm_xmm_xmmm128 = 3_482

VFNMADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213pd_ymm_ymm_ymmm256 = 3_483

VFNMADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AC /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd213ps_xmm_k1z_xmm_xmmm128b32 = 3_484

VFNMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213ps_ymm_k1z_ymm_ymmm256b32 = 3_485

VFNMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213ps_zmm_k1z_zmm_zmmm512b32_er = 3_486

VFNMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AC /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_xmm_k1z_xmm_xmmm128b64 = 3_487

VFNMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_ymm_k1z_ymm_ymmm256b64 = 3_488

VFNMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_zmm_k1z_zmm_zmmm512b64_er = 3_489

VFNMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AC /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd213ss_xmm_xmm_xmmm32 = 3_490

VFNMADD213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AD /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213sd_xmm_xmm_xmmm64 = 3_491

VFNMADD213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AD /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd213ss_xmm_k1z_xmm_xmmm32_er = 3_492

VFNMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AD /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213sd_xmm_k1z_xmm_xmmm64_er = 3_493

VFNMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AD /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub213ps_xmm_xmm_xmmm128 = 3_494

VFNMSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213ps_ymm_ymm_ymmm256 = 3_495

VFNMSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213pd_xmm_xmm_xmmm128 = 3_496

VFNMSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213pd_ymm_ymm_ymmm256 = 3_497

VFNMSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AE /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub213ps_xmm_k1z_xmm_xmmm128b32 = 3_498

VFNMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213ps_ymm_k1z_ymm_ymmm256b32 = 3_499

VFNMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213ps_zmm_k1z_zmm_zmmm512b32_er = 3_500

VFNMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AE /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_xmm_k1z_xmm_xmmm128b64 = 3_501

VFNMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_ymm_k1z_ymm_ymmm256b64 = 3_502

VFNMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_zmm_k1z_zmm_zmmm512b64_er = 3_503

VFNMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AE /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub213ss_xmm_xmm_xmmm32 = 3_504

VFNMSUB213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AF /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213sd_xmm_xmm_xmmm64 = 3_505

VFNMSUB213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AF /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub213ss_xmm_k1z_xmm_xmmm32_er = 3_506

VFNMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AF /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213sd_xmm_k1z_xmm_xmmm64_er = 3_507

VFNMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AF /r

AVX512F

16/32/64-bit

§

EVEX_Vpmadd52luq_xmm_k1z_xmm_xmmm128b64 = 3_508

VPMADD52LUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B4 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52luq_ymm_k1z_ymm_ymmm256b64 = 3_509

VPMADD52LUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B4 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52luq_zmm_k1z_zmm_zmmm512b64 = 3_510

VPMADD52LUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 B4 /r

AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_xmm_k1z_xmm_xmmm128b64 = 3_511

VPMADD52HUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B5 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_ymm_k1z_ymm_ymmm256b64 = 3_512

VPMADD52HUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B5 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_zmm_k1z_zmm_zmmm512b64 = 3_513

VPMADD52HUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 B5 /r

AVX512_IFMA

16/32/64-bit
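
VPMADD52LUQ and VPMADD52HUQ multiply the low 52 bits of each pair of qword lanes and add either the low or the high 52 bits of the 104-bit product to the full 64-bit accumulator. A minimal scalar sketch of one lane (a model of the documented behavior, not library code):

```rust
/// Scalar model of one 64-bit lane of VPMADD52LUQ (`high = false`)
/// and VPMADD52HUQ (`high = true`).
fn madd52(acc: u64, a: u64, b: u64, high: bool) -> u64 {
    const MASK52: u64 = (1 << 52) - 1;
    let prod = (a & MASK52) as u128 * (b & MASK52) as u128; // up to 104 bits
    let part = if high { (prod >> 52) as u64 } else { prod as u64 } & MASK52;
    acc.wrapping_add(part)
}

fn main() {
    assert_eq!(madd52(10, 3, 4, false), 22); // 10 + low 52 bits of 12
    assert_eq!(madd52(10, 3, 4, true), 10);  // 12 >> 52 == 0
}
```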

§

VEX_Vfmaddsub231ps_xmm_xmm_xmmm128 = 3_514

VFMADDSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231ps_ymm_ymm_ymmm256 = 3_515

VFMADDSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231pd_xmm_xmm_xmmm128 = 3_516

VFMADDSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231pd_ymm_ymm_ymmm256 = 3_517

VFMADDSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B6 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_518

VFMADDSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_519

VFMADDSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_520

VFMADDSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B6 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_521

VFMADDSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_522

VFMADDSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_523

VFMADDSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B6 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd231ps_xmm_xmm_xmmm128 = 3_524

VFMSUBADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231ps_ymm_ymm_ymmm256 = 3_525

VFMSUBADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231pd_xmm_xmm_xmmm128 = 3_526

VFMSUBADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231pd_ymm_ymm_ymmm256 = 3_527

VFMSUBADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B7 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_528

VFMSUBADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_529

VFMSUBADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_530

VFMSUBADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B7 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_531

VFMSUBADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_532

VFMSUBADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_533

VFMSUBADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B7 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd231ps_xmm_xmm_xmmm128 = 3_534

VFMADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231ps_ymm_ymm_ymmm256 = 3_535

VFMADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231pd_xmm_xmm_xmmm128 = 3_536

VFMADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231pd_ymm_ymm_ymmm256 = 3_537

VFMADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B8 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_538

VFMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_539

VFMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_540

VFMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B8 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_541

VFMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_542

VFMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_543

VFMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B8 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd231ss_xmm_xmm_xmmm32 = 3_544

VFMADD231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 B9 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231sd_xmm_xmm_xmmm64 = 3_545

VFMADD231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 B9 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd231ss_xmm_k1z_xmm_xmmm32_er = 3_546

VFMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 B9 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd231sd_xmm_k1z_xmm_xmmm64_er = 3_547

VFMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 B9 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub231ps_xmm_xmm_xmmm128 = 3_548

VFMSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231ps_ymm_ymm_ymmm256 = 3_549

VFMSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231pd_xmm_xmm_xmmm128 = 3_550

VFMSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231pd_ymm_ymm_ymmm256 = 3_551

VFMSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BA /r

FMA

16/32/64-bit

§

EVEX_Vfmsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_552

VFMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_553

VFMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_554

VFMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BA /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_555

VFMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_556

VFMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_557

VFMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BA /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub231ss_xmm_xmm_xmmm32 = 3_558

VFMSUB231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BB /r

FMA

16/32/64-bit

§

VEX_Vfmsub231sd_xmm_xmm_xmmm64 = 3_559

VFMSUB231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BB /r

FMA

16/32/64-bit

§

EVEX_Vfmsub231ss_xmm_k1z_xmm_xmmm32_er = 3_560

VFMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BB /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub231sd_xmm_k1z_xmm_xmmm64_er = 3_561

VFMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BB /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd231ps_xmm_xmm_xmmm128 = 3_562

VFNMADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231ps_ymm_ymm_ymmm256 = 3_563

VFNMADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231pd_xmm_xmm_xmmm128 = 3_564

VFNMADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231pd_ymm_ymm_ymmm256 = 3_565

VFNMADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BC /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd231ps_xmm_k1z_xmm_xmmm128b32 = 3_566

VFNMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231ps_ymm_k1z_ymm_ymmm256b32 = 3_567

VFNMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231ps_zmm_k1z_zmm_zmmm512b32_er = 3_568

VFNMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BC /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_xmm_k1z_xmm_xmmm128b64 = 3_569

VFNMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_ymm_k1z_ymm_ymmm256b64 = 3_570

VFNMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_zmm_k1z_zmm_zmmm512b64_er = 3_571

VFNMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BC /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd231ss_xmm_xmm_xmmm32 = 3_572

VFNMADD231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BD /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231sd_xmm_xmm_xmmm64 = 3_573

VFNMADD231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BD /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd231ss_xmm_k1z_xmm_xmmm32_er = 3_574

VFNMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BD /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231sd_xmm_k1z_xmm_xmmm64_er = 3_575

VFNMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BD /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub231ps_xmm_xmm_xmmm128 = 3_576

VFNMSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231ps_ymm_ymm_ymmm256 = 3_577

VFNMSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231pd_xmm_xmm_xmmm128 = 3_578

VFNMSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231pd_ymm_ymm_ymmm256 = 3_579

VFNMSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BE /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub231ps_xmm_k1z_xmm_xmmm128b32 = 3_580

VFNMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231ps_ymm_k1z_ymm_ymmm256b32 = 3_581

VFNMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231ps_zmm_k1z_zmm_zmmm512b32_er = 3_582

VFNMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BE /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_xmm_k1z_xmm_xmmm128b64 = 3_583

VFNMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_ymm_k1z_ymm_ymmm256b64 = 3_584

VFNMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_zmm_k1z_zmm_zmmm512b64_er = 3_585

VFNMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BE /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub231ss_xmm_xmm_xmmm32 = 3_586

VFNMSUB231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BF /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231sd_xmm_xmm_xmmm64 = 3_587

VFNMSUB231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BF /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub231ss_xmm_k1z_xmm_xmmm32_er = 3_588

VFNMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BF /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231sd_xmm_k1z_xmm_xmmm64_er = 3_589

VFNMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BF /r

AVX512F

16/32/64-bit

§

EVEX_Vpconflictd_xmm_k1z_xmmm128b32 = 3_590

VPCONFLICTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictd_ymm_k1z_ymmm256b32 = 3_591

VPCONFLICTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictd_zmm_k1z_zmmm512b32 = 3_592

VPCONFLICTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 C4 /r

AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_xmm_k1z_xmmm128b64 = 3_593

VPCONFLICTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_ymm_k1z_ymmm256b64 = 3_594

VPCONFLICTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_zmm_k1z_zmmm512b64 = 3_595

VPCONFLICTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 C4 /r

AVX512CD

16/32/64-bit

§

EVEX_Vgatherpf0dps_vm32z_k1 = 3_596

VGATHERPF0DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0dpd_vm32y_k1 = 3_597

VGATHERPF0DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1dps_vm32z_k1 = 3_598

VGATHERPF1DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1dpd_vm32y_k1 = 3_599

VGATHERPF1DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0dps_vm32z_k1 = 3_600

VSCATTERPF0DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0dpd_vm32y_k1 = 3_601

VSCATTERPF0DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1dps_vm32z_k1 = 3_602

VSCATTERPF1DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1dpd_vm32y_k1 = 3_603

VSCATTERPF1DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0qps_vm64z_k1 = 3_604

VGATHERPF0QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0qpd_vm64z_k1 = 3_605

VGATHERPF0QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1qps_vm64z_k1 = 3_606

VGATHERPF1QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1qpd_vm64z_k1 = 3_607

VGATHERPF1QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0qps_vm64z_k1 = 3_608

VSCATTERPF0QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0qpd_vm64z_k1 = 3_609

VSCATTERPF0QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1qps_vm64z_k1 = 3_610

VSCATTERPF1QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1qpd_vm64z_k1 = 3_611

VSCATTERPF1QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /6 /vsib

AVX512PF

16/32/64-bit

§

Sha1nexte_xmm_xmmm128 = 3_612

SHA1NEXTE xmm1, xmm2/m128

NP 0F 38 C8 /r

SHA

16/32/64-bit
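
"NP" in the opcode column means the instruction must be encoded without a 66/F2/F3 prefix. A hedged sketch that decodes the SHA1NEXTE form above from a hand-assembled byte string and prints it with the crate's NASM formatter (the exact formatted text depends on formatter options):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Formatter, NasmFormatter};

fn main() {
    // NP 0F 38 C8 /r with ModRM C1 -> SHA1NEXTE xmm0, xmm1 (hand-assembled).
    let bytes = [0x0F, 0x38, 0xC8, 0xC1];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Sha1nexte_xmm_xmmm128);

    let mut formatter = NasmFormatter::new();
    let mut output = String::new();
    formatter.format(&instr, &mut output);
    println!("{}", output); // expected: something like "sha1nexte xmm0,xmm1"
}
```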

§

EVEX_Vexp2ps_zmm_k1z_zmmm512b32_sae = 3_613

VEXP2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 C8 /r

AVX512ER

16/32/64-bit

§

EVEX_Vexp2pd_zmm_k1z_zmmm512b64_sae = 3_614

VEXP2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 C8 /r

AVX512ER

16/32/64-bit

§

Sha1msg1_xmm_xmmm128 = 3_615

SHA1MSG1 xmm1, xmm2/m128

NP 0F 38 C9 /r

SHA

16/32/64-bit

§

Sha1msg2_xmm_xmmm128 = 3_616

SHA1MSG2 xmm1, xmm2/m128

NP 0F 38 CA /r

SHA

16/32/64-bit

§

EVEX_Vrcp28ps_zmm_k1z_zmmm512b32_sae = 3_617

VRCP28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 CA /r

AVX512ER

16/32/64-bit

§

EVEX_Vrcp28pd_zmm_k1z_zmmm512b64_sae = 3_618

VRCP28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 CA /r

AVX512ER

16/32/64-bit

§

Sha256rnds2_xmm_xmmm128 = 3_619

SHA256RNDS2 xmm1, xmm2/m128, <XMM0>

NP 0F 38 CB /r

SHA

16/32/64-bit

§

EVEX_Vrcp28ss_xmm_k1z_xmm_xmmm32_sae = 3_620

VRCP28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 CB /r

AVX512ER

16/32/64-bit

§

EVEX_Vrcp28sd_xmm_k1z_xmm_xmmm64_sae = 3_621

VRCP28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 CB /r

AVX512ER

16/32/64-bit

§

Sha256msg1_xmm_xmmm128 = 3_622

SHA256MSG1 xmm1, xmm2/m128

NP 0F 38 CC /r

SHA

16/32/64-bit

§

EVEX_Vrsqrt28ps_zmm_k1z_zmmm512b32_sae = 3_623

VRSQRT28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 CC /r

AVX512ER

16/32/64-bit

§

EVEX_Vrsqrt28pd_zmm_k1z_zmmm512b64_sae = 3_624

VRSQRT28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 CC /r

AVX512ER

16/32/64-bit

§

Sha256msg2_xmm_xmmm128 = 3_625

SHA256MSG2 xmm1, xmm2/m128

NP 0F 38 CD /r

SHA

16/32/64-bit

§

EVEX_Vrsqrt28ss_xmm_k1z_xmm_xmmm32_sae = 3_626

VRSQRT28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 CD /r

AVX512ER

16/32/64-bit

§

EVEX_Vrsqrt28sd_xmm_k1z_xmm_xmmm64_sae = 3_627

VRSQRT28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 CD /r

AVX512ER

16/32/64-bit

§

Gf2p8mulb_xmm_xmmm128 = 3_628

GF2P8MULB xmm1, xmm2/m128

66 0F 38 CF /r

GFNI

16/32/64-bit

§

VEX_Vgf2p8mulb_xmm_xmm_xmmm128 = 3_629

VGF2P8MULB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 CF /r

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8mulb_ymm_ymm_ymmm256 = 3_630

VGF2P8MULB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 CF /r

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_xmm_k1z_xmm_xmmm128 = 3_631

VGF2P8MULB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 CF /r

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_ymm_k1z_ymm_ymmm256 = 3_632

VGF2P8MULB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 CF /r

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_zmm_k1z_zmm_zmmm512 = 3_633

VGF2P8MULB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 CF /r

AVX512F and GFNI

16/32/64-bit

§

Aesimc_xmm_xmmm128 = 3_634

AESIMC xmm1, xmm2/m128

66 0F 38 DB /r

AES

16/32/64-bit

§

VEX_Vaesimc_xmm_xmmm128 = 3_635

VAESIMC xmm1, xmm2/m128

VEX.128.66.0F38.WIG DB /r

AES and AVX

16/32/64-bit

§

Aesenc_xmm_xmmm128 = 3_636

AESENC xmm1, xmm2/m128

66 0F 38 DC /r

AES

16/32/64-bit
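
AESENC appears below in legacy (AES), VEX (AES and AVX, or VAES for the 256-bit form) and EVEX (VAES) encodings, each with its own `Code` variant. A hedged sketch decoding the legacy form from a hand-assembled byte string:

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Mnemonic};

fn main() {
    // 66 0F 38 DC /r with ModRM C1 -> AESENC xmm0, xmm1 (hand-assembled).
    let bytes = [0x66, 0x0F, 0x38, 0xDC, 0xC1];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Aesenc_xmm_xmmm128);
    assert_eq!(instr.mnemonic(), Mnemonic::Aesenc);
}
```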

§

VEX_Vaesenc_xmm_xmm_xmmm128 = 3_637

VAESENC xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DC /r

AES and AVX

16/32/64-bit

§

VEX_Vaesenc_ymm_ymm_ymmm256 = 3_638

VAESENC ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DC /r

VAES

16/32/64-bit

§

EVEX_Vaesenc_xmm_xmm_xmmm128 = 3_639

VAESENC xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DC /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenc_ymm_ymm_ymmm256 = 3_640

VAESENC ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DC /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenc_zmm_zmm_zmmm512 = 3_641

VAESENC zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DC /r

AVX512F and VAES

16/32/64-bit

§

Aesenclast_xmm_xmmm128 = 3_642

AESENCLAST xmm1, xmm2/m128

66 0F 38 DD /r

AES

16/32/64-bit

§

VEX_Vaesenclast_xmm_xmm_xmmm128 = 3_643

VAESENCLAST xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DD /r

AES and AVX

16/32/64-bit

§

VEX_Vaesenclast_ymm_ymm_ymmm256 = 3_644

VAESENCLAST ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DD /r

VAES

16/32/64-bit

§

EVEX_Vaesenclast_xmm_xmm_xmmm128 = 3_645

VAESENCLAST xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DD /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenclast_ymm_ymm_ymmm256 = 3_646

VAESENCLAST ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DD /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenclast_zmm_zmm_zmmm512 = 3_647

VAESENCLAST zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DD /r

AVX512F and VAES

16/32/64-bit

§

Aesdec_xmm_xmmm128 = 3_648

AESDEC xmm1, xmm2/m128

66 0F 38 DE /r

AES

16/32/64-bit

§

VEX_Vaesdec_xmm_xmm_xmmm128 = 3_649

VAESDEC xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DE /r

AES and AVX

16/32/64-bit

§

VEX_Vaesdec_ymm_ymm_ymmm256 = 3_650

VAESDEC ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DE /r

VAES

16/32/64-bit

§

EVEX_Vaesdec_xmm_xmm_xmmm128 = 3_651

VAESDEC xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DE /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdec_ymm_ymm_ymmm256 = 3_652

VAESDEC ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DE /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdec_zmm_zmm_zmmm512 = 3_653

VAESDEC zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DE /r

AVX512F and VAES

16/32/64-bit

§

Aesdeclast_xmm_xmmm128 = 3_654

AESDECLAST xmm1, xmm2/m128

66 0F 38 DF /r

AES

16/32/64-bit

§

VEX_Vaesdeclast_xmm_xmm_xmmm128 = 3_655

VAESDECLAST xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DF /r

AES and AVX

16/32/64-bit

§

VEX_Vaesdeclast_ymm_ymm_ymmm256 = 3_656

VAESDECLAST ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DF /r

VAES

16/32/64-bit

§

EVEX_Vaesdeclast_xmm_xmm_xmmm128 = 3_657

VAESDECLAST xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DF /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdeclast_ymm_ymm_ymmm256 = 3_658

VAESDECLAST ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DF /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdeclast_zmm_zmm_zmmm512 = 3_659

VAESDECLAST zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DF /r

AVX512F and VAES

16/32/64-bit

§

Movbe_r16_m16 = 3_660

MOVBE r16, m16

o16 0F 38 F0 /r

MOVBE

16/32/64-bit

§

Movbe_r32_m32 = 3_661

MOVBE r32, m32

o32 0F 38 F0 /r

MOVBE

16/32/64-bit

§

Movbe_r64_m64 = 3_662

MOVBE r64, m64

o64 0F 38 F0 /r

MOVBE

64-bit

§

Crc32_r32_rm8 = 3_663

CRC32 r32, r/m8

F2 0F 38 F0 /r

SSE4.2

16/32/64-bit
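
MOVBE and CRC32 share the 0F 38 F0 opcode byte; the F2 mandatory prefix is what selects CRC32, and MOVBE additionally requires a memory operand. A hedged sketch, with hand-assembled byte strings, showing the prefix picking the `Code`:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn decode_one(bytes: &[u8]) -> Code {
    Decoder::new(64, bytes, DecoderOptions::NONE).decode().code()
}

fn main() {
    // 0F 38 F0 06 -> MOVBE eax, [rsi]; F2 0F 38 F0 C1 -> CRC32 eax, cl
    // (both hand-assembled for illustration).
    assert_eq!(decode_one(&[0x0F, 0x38, 0xF0, 0x06]), Code::Movbe_r32_m32);
    assert_eq!(decode_one(&[0xF2, 0x0F, 0x38, 0xF0, 0xC1]), Code::Crc32_r32_rm8);
}
```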

§

Crc32_r64_rm8 = 3_664

CRC32 r64, r/m8

F2 o64 0F 38 F0 /r

SSE4.2

64-bit

§

Movbe_m16_r16 = 3_665

MOVBE m16, r16

o16 0F 38 F1 /r

MOVBE

16/32/64-bit

§

Movbe_m32_r32 = 3_666

MOVBE m32, r32

o32 0F 38 F1 /r

MOVBE

16/32/64-bit

§

Movbe_m64_r64 = 3_667

MOVBE m64, r64

o64 0F 38 F1 /r

MOVBE

64-bit

§

Crc32_r32_rm16 = 3_668

CRC32 r32, r/m16

o16 F2 0F 38 F1 /r

SSE4.2

16/32/64-bit

§

Crc32_r32_rm32 = 3_669

CRC32 r32, r/m32

o32 F2 0F 38 F1 /r

SSE4.2

16/32/64-bit

§

Crc32_r64_rm64 = 3_670

CRC32 r64, r/m64

F2 o64 0F 38 F1 /r

SSE4.2

64-bit

§

VEX_Andn_r32_r32_rm32 = 3_671

ANDN r32a, r32b, r/m32

VEX.LZ.0F38.W0 F2 /r

BMI1

16/32/64-bit

§

VEX_Andn_r64_r64_rm64 = 3_672

ANDN r64a, r64b, r/m64

VEX.LZ.0F38.W1 F2 /r

BMI1

64-bit
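
ANDN computes dst = !src1 & src2 on general-purpose registers and is VEX-encoded; "LZ" means VEX.L must be zero. A hedged sketch decoding the 32-bit form from a hand-assembled byte string and reading back the three register operands:

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

fn main() {
    // C4 E2 70 F2 C2 should be ANDN eax, ecx, edx (hand-assembled).
    let bytes = [0xC4, 0xE2, 0x70, 0xF2, 0xC2];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::VEX_Andn_r32_r32_rm32);
    assert_eq!(instr.op0_register(), Register::EAX);
    assert_eq!(instr.op1_register(), Register::ECX);
    assert_eq!(instr.op2_register(), Register::EDX);
}
```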

§

VEX_Blsr_r32_rm32 = 3_673

BLSR r32, r/m32

VEX.LZ.0F38.W0 F3 /1

BMI1

16/32/64-bit

§

VEX_Blsr_r64_rm64 = 3_674

BLSR r64, r/m64

VEX.LZ.0F38.W1 F3 /1

BMI1

64-bit

§

VEX_Blsmsk_r32_rm32 = 3_675

BLSMSK r32, r/m32

VEX.LZ.0F38.W0 F3 /2

BMI1

16/32/64-bit

§

VEX_Blsmsk_r64_rm64 = 3_676

BLSMSK r64, r/m64

VEX.LZ.0F38.W1 F3 /2

BMI1

64-bit

§

VEX_Blsi_r32_rm32 = 3_677

BLSI r32, r/m32

VEX.LZ.0F38.W0 F3 /3

BMI1

16/32/64-bit

§

VEX_Blsi_r64_rm64 = 3_678

BLSI r64, r/m64

VEX.LZ.0F38.W1 F3 /3

BMI1

64-bit

§

VEX_Bzhi_r32_rm32_r32 = 3_679

BZHI r32a, r/m32, r32b

VEX.LZ.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Bzhi_r64_rm64_r64 = 3_680

BZHI r64a, r/m64, r64b

VEX.LZ.0F38.W1 F5 /r

BMI2

64-bit

§

Wrussd_m32_r32 = 3_681

WRUSSD m32, r32

66 0F 38 F5 /r

CET_SS

16/32/64-bit

§

Wrussq_m64_r64 = 3_682

WRUSSQ m64, r64

66 o64 0F 38 F5 /r

CET_SS

64-bit

§

VEX_Pext_r32_r32_rm32 = 3_683

PEXT r32a, r32b, r/m32

VEX.LZ.F3.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Pext_r64_r64_rm64 = 3_684

PEXT r64a, r64b, r/m64

VEX.LZ.F3.0F38.W1 F5 /r

BMI2

64-bit

§

VEX_Pdep_r32_r32_rm32 = 3_685

PDEP r32a, r32b, r/m32

VEX.LZ.F2.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Pdep_r64_r64_rm64 = 3_686

PDEP r64a, r64b, r/m64

VEX.LZ.F2.0F38.W1 F5 /r

BMI2

64-bit

§

Wrssd_m32_r32 = 3_687

WRSSD m32, r32

NP 0F 38 F6 /r

CET_SS

16/32/64-bit

§

Wrssq_m64_r64 = 3_688

WRSSQ m64, r64

NP o64 0F 38 F6 /r

CET_SS

64-bit

§

Adcx_r32_rm32 = 3_689

ADCX r32, r/m32

66 0F 38 F6 /r

ADX

16/32/64-bit

§

Adcx_r64_rm64 = 3_690

ADCX r64, r/m64

66 o64 0F 38 F6 /r

ADX

64-bit

§

Adox_r32_rm32 = 3_691

ADOX r32, r/m32

F3 0F 38 F6 /r

ADX

16/32/64-bit

§

Adox_r64_rm64 = 3_692

ADOX r64, r/m64

F3 o64 0F 38 F6 /r

ADX

64-bit
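
ADCX and ADOX share the 0F 38 F6 opcode (the 66 prefix selects ADCX, F3 selects ADOX); both add with a carry-in but update only CF (ADCX) or only OF (ADOX), so two carry chains can be interleaved. A minimal scalar sketch of one 64-bit ADCX step (a model of the documented behavior, not library code):

```rust
/// Scalar model of ADCX r64, r/m64: dst = dst + src + CF, returning the new CF.
/// ADOX performs the same addition but carried through OF instead of CF.
fn adcx(dst: u64, src: u64, cf: bool) -> (u64, bool) {
    let (t, c1) = dst.overflowing_add(src);
    let (r, c2) = t.overflowing_add(cf as u64);
    (r, c1 || c2)
}

fn main() {
    assert_eq!(adcx(u64::MAX, 1, false), (0, true));
    assert_eq!(adcx(5, 7, true), (13, false));
}
```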

§

VEX_Mulx_r32_r32_rm32 = 3_693

MULX r32a, r32b, r/m32

VEX.LZ.F2.0F38.W0 F6 /r

BMI2

16/32/64-bit

§

VEX_Mulx_r64_r64_rm64 = 3_694

MULX r64a, r64b, r/m64

VEX.LZ.F2.0F38.W1 F6 /r

BMI2

64-bit

§

VEX_Bextr_r32_rm32_r32 = 3_695

BEXTR r32a, r/m32, r32b

VEX.LZ.0F38.W0 F7 /r

BMI1

16/32/64-bit

§

VEX_Bextr_r64_rm64_r64 = 3_696

BEXTR r64a, r/m64, r64b

VEX.LZ.0F38.W1 F7 /r

BMI1

64-bit

§

VEX_Shlx_r32_rm32_r32 = 3_697

SHLX r32a, r/m32, r32b

VEX.LZ.66.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Shlx_r64_rm64_r64 = 3_698

SHLX r64a, r/m64, r64b

VEX.LZ.66.0F38.W1 F7 /r

BMI2

64-bit

§

VEX_Sarx_r32_rm32_r32 = 3_699

SARX r32a, r/m32, r32b

VEX.LZ.F3.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Sarx_r64_rm64_r64 = 3_700

SARX r64a, r/m64, r64b

VEX.LZ.F3.0F38.W1 F7 /r

BMI2

64-bit

§

VEX_Shrx_r32_rm32_r32 = 3_701

SHRX r32a, r/m32, r32b

VEX.LZ.F2.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Shrx_r64_rm64_r64 = 3_702

SHRX r64a, r/m64, r64b

VEX.LZ.F2.0F38.W1 F7 /r

BMI2

64-bit

§

Movdir64b_r16_m512 = 3_703

MOVDIR64B r16, m512

a16 66 0F 38 F8 /r

MOVDIR64B

16/32-bit

§

Movdir64b_r32_m512 = 3_704

MOVDIR64B r32, m512

a32 66 0F 38 F8 /r

MOVDIR64B

16/32/64-bit

§

Movdir64b_r64_m512 = 3_705

MOVDIR64B r64, m512

a64 66 0F 38 F8 /r

MOVDIR64B

64-bit

§

Enqcmds_r16_m512 = 3_706

ENQCMDS r16, m512

a16 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32-bit

§

Enqcmds_r32_m512 = 3_707

ENQCMDS r32, m512

a32 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32/64-bit

§

Enqcmds_r64_m512 = 3_708

ENQCMDS r64, m512

a64 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

64-bit

§

Enqcmd_r16_m512 = 3_709

ENQCMD r16, m512

a16 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32-bit

§

Enqcmd_r32_m512 = 3_710

ENQCMD r32, m512

a32 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32/64-bit

§

Enqcmd_r64_m512 = 3_711

ENQCMD r64, m512

a64 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

64-bit

§

Movdiri_m32_r32 = 3_712

MOVDIRI m32, r32

NP 0F 38 F9 /r

MOVDIRI

16/32/64-bit

§

Movdiri_m64_r64 = 3_713

MOVDIRI m64, r64

NP o64 0F 38 F9 /r

MOVDIRI

64-bit

§

VEX_Vpermq_ymm_ymmm256_imm8 = 3_714

VPERMQ ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W1 00 /r ib

AVX2

16/32/64-bit
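
VPERMQ is the first 0F3A-map entry in this stretch; like the rest of the 0F3A forms it takes a trailing imm8 ("ib"), which here selects the qword permutation (0x1B reverses the four qwords). A hedged decoding sketch with a hand-assembled byte string:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // C4 E3 FD 00 C1 1B should be VPERMQ ymm0, ymm1, 0x1B
    // (VEX.256.66.0F3A.W1 00 /r ib, hand-assembled for illustration).
    let bytes = [0xC4, 0xE3, 0xFD, 0x00, 0xC1, 0x1B];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::VEX_Vpermq_ymm_ymmm256_imm8);
    assert_eq!(instr.op_count(), 3); // dest, source, imm8
}
```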

§

EVEX_Vpermq_ymm_k1z_ymmm256b64_imm8 = 3_715

VPERMQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 00 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermq_zmm_k1z_zmmm512b64_imm8 = 3_716

VPERMQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 00 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermpd_ymm_ymmm256_imm8 = 3_717

VPERMPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W1 01 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpermpd_ymm_k1z_ymmm256b64_imm8 = 3_718

VPERMPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 01 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermpd_zmm_k1z_zmmm512b64_imm8 = 3_719

VPERMPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 01 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpblendd_xmm_xmm_xmmm128_imm8 = 3_720

VPBLENDD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W0 02 /r ib

AVX2

16/32/64-bit

§

VEX_Vpblendd_ymm_ymm_ymmm256_imm8 = 3_721

VPBLENDD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 02 /r ib

AVX2

16/32/64-bit

§

EVEX_Valignd_xmm_k1z_xmm_xmmm128b32_imm8 = 3_722

VALIGND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignd_ymm_k1z_ymm_ymmm256b32_imm8 = 3_723

VALIGND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignd_zmm_k1z_zmm_zmmm512b32_imm8 = 3_724

VALIGND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 03 /r ib

AVX512F

16/32/64-bit

§

EVEX_Valignq_xmm_k1z_xmm_xmmm128b64_imm8 = 3_725

VALIGNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignq_ymm_k1z_ymm_ymmm256b64_imm8 = 3_726

VALIGNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignq_zmm_k1z_zmm_zmmm512b64_imm8 = 3_727

VALIGNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 03 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermilps_xmm_xmmm128_imm8 = 3_728

VPERMILPS xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 04 /r ib

AVX

16/32/64-bit

§

VEX_Vpermilps_ymm_ymmm256_imm8 = 3_729

VPERMILPS ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W0 04 /r ib

AVX

16/32/64-bit

§

EVEX_Vpermilps_xmm_k1z_xmmm128b32_imm8 = 3_730

VPERMILPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 04 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_ymm_k1z_ymmm256b32_imm8 = 3_731

VPERMILPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 04 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_zmm_k1z_zmmm512b32_imm8 = 3_732

VPERMILPS zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 04 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermilpd_xmm_xmmm128_imm8 = 3_733

VPERMILPD xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 05 /r ib

AVX

16/32/64-bit

§

VEX_Vpermilpd_ymm_ymmm256_imm8 = 3_734

VPERMILPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W0 05 /r ib

AVX

16/32/64-bit

§

EVEX_Vpermilpd_xmm_k1z_xmmm128b64_imm8 = 3_735

VPERMILPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 05 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_ymm_k1z_ymmm256b64_imm8 = 3_736

VPERMILPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 05 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_zmm_k1z_zmmm512b64_imm8 = 3_737

VPERMILPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 05 /r ib

AVX512F

16/32/64-bit

§

VEX_Vperm2f128_ymm_ymm_ymmm256_imm8 = 3_738

VPERM2F128 ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 06 /r ib

AVX

16/32/64-bit

§

Roundps_xmm_xmmm128_imm8 = 3_739

ROUNDPS xmm1, xmm2/m128, imm8

66 0F 3A 08 /r ib

SSE4.1

16/32/64-bit
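
For ROUNDPS the imm8 selects the rounding mode: when bit 2 is set the MXCSR rounding mode is used, otherwise the low two bits choose nearest, down, up or truncate. A hedged decoding sketch for the SSE4.1 form above, byte string hand-assembled:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    // 66 0F 3A 08 C1 03 should be ROUNDPS xmm0, xmm1, 3 (3 = round toward zero).
    let bytes = [0x66, 0x0F, 0x3A, 0x08, 0xC1, 0x03];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Roundps_xmm_xmmm128_imm8);
}
```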

§

VEX_Vroundps_xmm_xmmm128_imm8 = 3_740

VROUNDPS xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 08 /r ib

AVX

16/32/64-bit

§

VEX_Vroundps_ymm_ymmm256_imm8 = 3_741

VROUNDPS ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.WIG 08 /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscaleps_xmm_k1z_xmmm128b32_imm8 = 3_742

VRNDSCALEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 08 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscaleps_ymm_k1z_ymmm256b32_imm8 = 3_743

VRNDSCALEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 08 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscaleps_zmm_k1z_zmmm512b32_imm8_sae = 3_744

VRNDSCALEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 08 /r ib

AVX512F

16/32/64-bit

§

Roundpd_xmm_xmmm128_imm8 = 3_745

ROUNDPD xmm1, xmm2/m128, imm8

66 0F 3A 09 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundpd_xmm_xmmm128_imm8 = 3_746

VROUNDPD xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 09 /r ib

AVX

16/32/64-bit

§

VEX_Vroundpd_ymm_ymmm256_imm8 = 3_747

VROUNDPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.WIG 09 /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscalepd_xmm_k1z_xmmm128b64_imm8 = 3_748

VRNDSCALEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 09 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscalepd_ymm_k1z_ymmm256b64_imm8 = 3_749

VRNDSCALEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 09 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscalepd_zmm_k1z_zmmm512b64_imm8_sae = 3_750

VRNDSCALEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 09 /r ib

AVX512F

16/32/64-bit

§

Roundss_xmm_xmmm32_imm8 = 3_751

ROUNDSS xmm1, xmm2/m32, imm8

66 0F 3A 0A /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundss_xmm_xmm_xmmm32_imm8 = 3_752

VROUNDSS xmm1, xmm2, xmm3/m32, imm8

VEX.LIG.66.0F3A.WIG 0A /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscaless_xmm_k1z_xmm_xmmm32_imm8_sae = 3_753

VRNDSCALESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 0A /r ib

AVX512F

16/32/64-bit

§

Roundsd_xmm_xmmm64_imm8 = 3_754

ROUNDSD xmm1, xmm2/m64, imm8

66 0F 3A 0B /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundsd_xmm_xmm_xmmm64_imm8 = 3_755

VROUNDSD xmm1, xmm2, xmm3/m64, imm8

VEX.LIG.66.0F3A.WIG 0B /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscalesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_756

VRNDSCALESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 0B /r ib

AVX512F

16/32/64-bit

§

Blendps_xmm_xmmm128_imm8 = 3_757

BLENDPS xmm1, xmm2/m128, imm8

66 0F 3A 0C /r ib

SSE4.1

16/32/64-bit

§

VEX_Vblendps_xmm_xmm_xmmm128_imm8 = 3_758

VBLENDPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0C /r ib

AVX

16/32/64-bit

§

VEX_Vblendps_ymm_ymm_ymmm256_imm8 = 3_759

VBLENDPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0C /r ib

AVX

16/32/64-bit

§

Blendpd_xmm_xmmm128_imm8 = 3_760

BLENDPD xmm1, xmm2/m128, imm8

66 0F 3A 0D /r ib

SSE4.1

16/32/64-bit

§

VEX_Vblendpd_xmm_xmm_xmmm128_imm8 = 3_761

VBLENDPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0D /r ib

AVX

16/32/64-bit

§

VEX_Vblendpd_ymm_ymm_ymmm256_imm8 = 3_762

VBLENDPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0D /r ib

AVX

16/32/64-bit

§

Pblendw_xmm_xmmm128_imm8 = 3_763

PBLENDW xmm1, xmm2/m128, imm8

66 0F 3A 0E /r ib

SSE4.1

16/32/64-bit

§

VEX_Vpblendw_xmm_xmm_xmmm128_imm8 = 3_764

VPBLENDW xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0E /r ib

AVX

16/32/64-bit

§

VEX_Vpblendw_ymm_ymm_ymmm256_imm8 = 3_765

VPBLENDW ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0E /r ib

AVX2

16/32/64-bit

§

Palignr_mm_mmm64_imm8 = 3_766

PALIGNR mm1, mm2/m64, imm8

NP 0F 3A 0F /r ib

SSSE3

16/32/64-bit

§

Palignr_xmm_xmmm128_imm8 = 3_767

PALIGNR xmm1, xmm2/m128, imm8

66 0F 3A 0F /r ib

SSSE3

16/32/64-bit

§

VEX_Vpalignr_xmm_xmm_xmmm128_imm8 = 3_768

VPALIGNR xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0F /r ib

AVX

16/32/64-bit

§

VEX_Vpalignr_ymm_ymm_ymmm256_imm8 = 3_769

VPALIGNR ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0F /r ib

AVX2

16/32/64-bit

§

EVEX_Vpalignr_xmm_k1z_xmm_xmmm128_imm8 = 3_770

VPALIGNR xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.WIG 0F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpalignr_ymm_k1z_ymm_ymmm256_imm8 = 3_771

VPALIGNR ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.WIG 0F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpalignr_zmm_k1z_zmm_zmmm512_imm8 = 3_772

VPALIGNR zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.WIG 0F /r ib

AVX512BW

16/32/64-bit

§

Pextrb_r32m8_xmm_imm8 = 3_773

PEXTRB r32/m8, xmm2, imm8

66 0F 3A 14 /r ib

SSE4.1

16/32/64-bit

§

Pextrb_r64m8_xmm_imm8 = 3_774

PEXTRB r64/m8, xmm2, imm8

66 o64 0F 3A 14 /r ib

SSE4.1

64-bit

§

VEX_Vpextrb_r32m8_xmm_imm8 = 3_775

VPEXTRB r32/m8, xmm2, imm8

VEX.128.66.0F3A.W0 14 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrb_r64m8_xmm_imm8 = 3_776

VPEXTRB r64/m8, xmm2, imm8

VEX.128.66.0F3A.W1 14 /r ib

AVX

64-bit

§

EVEX_Vpextrb_r32m8_xmm_imm8 = 3_777

VPEXTRB r32/m8, xmm2, imm8

EVEX.128.66.0F3A.W0 14 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrb_r64m8_xmm_imm8 = 3_778

VPEXTRB r64/m8, xmm2, imm8

EVEX.128.66.0F3A.W1 14 /r ib

AVX512BW

64-bit

§

Pextrw_r32m16_xmm_imm8 = 3_779

PEXTRW r32/m16, xmm, imm8

66 0F 3A 15 /r ib

SSE4.1

16/32/64-bit

§

Pextrw_r64m16_xmm_imm8 = 3_780

PEXTRW r64/m16, xmm, imm8

66 o64 0F 3A 15 /r ib

SSE4.1

64-bit

§

VEX_Vpextrw_r32m16_xmm_imm8 = 3_781

VPEXTRW r32/m16, xmm2, imm8

VEX.128.66.0F3A.W0 15 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrw_r64m16_xmm_imm8 = 3_782

VPEXTRW r64/m16, xmm2, imm8

VEX.128.66.0F3A.W1 15 /r ib

AVX

64-bit

§

EVEX_Vpextrw_r32m16_xmm_imm8 = 3_783

VPEXTRW r32/m16, xmm2, imm8

EVEX.128.66.0F3A.W0 15 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrw_r64m16_xmm_imm8 = 3_784

VPEXTRW r64/m16, xmm2, imm8

EVEX.128.66.0F3A.W1 15 /r ib

AVX512BW

64-bit

§

Pextrd_rm32_xmm_imm8 = 3_785

PEXTRD r/m32, xmm2, imm8

66 0F 3A 16 /r ib

SSE4.1

16/32/64-bit
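
PEXTRD copies one dword lane of an XMM register to a 32-bit GPR or to memory, with the imm8 selecting the lane. A hedged decoding sketch for the SSE4.1 form above (byte string hand-assembled), reading back the destination and source registers:

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

fn main() {
    // 66 0F 3A 16 C0 02 should be PEXTRD eax, xmm0, 2 (hand-assembled).
    let bytes = [0x66, 0x0F, 0x3A, 0x16, 0xC0, 0x02];
    let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    assert_eq!(instr.code(), Code::Pextrd_rm32_xmm_imm8);
    assert_eq!(instr.op0_register(), Register::EAX);
    assert_eq!(instr.op1_register(), Register::XMM0);
}
```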

§

Pextrq_rm64_xmm_imm8 = 3_786

PEXTRQ r/m64, xmm2, imm8

66 o64 0F 3A 16 /r ib

SSE4.1

64-bit

§

VEX_Vpextrd_rm32_xmm_imm8 = 3_787

VPEXTRD r/m32, xmm2, imm8

VEX.128.66.0F3A.W0 16 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrq_rm64_xmm_imm8 = 3_788

VPEXTRQ r/m64, xmm2, imm8

VEX.128.66.0F3A.W1 16 /r ib

AVX

64-bit

§

EVEX_Vpextrd_rm32_xmm_imm8 = 3_789

VPEXTRD r/m32, xmm2, imm8

EVEX.128.66.0F3A.W0 16 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vpextrq_rm64_xmm_imm8 = 3_790

VPEXTRQ r/m64, xmm2, imm8

EVEX.128.66.0F3A.W1 16 /r ib

AVX512DQ

64-bit

§

Extractps_rm32_xmm_imm8 = 3_791

EXTRACTPS r/m32, xmm1, imm8

66 0F 3A 17 /r ib

SSE4.1

16/32/64-bit

§

Extractps_r64m32_xmm_imm8 = 3_792

EXTRACTPS r64/m32, xmm1, imm8

66 o64 0F 3A 17 /r ib

SSE4.1

64-bit

§

VEX_Vextractps_rm32_xmm_imm8 = 3_793

VEXTRACTPS r/m32, xmm1, imm8

VEX.128.66.0F3A.W0 17 /r ib

AVX

16/32/64-bit

§

VEX_Vextractps_r64m32_xmm_imm8 = 3_794

VEXTRACTPS r64/m32, xmm1, imm8

VEX.128.66.0F3A.W1 17 /r ib

AVX

64-bit

§

EVEX_Vextractps_rm32_xmm_imm8 = 3_795

VEXTRACTPS r/m32, xmm1, imm8

EVEX.128.66.0F3A.W0 17 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractps_r64m32_xmm_imm8 = 3_796

VEXTRACTPS r64/m32, xmm1, imm8

EVEX.128.66.0F3A.W1 17 /r ib

AVX512F

64-bit

§

VEX_Vinsertf128_ymm_ymm_xmmm128_imm8 = 3_797

VINSERTF128 ymm1, ymm2, xmm3/m128, imm8

VEX.256.66.0F3A.W0 18 /r ib

AVX

16/32/64-bit

§

EVEX_Vinsertf32x4_ymm_k1z_ymm_xmmm128_imm8 = 3_798

VINSERTF32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W0 18 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vinsertf32x4_zmm_k1z_zmm_xmmm128_imm8 = 3_799

VINSERTF32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W0 18 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vinsertf64x2_ymm_k1z_ymm_xmmm128_imm8 = 3_800

VINSERTF64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W1 18 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf64x2_zmm_k1z_zmm_xmmm128_imm8 = 3_801

VINSERTF64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W1 18 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vextractf128_xmmm128_ymm_imm8 = 3_802

VEXTRACTF128 xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 19 /r ib

AVX

16/32/64-bit

§

EVEX_Vextractf32x4_xmmm128_k1z_ymm_imm8 = 3_803

VEXTRACTF32X4 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 19 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vextractf32x4_xmmm128_k1z_zmm_imm8 = 3_804

VEXTRACTF32X4 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 19 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractf64x2_xmmm128_k1z_ymm_imm8 = 3_805

VEXTRACTF64X2 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W1 19 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vextractf64x2_xmmm128_k1z_zmm_imm8 = 3_806

VEXTRACTF64X2 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 19 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf32x8_zmm_k1z_zmm_ymmm256_imm8 = 3_807

VINSERTF32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W0 1A /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf64x4_zmm_k1z_zmm_ymmm256_imm8 = 3_808

VINSERTF64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W1 1A /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractf32x8_ymmm256_k1z_zmm_imm8 = 3_809

VEXTRACTF32X8 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 1B /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vextractf64x4_ymmm256_k1z_zmm_imm8 = 3_810

VEXTRACTF64X4 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 1B /r ib

AVX512F

16/32/64-bit

§

VEX_Vcvtps2ph_xmmm64_xmm_imm8 = 3_811

VCVTPS2PH xmm1/m64, xmm2, imm8

VEX.128.66.0F3A.W0 1D /r ib

F16C

16/32/64-bit

§

VEX_Vcvtps2ph_xmmm128_ymm_imm8 = 3_812

VCVTPS2PH xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 1D /r ib

F16C

16/32/64-bit

§

EVEX_Vcvtps2ph_xmmm64_k1z_xmm_imm8 = 3_813

VCVTPS2PH xmm1/m64 {k1}{z}, xmm2, imm8

EVEX.128.66.0F3A.W0 1D /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2ph_xmmm128_k1z_ymm_imm8 = 3_814

VCVTPS2PH xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 1D /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2ph_ymmm256_k1z_zmm_imm8_sae = 3_815

VCVTPS2PH ymm1/m256 {k1}{z}, zmm2{sae}, imm8

EVEX.512.66.0F3A.W0 1D /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_xmm_xmmm128b32_imm8 = 3_816

VPCMPUD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_ymm_ymmm256b32_imm8 = 3_817

VPCMPUD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_zmm_zmmm512b32_imm8 = 3_818

VPCMPUD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 1E /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_xmm_xmmm128b64_imm8 = 3_819

VPCMPUQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_ymm_ymmm256b64_imm8 = 3_820

VPCMPUQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_zmm_zmmm512b64_imm8 = 3_821

VPCMPUQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 1E /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_xmm_xmmm128b32_imm8 = 3_822

VPCMPD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_ymm_ymmm256b32_imm8 = 3_823

VPCMPD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_zmm_zmmm512b32_imm8 = 3_824

VPCMPD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 1F /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_xmm_xmmm128b64_imm8 = 3_825

VPCMPQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_ymm_ymmm256b64_imm8 = 3_826

VPCMPQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_zmm_zmmm512b64_imm8 = 3_827

VPCMPQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 1F /r ib

AVX512F

16/32/64-bit

§

Pinsrb_xmm_r32m8_imm8 = 3_828

PINSRB xmm1, r32/m8, imm8

66 0F 3A 20 /r ib

SSE4.1

16/32/64-bit

§

Pinsrb_xmm_r64m8_imm8 = 3_829

PINSRB xmm1, r64/m8, imm8

66 o64 0F 3A 20 /r ib

SSE4.1

64-bit

§

VEX_Vpinsrb_xmm_xmm_r32m8_imm8 = 3_830

VPINSRB xmm1, xmm2, r32/m8, imm8

VEX.128.66.0F3A.W0 20 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrb_xmm_xmm_r64m8_imm8 = 3_831

VPINSRB xmm1, xmm2, r64/m8, imm8

VEX.128.66.0F3A.W1 20 /r ib

AVX

64-bit

§

EVEX_Vpinsrb_xmm_xmm_r32m8_imm8 = 3_832

VPINSRB xmm1, xmm2, r32/m8, imm8

EVEX.128.66.0F3A.W0 20 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpinsrb_xmm_xmm_r64m8_imm8 = 3_833

VPINSRB xmm1, xmm2, r64/m8, imm8

EVEX.128.66.0F3A.W1 20 /r ib

AVX512BW

64-bit

§

Insertps_xmm_xmmm32_imm8 = 3_834

INSERTPS xmm1, xmm2/m32, imm8

66 0F 3A 21 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vinsertps_xmm_xmm_xmmm32_imm8 = 3_835

VINSERTPS xmm1, xmm2, xmm3/m32, imm8

VEX.128.66.0F3A.WIG 21 /r ib

AVX

16/32/64-bit

§

EVEX_Vinsertps_xmm_xmm_xmmm32_imm8 = 3_836

VINSERTPS xmm1, xmm2, xmm3/m32, imm8

EVEX.128.66.0F3A.W0 21 /r ib

AVX512F

16/32/64-bit

§

Pinsrd_xmm_rm32_imm8 = 3_837

PINSRD xmm1, r/m32, imm8

66 0F 3A 22 /r ib

SSE4.1

16/32/64-bit

§

Pinsrq_xmm_rm64_imm8 = 3_838

PINSRQ xmm1, r/m64, imm8

66 o64 0F 3A 22 /r ib

SSE4.1

64-bit

§

VEX_Vpinsrd_xmm_xmm_rm32_imm8 = 3_839

VPINSRD xmm1, xmm2, r/m32, imm8

VEX.128.66.0F3A.W0 22 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrq_xmm_xmm_rm64_imm8 = 3_840

VPINSRQ xmm1, xmm2, r/m64, imm8

VEX.128.66.0F3A.W1 22 /r ib

AVX

64-bit

§

EVEX_Vpinsrd_xmm_xmm_rm32_imm8 = 3_841

VPINSRD xmm1, xmm2, r/m32, imm8

EVEX.128.66.0F3A.W0 22 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vpinsrq_xmm_xmm_rm64_imm8 = 3_842

VPINSRQ xmm1, xmm2, r/m64, imm8

EVEX.128.66.0F3A.W1 22 /r ib

AVX512DQ

64-bit

§

EVEX_Vshuff32x4_ymm_k1z_ymm_ymmm256b32_imm8 = 3_843

VSHUFF32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 23 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshuff32x4_zmm_k1z_zmm_zmmm512b32_imm8 = 3_844

VSHUFF32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 23 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vshuff64x2_ymm_k1z_ymm_ymmm256b64_imm8 = 3_845

VSHUFF64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 23 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshuff64x2_zmm_k1z_zmm_zmmm512b64_imm8 = 3_846

VSHUFF64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 23 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_xmm_k1z_xmm_xmmm128b32_imm8 = 3_847

VPTERNLOGD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_ymm_k1z_ymm_ymmm256b32_imm8 = 3_848

VPTERNLOGD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_zmm_k1z_zmm_zmmm512b32_imm8 = 3_849

VPTERNLOGD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 25 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_xmm_k1z_xmm_xmmm128b64_imm8 = 3_850

VPTERNLOGQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_ymm_k1z_ymm_ymmm256b64_imm8 = 3_851

VPTERNLOGQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_zmm_k1z_zmm_zmmm512b64_imm8 = 3_852

VPTERNLOGQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 25 /r ib

AVX512F

16/32/64-bit

§
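Each `Code` value also exposes the encoding details shown in these entries programmatically through `OpCodeInfo`. A short sketch, assuming the crate's `op_code_info` feature is enabled (it is part of the default feature set):

```rust
use iced_x86::Code;

let op_code = Code::EVEX_Vpternlogd_zmm_k1z_zmm_zmmm512b32_imm8.op_code();
// These should correspond to the strings shown in the entry above,
// e.g. "VPTERNLOGD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8"
// and "EVEX.512.66.0F3A.W0 25 /r ib".
println!("{}", op_code.instruction_string());
println!("{}", op_code.op_code_string());
assert!(op_code.is_available_in_mode(64));
```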

EVEX_Vgetmantps_xmm_k1z_xmmm128b32_imm8 = 3_853

VGETMANTPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantps_ymm_k1z_ymmm256b32_imm8 = 3_854

VGETMANTPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantps_zmm_k1z_zmmm512b32_imm8_sae = 3_855

VGETMANTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 26 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_xmm_k1z_xmmm128b64_imm8 = 3_856

VGETMANTPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_ymm_k1z_ymmm256b64_imm8 = 3_857

VGETMANTPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_zmm_k1z_zmmm512b64_imm8_sae = 3_858

VGETMANTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 26 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantss_xmm_k1z_xmm_xmmm32_imm8_sae = 3_859

VGETMANTSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 27 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantsd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_860

VGETMANTSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 27 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftrb_kr_kr_imm8 = 3_861

KSHIFTRB k1, k2, imm8

VEX.L0.66.0F3A.W0 30 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Kshiftrw_kr_kr_imm8 = 3_862

KSHIFTRW k1, k2, imm8

VEX.L0.66.0F3A.W1 30 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftrd_kr_kr_imm8 = 3_863

KSHIFTRD k1, k2, imm8

VEX.L0.66.0F3A.W0 31 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftrq_kr_kr_imm8 = 3_864

KSHIFTRQ k1, k2, imm8

VEX.L0.66.0F3A.W1 31 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftlb_kr_kr_imm8 = 3_865

KSHIFTLB k1, k2, imm8

VEX.L0.66.0F3A.W0 32 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Kshiftlw_kr_kr_imm8 = 3_866

KSHIFTLW k1, k2, imm8

VEX.L0.66.0F3A.W1 32 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftld_kr_kr_imm8 = 3_867

KSHIFTLD k1, k2, imm8

VEX.L0.66.0F3A.W0 33 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftlq_kr_kr_imm8 = 3_868

KSHIFTLQ k1, k2, imm8

VEX.L0.66.0F3A.W1 33 /r ib

AVX512BW

16/32/64-bit

§

VEX_Vinserti128_ymm_ymm_xmmm128_imm8 = 3_869

VINSERTI128 ymm1, ymm2, xmm3/m128, imm8

VEX.256.66.0F3A.W0 38 /r ib

AVX2

16/32/64-bit

§

EVEX_Vinserti32x4_ymm_k1z_ymm_xmmm128_imm8 = 3_870

VINSERTI32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W0 38 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vinserti32x4_zmm_k1z_zmm_xmmm128_imm8 = 3_871

VINSERTI32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W0 38 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vinserti64x2_ymm_k1z_ymm_xmmm128_imm8 = 3_872

VINSERTI64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W1 38 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vinserti64x2_zmm_k1z_zmm_xmmm128_imm8 = 3_873

VINSERTI64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W1 38 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vextracti128_xmmm128_ymm_imm8 = 3_874

VEXTRACTI128 xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 39 /r ib

AVX2

16/32/64-bit

§

EVEX_Vextracti32x4_xmmm128_k1z_ymm_imm8 = 3_875

VEXTRACTI32X4 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 39 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vextracti32x4_xmmm128_k1z_zmm_imm8 = 3_876

VEXTRACTI32X4 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 39 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextracti64x2_xmmm128_k1z_ymm_imm8 = 3_877

VEXTRACTI64X2 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W1 39 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vextracti64x2_xmmm128_k1z_zmm_imm8 = 3_878

VEXTRACTI64X2 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 39 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinserti32x8_zmm_k1z_zmm_ymmm256_imm8 = 3_879

VINSERTI32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W0 3A /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinserti64x4_zmm_k1z_zmm_ymmm256_imm8 = 3_880

VINSERTI64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W1 3A /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextracti32x8_ymmm256_k1z_zmm_imm8 = 3_881

VEXTRACTI32X8 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 3B /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vextracti64x4_ymmm256_k1z_zmm_imm8 = 3_882

VEXTRACTI64X4 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 3B /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_xmm_xmmm128_imm8 = 3_883

VPCMPUB k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_ymm_ymmm256_imm8 = 3_884

VPCMPUB k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_zmm_zmmm512_imm8 = 3_885

VPCMPUB k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 3E /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_xmm_xmmm128_imm8 = 3_886

VPCMPUW k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_ymm_ymmm256_imm8 = 3_887

VPCMPUW k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_zmm_zmmm512_imm8 = 3_888

VPCMPUW k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 3E /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_xmm_xmmm128_imm8 = 3_889

VPCMPB k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_ymm_ymmm256_imm8 = 3_890

VPCMPB k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_zmm_zmmm512_imm8 = 3_891

VPCMPB k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 3F /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_xmm_xmmm128_imm8 = 3_892

VPCMPW k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_ymm_ymmm256_imm8 = 3_893

VPCMPW k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_zmm_zmmm512_imm8 = 3_894

VPCMPW k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 3F /r ib

AVX512BW

16/32/64-bit

§

Dpps_xmm_xmmm128_imm8 = 3_895

DPPS xmm1, xmm2/m128, imm8

66 0F 3A 40 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vdpps_xmm_xmm_xmmm128_imm8 = 3_896

VDPPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 40 /r ib

AVX

16/32/64-bit

§

VEX_Vdpps_ymm_ymm_ymmm256_imm8 = 3_897

VDPPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 40 /r ib

AVX

16/32/64-bit

§

Dppd_xmm_xmmm128_imm8 = 3_898

DPPD xmm1, xmm2/m128, imm8

66 0F 3A 41 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vdppd_xmm_xmm_xmmm128_imm8 = 3_899

VDPPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 41 /r ib

AVX

16/32/64-bit

§

Mpsadbw_xmm_xmmm128_imm8 = 3_900

MPSADBW xmm1, xmm2/m128, imm8

66 0F 3A 42 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vmpsadbw_xmm_xmm_xmmm128_imm8 = 3_901

VMPSADBW xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 42 /r ib

AVX

16/32/64-bit

§

VEX_Vmpsadbw_ymm_ymm_ymmm256_imm8 = 3_902

VMPSADBW ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 42 /r ib

AVX2

16/32/64-bit

§

EVEX_Vdbpsadbw_xmm_k1z_xmm_xmmm128_imm8 = 3_903

VDBPSADBW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 42 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vdbpsadbw_ymm_k1z_ymm_ymmm256_imm8 = 3_904

VDBPSADBW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 42 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vdbpsadbw_zmm_k1z_zmm_zmmm512_imm8 = 3_905

VDBPSADBW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 42 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vshufi32x4_ymm_k1z_ymm_ymmm256b32_imm8 = 3_906

VSHUFI32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 43 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufi32x4_zmm_k1z_zmm_zmmm512b32_imm8 = 3_907

VSHUFI32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 43 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vshufi64x2_ymm_k1z_ymm_ymmm256b64_imm8 = 3_908

VSHUFI64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 43 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufi64x2_zmm_k1z_zmm_zmmm512b64_imm8 = 3_909

VSHUFI64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 43 /r ib

AVX512F

16/32/64-bit

§

Pclmulqdq_xmm_xmmm128_imm8 = 3_910

PCLMULQDQ xmm1, xmm2/m128, imm8

66 0F 3A 44 /r ib

PCLMULQDQ

16/32/64-bit

§
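As a usage sketch for the legacy PCLMULQDQ entry just listed, decoding a hand-assembled instance yields this `Code` variant (the bytes below are assembled here only for illustration):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

// 66 0F 3A 44 /r ib -> PCLMULQDQ xmm0, xmm1, 0
let bytes = [0x66, 0x0F, 0x3A, 0x44, 0xC1, 0x00];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
let instr = decoder.decode();
assert_eq!(instr.code(), Code::Pclmulqdq_xmm_xmmm128_imm8);
assert_eq!(instr.immediate8(), 0);
```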

VEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8 = 3_911

VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 44 /r ib

PCLMULQDQ and AVX

16/32/64-bit

§

VEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8 = 3_912

VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 44 /r ib

VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8 = 3_913

VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.WIG 44 /r ib

AVX512VL and VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8 = 3_914

VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.WIG 44 /r ib

AVX512VL and VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_zmm_zmm_zmmm512_imm8 = 3_915

VPCLMULQDQ zmm1, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.WIG 44 /r ib

AVX512F and VPCLMULQDQ

16/32/64-bit

§

VEX_Vperm2i128_ymm_ymm_ymmm256_imm8 = 3_916

VPERM2I128 ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 46 /r ib

AVX2

16/32/64-bit

§

VEX_Vpermil2ps_xmm_xmm_xmmm128_xmm_imm4 = 3_917

VPERMIL2PS xmm1, xmm2, xmm3/m128, xmm4, imm4

VEX.128.66.0F3A.W0 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_ymm_ymm_ymmm256_ymm_imm4 = 3_918

VPERMIL2PS ymm1, ymm2, ymm3/m256, ymm4, imm4

VEX.256.66.0F3A.W0 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_xmm_xmm_xmm_xmmm128_imm4 = 3_919

VPERMIL2PS xmm1, xmm2, xmm3, xmm4/m128, imm4

VEX.128.66.0F3A.W1 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_ymm_ymm_ymm_ymmm256_imm4 = 3_920

VPERMIL2PS ymm1, ymm2, ymm3, ymm4/m256, imm4

VEX.256.66.0F3A.W1 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_xmm_xmm_xmmm128_xmm_imm4 = 3_921

VPERMIL2PD xmm1, xmm2, xmm3/m128, xmm4, imm4

VEX.128.66.0F3A.W0 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_ymm_ymm_ymmm256_ymm_imm4 = 3_922

VPERMIL2PD ymm1, ymm2, ymm3/m256, ymm4, imm4

VEX.256.66.0F3A.W0 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_xmm_xmm_xmm_xmmm128_imm4 = 3_923

VPERMIL2PD xmm1, xmm2, xmm3, xmm4/m128, imm4

VEX.128.66.0F3A.W1 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_ymm_ymm_ymm_ymmm256_imm4 = 3_924

VPERMIL2PD ymm1, ymm2, ymm3, ymm4/m256, imm4

VEX.256.66.0F3A.W1 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vblendvps_xmm_xmm_xmmm128_xmm = 3_925

VBLENDVPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4A /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvps_ymm_ymm_ymmm256_ymm = 3_926

VBLENDVPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4A /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvpd_xmm_xmm_xmmm128_xmm = 3_927

VBLENDVPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4B /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvpd_ymm_ymm_ymmm256_ymm = 3_928

VBLENDVPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4B /r /is4

AVX

16/32/64-bit

§

VEX_Vpblendvb_xmm_xmm_xmmm128_xmm = 3_929

VPBLENDVB xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4C /r /is4

AVX

16/32/64-bit

§

VEX_Vpblendvb_ymm_ymm_ymmm256_ymm = 3_930

VPBLENDVB ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4C /r /is4

AVX2

16/32/64-bit

§

EVEX_Vrangeps_xmm_k1z_xmm_xmmm128b32_imm8 = 3_931

VRANGEPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangeps_ymm_k1z_ymm_ymmm256b32_imm8 = 3_932

VRANGEPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangeps_zmm_k1z_zmm_zmmm512b32_imm8_sae = 3_933

VRANGEPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 50 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_xmm_k1z_xmm_xmmm128b64_imm8 = 3_934

VRANGEPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_ymm_k1z_ymm_ymmm256b64_imm8 = 3_935

VRANGEPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_zmm_k1z_zmm_zmmm512b64_imm8_sae = 3_936

VRANGEPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 50 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangess_xmm_k1z_xmm_xmmm32_imm8_sae = 3_937

VRANGESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 51 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_938

VRANGESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 51 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfixupimmps_xmm_k1z_xmm_xmmm128b32_imm8 = 3_939

VFIXUPIMMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmps_ymm_k1z_ymm_ymmm256b32_imm8 = 3_940

VFIXUPIMMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmps_zmm_k1z_zmm_zmmm512b32_imm8_sae = 3_941

VFIXUPIMMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 54 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_xmm_k1z_xmm_xmmm128b64_imm8 = 3_942

VFIXUPIMMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_ymm_k1z_ymm_ymmm256b64_imm8 = 3_943

VFIXUPIMMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_zmm_k1z_zmm_zmmm512b64_imm8_sae = 3_944

VFIXUPIMMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 54 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmss_xmm_k1z_xmm_xmmm32_imm8_sae = 3_945

VFIXUPIMMSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 55 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmsd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_946

VFIXUPIMMSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 55 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vreduceps_xmm_k1z_xmmm128b32_imm8 = 3_947

VREDUCEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreduceps_ymm_k1z_ymmm256b32_imm8 = 3_948

VREDUCEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreduceps_zmm_k1z_zmmm512b32_imm8_sae = 3_949

VREDUCEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 56 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_xmm_k1z_xmmm128b64_imm8 = 3_950

VREDUCEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_ymm_k1z_ymmm256b64_imm8 = 3_951

VREDUCEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_zmm_k1z_zmmm512b64_imm8_sae = 3_952

VREDUCEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 56 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducess_xmm_k1z_xmm_xmmm32_imm8_sae = 3_953

VREDUCESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 57 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducesd_xmm_k1z_xmm_xmmm64_imm8_sae = 3_954

VREDUCESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 57 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vfmaddsubps_xmm_xmm_xmmm128_xmm = 3_955

VFMADDSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_ymm_ymm_ymmm256_ymm = 3_956

VFMADDSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_xmm_xmm_xmm_xmmm128 = 3_957

VFMADDSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_ymm_ymm_ymm_ymmm256 = 3_958

VFMADDSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_xmm_xmm_xmmm128_xmm = 3_959

VFMADDSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_ymm_ymm_ymmm256_ymm = 3_960

VFMADDSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_xmm_xmm_xmm_xmmm128 = 3_961

VFMADDSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_ymm_ymm_ymm_ymmm256 = 3_962

VFMADDSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_xmm_xmm_xmmm128_xmm = 3_963

VFMSUBADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_ymm_ymm_ymmm256_ymm = 3_964

VFMSUBADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_xmm_xmm_xmm_xmmm128 = 3_965

VFMSUBADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_ymm_ymm_ymm_ymmm256 = 3_966

VFMSUBADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_xmm_xmm_xmmm128_xmm = 3_967

VFMSUBADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_ymm_ymm_ymmm256_ymm = 3_968

VFMSUBADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_xmm_xmm_xmm_xmmm128 = 3_969

VFMSUBADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_ymm_ymm_ymm_ymmm256 = 3_970

VFMSUBADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5F /r /is4

FMA4

16/32/64-bit

§
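The FMA4 entries above use the `/is4` operand form, where the fourth register is carried in the upper four bits of the trailing immediate byte. A hedged decoding sketch (bytes hand-assembled for illustration, matching the W0 form listed above):

```rust
use iced_x86::{Code, Decoder, DecoderOptions, Register};

// VEX.128.66.0F3A.W0 5C /r /is4 -> VFMADDSUBPS xmm0, xmm1, xmm2, xmm3
// (xmm3 is encoded in bits 7:4 of the final byte, 0x30)
let bytes = [0xC4, 0xE3, 0x71, 0x5C, 0xC2, 0x30];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
let instr = decoder.decode();
assert_eq!(instr.code(), Code::VEX_Vfmaddsubps_xmm_xmm_xmmm128_xmm);
assert_eq!(instr.op3_register(), Register::XMM3);
```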

Pcmpestrm_xmm_xmmm128_imm8 = 3_971

PCMPESTRM xmm1, xmm2/m128, imm8

66 0F 3A 60 /r ib

SSE4.2

16/32/64-bit

§

Pcmpestrm64_xmm_xmmm128_imm8 = 3_972

PCMPESTRM64 xmm1, xmm2/m128, imm8

66 o64 0F 3A 60 /r ib

SSE4.2

64-bit

§

VEX_Vpcmpestrm_xmm_xmmm128_imm8 = 3_973

VPCMPESTRM xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 60 /r ib

AVX

16/32/64-bit

§

VEX_Vpcmpestrm64_xmm_xmmm128_imm8 = 3_974

VPCMPESTRM64 xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W1 60 /r ib

AVX

64-bit

§

Pcmpestri_xmm_xmmm128_imm8 = 3_975

PCMPESTRI xmm1, xmm2/m128, imm8

66 0F 3A 61 /r ib

SSE4.2

16/32/64-bit

§

Pcmpestri64_xmm_xmmm128_imm8 = 3_976

PCMPESTRI64 xmm1, xmm2/m128, imm8

66 o64 0F 3A 61 /r ib

SSE4.2

64-bit

§

VEX_Vpcmpestri_xmm_xmmm128_imm8 = 3_977

VPCMPESTRI xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 61 /r ib

AVX

16/32/64-bit

§

VEX_Vpcmpestri64_xmm_xmmm128_imm8 = 3_978

VPCMPESTRI64 xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W1 61 /r ib

AVX

64-bit

§

Pcmpistrm_xmm_xmmm128_imm8 = 3_979

PCMPISTRM xmm1, xmm2/m128, imm8

66 0F 3A 62 /r ib

SSE4.2

16/32/64-bit

§

VEX_Vpcmpistrm_xmm_xmmm128_imm8 = 3_980

VPCMPISTRM xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 62 /r ib

AVX

16/32/64-bit

§

Pcmpistri_xmm_xmmm128_imm8 = 3_981

PCMPISTRI xmm1, xmm2/m128, imm8

66 0F 3A 63 /r ib

SSE4.2

16/32/64-bit

§

VEX_Vpcmpistri_xmm_xmmm128_imm8 = 3_982

VPCMPISTRI xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 63 /r ib

AVX

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_xmmm128b32_imm8 = 3_983

VFPCLASSPS k2 {k1}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_ymmm256b32_imm8 = 3_984

VFPCLASSPS k2 {k1}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_zmmm512b32_imm8 = 3_985

VFPCLASSPS k2 {k1}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 66 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_xmmm128b64_imm8 = 3_986

VFPCLASSPD k2 {k1}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_ymmm256b64_imm8 = 3_987

VFPCLASSPD k2 {k1}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_zmmm512b64_imm8 = 3_988

VFPCLASSPD k2 {k1}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 66 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassss_kr_k1_xmmm32_imm8 = 3_989

VFPCLASSSS k2 {k1}, xmm2/m32, imm8

EVEX.LIG.66.0F3A.W0 67 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasssd_kr_k1_xmmm64_imm8 = 3_990

VFPCLASSSD k2 {k1}, xmm2/m64, imm8

EVEX.LIG.66.0F3A.W1 67 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vfmaddps_xmm_xmm_xmmm128_xmm = 3_991

VFMADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_ymm_ymm_ymmm256_ymm = 3_992

VFMADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_xmm_xmm_xmm_xmmm128 = 3_993

VFMADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_ymm_ymm_ymm_ymmm256 = 3_994

VFMADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_xmm_xmm_xmmm128_xmm = 3_995

VFMADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_ymm_ymm_ymmm256_ymm = 3_996

VFMADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_xmm_xmm_xmm_xmmm128 = 3_997

VFMADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_ymm_ymm_ymm_ymmm256 = 3_998

VFMADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddss_xmm_xmm_xmmm32_xmm = 3_999

VFMADDSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 6A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddss_xmm_xmm_xmm_xmmm32 = 4_000

VFMADDSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 6A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsd_xmm_xmm_xmmm64_xmm = 4_001

VFMADDSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 6B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsd_xmm_xmm_xmm_xmmm64 = 4_002

VFMADDSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 6B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_xmm_xmm_xmmm128_xmm = 4_003

VFMSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_ymm_ymm_ymmm256_ymm = 4_004

VFMSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_xmm_xmm_xmm_xmmm128 = 4_005

VFMSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_ymm_ymm_ymm_ymmm256 = 4_006

VFMSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_xmm_xmm_xmmm128_xmm = 4_007

VFMSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_ymm_ymm_ymmm256_ymm = 4_008

VFMSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_xmm_xmm_xmm_xmmm128 = 4_009

VFMSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_ymm_ymm_ymm_ymmm256 = 4_010

VFMSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubss_xmm_xmm_xmmm32_xmm = 4_011

VFMSUBSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 6E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubss_xmm_xmm_xmm_xmmm32 = 4_012

VFMSUBSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 6E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubsd_xmm_xmm_xmmm64_xmm = 4_013

VFMSUBSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 6F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubsd_xmm_xmm_xmm_xmmm64 = 4_014

VFMSUBSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 6F /r /is4

FMA4

16/32/64-bit

§

EVEX_Vpshldw_xmm_k1z_xmm_xmmm128_imm8 = 4_015

VPSHLDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 70 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldw_ymm_k1z_ymm_ymmm256_imm8 = 4_016

VPSHLDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 70 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldw_zmm_k1z_zmm_zmmm512_imm8 = 4_017

VPSHLDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 70 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_xmm_k1z_xmm_xmmm128b32_imm8 = 4_018

VPSHLDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_ymm_k1z_ymm_ymmm256b32_imm8 = 4_019

VPSHLDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_zmm_k1z_zmm_zmmm512b32_imm8 = 4_020

VPSHLDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 71 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_xmm_k1z_xmm_xmmm128b64_imm8 = 4_021

VPSHLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_ymm_k1z_ymm_ymmm256b64_imm8 = 4_022

VPSHLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_zmm_k1z_zmm_zmmm512b64_imm8 = 4_023

VPSHLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 71 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_xmm_k1z_xmm_xmmm128_imm8 = 4_024

VPSHRDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 72 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_ymm_k1z_ymm_ymmm256_imm8 = 4_025

VPSHRDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 72 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_zmm_k1z_zmm_zmmm512_imm8 = 4_026

VPSHRDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 72 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_xmm_k1z_xmm_xmmm128b32_imm8 = 4_027

VPSHRDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_ymm_k1z_ymm_ymmm256b32_imm8 = 4_028

VPSHRDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_zmm_k1z_zmm_zmmm512b32_imm8 = 4_029

VPSHRDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 73 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_xmm_k1z_xmm_xmmm128b64_imm8 = 4_030

VPSHRDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_ymm_k1z_ymm_ymmm256b64_imm8 = 4_031

VPSHRDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_zmm_k1z_zmm_zmmm512b64_imm8 = 4_032

VPSHRDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 73 /r ib

AVX512_VBMI2

16/32/64-bit

§

VEX_Vfnmaddps_xmm_xmm_xmmm128_xmm = 4_033

VFNMADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_ymm_ymm_ymmm256_ymm = 4_034

VFNMADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_xmm_xmm_xmm_xmmm128 = 4_035

VFNMADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_ymm_ymm_ymm_ymmm256 = 4_036

VFNMADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_xmm_xmm_xmmm128_xmm = 4_037

VFNMADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_ymm_ymm_ymmm256_ymm = 4_038

VFNMADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_xmm_xmm_xmm_xmmm128 = 4_039

VFNMADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_ymm_ymm_ymm_ymmm256 = 4_040

VFNMADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddss_xmm_xmm_xmmm32_xmm = 4_041

VFNMADDSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 7A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddss_xmm_xmm_xmm_xmmm32 = 4_042

VFNMADDSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 7A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddsd_xmm_xmm_xmmm64_xmm = 4_043

VFNMADDSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 7B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddsd_xmm_xmm_xmm_xmmm64 = 4_044

VFNMADDSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 7B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_xmm_xmm_xmmm128_xmm = 4_045

VFNMSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_ymm_ymm_ymmm256_ymm = 4_046

VFNMSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_xmm_xmm_xmm_xmmm128 = 4_047

VFNMSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_ymm_ymm_ymm_ymmm256 = 4_048

VFNMSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_xmm_xmm_xmmm128_xmm = 4_049

VFNMSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_ymm_ymm_ymmm256_ymm = 4_050

VFNMSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_xmm_xmm_xmm_xmmm128 = 4_051

VFNMSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_ymm_ymm_ymm_ymmm256 = 4_052

VFNMSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubss_xmm_xmm_xmmm32_xmm = 4_053

VFNMSUBSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 7E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubss_xmm_xmm_xmm_xmmm32 = 4_054

VFNMSUBSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 7E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubsd_xmm_xmm_xmmm64_xmm = 4_055

VFNMSUBSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 7F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubsd_xmm_xmm_xmm_xmmm64 = 4_056

VFNMSUBSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 7F /r /is4

FMA4

16/32/64-bit

§

Sha1rnds4_xmm_xmmm128_imm8 = 4_057

SHA1RNDS4 xmm1, xmm2/m128, imm8

NP 0F 3A CC /r ib

SHA

16/32/64-bit

§
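A small decoding sketch for the SHA1RNDS4 entry above; the NP prefix column means no 66/F2/F3 prefix is present, so the bytes are just the opcode, ModRM, and imm8 (hand-assembled here for illustration):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

// NP 0F 3A CC /r ib -> SHA1RNDS4 xmm0, xmm1, 0
let bytes = [0x0F, 0x3A, 0xCC, 0xC1, 0x00];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
assert_eq!(decoder.decode().code(), Code::Sha1rnds4_xmm_xmmm128_imm8);
```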

Gf2p8affineqb_xmm_xmmm128_imm8 = 4_058

GF2P8AFFINEQB xmm1, xmm2/m128, imm8

66 0F 3A CE /r ib

GFNI

16/32/64-bit

§

VEX_Vgf2p8affineqb_xmm_xmm_xmmm128_imm8 = 4_059

VGF2P8AFFINEQB xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W1 CE /r ib

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8affineqb_ymm_ymm_ymmm256_imm8 = 4_060

VGF2P8AFFINEQB ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W1 CE /r ib

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_xmm_k1z_xmm_xmmm128b64_imm8 = 4_061

VGF2P8AFFINEQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 CE /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_ymm_k1z_ymm_ymmm256b64_imm8 = 4_062

VGF2P8AFFINEQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 CE /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_zmm_k1z_zmm_zmmm512b64_imm8 = 4_063

VGF2P8AFFINEQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 CE /r ib

AVX512F and GFNI

16/32/64-bit

§

Gf2p8affineinvqb_xmm_xmmm128_imm8 = 4_064

GF2P8AFFINEINVQB xmm1, xmm2/m128, imm8

66 0F 3A CF /r ib

GFNI

16/32/64-bit

§

VEX_Vgf2p8affineinvqb_xmm_xmm_xmmm128_imm8 = 4_065

VGF2P8AFFINEINVQB xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W1 CF /r ib

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8affineinvqb_ymm_ymm_ymmm256_imm8 = 4_066

VGF2P8AFFINEINVQB ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W1 CF /r ib

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_xmm_k1z_xmm_xmmm128b64_imm8 = 4_067

VGF2P8AFFINEINVQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 CF /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_ymm_k1z_ymm_ymmm256b64_imm8 = 4_068

VGF2P8AFFINEINVQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 CF /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_zmm_k1z_zmm_zmmm512b64_imm8 = 4_069

VGF2P8AFFINEINVQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 CF /r ib

AVX512F and GFNI

16/32/64-bit

§

Aeskeygenassist_xmm_xmmm128_imm8 = 4_070

AESKEYGENASSIST xmm1, xmm2/m128, imm8

66 0F 3A DF /r ib

AES

16/32/64-bit

§

VEX_Vaeskeygenassist_xmm_xmmm128_imm8 = 4_071

VAESKEYGENASSIST xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG DF /r ib

AES and AVX

16/32/64-bit

§

VEX_Rorx_r32_rm32_imm8 = 4_072

RORX r32, r/m32, imm8

VEX.LZ.F2.0F3A.W0 F0 /r ib

BMI2

16/32/64-bit

§

VEX_Rorx_r64_rm64_imm8 = 4_073

RORX r64, r/m64, imm8

VEX.LZ.F2.0F3A.W1 F0 /r ib

BMI2

64-bit

§
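For the RORX entries above, a minimal decoding sketch (bytes hand-assembled; the VEX.LZ.F2.0F3A.W0 form selects the 32-bit variant):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

// VEX.LZ.F2.0F3A.W0 F0 /r ib -> RORX eax, ecx, 3
let bytes = [0xC4, 0xE3, 0x7B, 0xF0, 0xC1, 0x03];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
let instr = decoder.decode();
assert_eq!(instr.code(), Code::VEX_Rorx_r32_rm32_imm8);
assert_eq!(instr.immediate8(), 3);
```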

XOP_Vpmacssww_xmm_xmm_xmmm128_xmm = 4_074

VPMACSSWW xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 85 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsswd_xmm_xmm_xmmm128_xmm = 4_075

VPMACSSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 86 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdql_xmm_xmm_xmmm128_xmm = 4_076

VPMACSSDQL xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 87 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdd_xmm_xmm_xmmm128_xmm = 4_077

VPMACSSDD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 8E /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdqh_xmm_xmm_xmmm128_xmm = 4_078

VPMACSSDQH xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 8F /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsww_xmm_xmm_xmmm128_xmm = 4_079

VPMACSWW xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 95 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacswd_xmm_xmm_xmmm128_xmm = 4_080

VPMACSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 96 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdql_xmm_xmm_xmmm128_xmm = 4_081

VPMACSDQL xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 97 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdd_xmm_xmm_xmmm128_xmm = 4_082

VPMACSDD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 9E /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdqh_xmm_xmm_xmmm128_xmm = 4_083

VPMACSDQH xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 9F /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_xmm_xmm_xmmm128_xmm = 4_084

VPCMOV xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_ymm_ymm_ymmm256_ymm = 4_085

VPCMOV ymm1, ymm2, ymm3/m256, ymm4

XOP.256.X8.W0 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_xmm_xmm_xmm_xmmm128 = 4_086

VPCMOV xmm1, xmm2, xmm3, xmm4/m128

XOP.128.X8.W1 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_ymm_ymm_ymm_ymmm256 = 4_087

VPCMOV ymm1, ymm2, ymm3, ymm4/m256

XOP.256.X8.W1 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpperm_xmm_xmm_xmmm128_xmm = 4_088

VPPERM xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A3 /r /is4

XOP

16/32/64-bit

§

XOP_Vpperm_xmm_xmm_xmm_xmmm128 = 4_089

VPPERM xmm1, xmm2, xmm3, xmm4/m128

XOP.128.X8.W1 A3 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmadcsswd_xmm_xmm_xmmm128_xmm = 4_090

VPMADCSSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A6 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmadcswd_xmm_xmm_xmmm128_xmm = 4_091

VPMADCSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 B6 /r /is4

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmmm128_imm8 = 4_092

VPROTB xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C0 /r ib

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmmm128_imm8 = 4_093

VPROTW xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C1 /r ib

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmmm128_imm8 = 4_094

VPROTD xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C2 /r ib

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmmm128_imm8 = 4_095

VPROTQ xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C3 /r ib

XOP

16/32/64-bit

§

XOP_Vpcomb_xmm_xmm_xmmm128_imm8 = 4_096

VPCOMB xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CC /r ib

XOP

16/32/64-bit

§

XOP_Vpcomw_xmm_xmm_xmmm128_imm8 = 4_097

VPCOMW xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CD /r ib

XOP

16/32/64-bit

§

XOP_Vpcomd_xmm_xmm_xmmm128_imm8 = 4_098

VPCOMD xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CE /r ib

XOP

16/32/64-bit

§

XOP_Vpcomq_xmm_xmm_xmmm128_imm8 = 4_099

VPCOMQ xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CF /r ib

XOP

16/32/64-bit

§

XOP_Vpcomub_xmm_xmm_xmmm128_imm8 = 4_100

VPCOMUB xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EC /r ib

XOP

16/32/64-bit

§

XOP_Vpcomuw_xmm_xmm_xmmm128_imm8 = 4_101

VPCOMUW xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 ED /r ib

XOP

16/32/64-bit

§

XOP_Vpcomud_xmm_xmm_xmmm128_imm8 = 4_102

VPCOMUD xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EE /r ib

XOP

16/32/64-bit

§

XOP_Vpcomuq_xmm_xmm_xmmm128_imm8 = 4_103

VPCOMUQ xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EF /r ib

XOP

16/32/64-bit

§
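The VPCOM* entries above are AMD XOP encodings, and their `OpCodeInfo` metadata distinguishes them from the legacy/VEX/EVEX variants elsewhere in this enum. A sketch assuming the default `op_code_info` feature:

```rust
use iced_x86::{Code, EncodingKind};

let op_code = Code::XOP_Vpcomb_xmm_xmm_xmmm128_imm8.op_code();
assert_eq!(op_code.encoding(), EncodingKind::XOP);
assert!(op_code.is_available_in_mode(64));
```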

XOP_Blcfill_r32_rm32 = 4_104

BLCFILL r32, r/m32

XOP.L0.X9.W0 01 /1

TBM

16/32/64-bit

§

XOP_Blcfill_r64_rm64 = 4_105

BLCFILL r64, r/m64

XOP.L0.X9.W1 01 /1

TBM

64-bit

§

XOP_Blsfill_r32_rm32 = 4_106

BLSFILL r32, r/m32

XOP.L0.X9.W0 01 /2

TBM

16/32/64-bit

§

XOP_Blsfill_r64_rm64 = 4_107

BLSFILL r64, r/m64

XOP.L0.X9.W1 01 /2

TBM

64-bit

§

XOP_Blcs_r32_rm32 = 4_108

BLCS r32, r/m32

XOP.L0.X9.W0 01 /3

TBM

16/32/64-bit

§

XOP_Blcs_r64_rm64 = 4_109

BLCS r64, r/m64

XOP.L0.X9.W1 01 /3

TBM

64-bit

§

XOP_Tzmsk_r32_rm32 = 4_110

TZMSK r32, r/m32

XOP.L0.X9.W0 01 /4

TBM

16/32/64-bit

§

XOP_Tzmsk_r64_rm64 = 4_111

TZMSK r64, r/m64

XOP.L0.X9.W1 01 /4

TBM

64-bit

§

XOP_Blcic_r32_rm32 = 4_112

BLCIC r32, r/m32

XOP.L0.X9.W0 01 /5

TBM

16/32/64-bit

§

XOP_Blcic_r64_rm64 = 4_113

BLCIC r64, r/m64

XOP.L0.X9.W1 01 /5

TBM

64-bit

§

XOP_Blsic_r32_rm32 = 4_114

BLSIC r32, r/m32

XOP.L0.X9.W0 01 /6

TBM

16/32/64-bit

§

XOP_Blsic_r64_rm64 = 4_115

BLSIC r64, r/m64

XOP.L0.X9.W1 01 /6

TBM

64-bit

§

XOP_T1mskc_r32_rm32 = 4_116

T1MSKC r32, r/m32

XOP.L0.X9.W0 01 /7

TBM

16/32/64-bit

§

XOP_T1mskc_r64_rm64 = 4_117

T1MSKC r64, r/m64

XOP.L0.X9.W1 01 /7

TBM

64-bit

§

XOP_Blcmsk_r32_rm32 = 4_118

BLCMSK r32, r/m32

XOP.L0.X9.W0 02 /1

TBM

16/32/64-bit

§

XOP_Blcmsk_r64_rm64 = 4_119

BLCMSK r64, r/m64

XOP.L0.X9.W1 02 /1

TBM

64-bit

§

XOP_Blci_r32_rm32 = 4_120

BLCI r32, r/m32

XOP.L0.X9.W0 02 /6

TBM

16/32/64-bit

§

XOP_Blci_r64_rm64 = 4_121

BLCI r64, r/m64

XOP.L0.X9.W1 02 /6

TBM

64-bit

§

XOP_Llwpcb_r32 = 4_122

LLWPCB r32

XOP.L0.X9.W0 12 /0

LWP

16/32/64-bit

§

XOP_Llwpcb_r64 = 4_123

LLWPCB r64

XOP.L0.X9.W1 12 /0

LWP

64-bit

§

XOP_Slwpcb_r32 = 4_124

SLWPCB r32

XOP.L0.X9.W0 12 /1

LWP

16/32/64-bit

§

XOP_Slwpcb_r64 = 4_125

SLWPCB r64

XOP.L0.X9.W1 12 /1

LWP

64-bit

§

XOP_Vfrczps_xmm_xmmm128 = 4_126

VFRCZPS xmm1, xmm2/m128

XOP.128.X9.W0 80 /r

XOP

16/32/64-bit

§

XOP_Vfrczps_ymm_ymmm256 = 4_127

VFRCZPS ymm1, ymm2/m256

XOP.256.X9.W0 80 /r

XOP

16/32/64-bit

§

XOP_Vfrczpd_xmm_xmmm128 = 4_128

VFRCZPD xmm1, xmm2/m128

XOP.128.X9.W0 81 /r

XOP

16/32/64-bit

§

XOP_Vfrczpd_ymm_ymmm256 = 4_129

VFRCZPD ymm1, ymm2/m256

XOP.256.X9.W0 81 /r

XOP

16/32/64-bit

§

XOP_Vfrczss_xmm_xmmm32 = 4_130

VFRCZSS xmm1, xmm2/m32

XOP.128.X9.W0 82 /r

XOP

16/32/64-bit

§

XOP_Vfrczsd_xmm_xmmm64 = 4_131

VFRCZSD xmm1, xmm2/m64

XOP.128.X9.W0 83 /r

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmmm128_xmm = 4_132

VPROTB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 90 /r

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmm_xmmm128 = 4_133

VPROTB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 90 /r

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmmm128_xmm = 4_134

VPROTW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 91 /r

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmm_xmmm128 = 4_135

VPROTW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 91 /r

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmmm128_xmm = 4_136

VPROTD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 92 /r

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmm_xmmm128 = 4_137

VPROTD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 92 /r

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmmm128_xmm = 4_138

VPROTQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 93 /r

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmm_xmmm128 = 4_139

VPROTQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 93 /r

XOP

16/32/64-bit

§

XOP_Vpshlb_xmm_xmmm128_xmm = 4_140

VPSHLB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 94 /r

XOP

16/32/64-bit

§

XOP_Vpshlb_xmm_xmm_xmmm128 = 4_141

VPSHLB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 94 /r

XOP

16/32/64-bit

§

XOP_Vpshlw_xmm_xmmm128_xmm = 4_142

VPSHLW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 95 /r

XOP

16/32/64-bit

§

XOP_Vpshlw_xmm_xmm_xmmm128 = 4_143

VPSHLW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 95 /r

XOP

16/32/64-bit

§

XOP_Vpshld_xmm_xmmm128_xmm = 4_144

VPSHLD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 96 /r

XOP

16/32/64-bit

§

XOP_Vpshld_xmm_xmm_xmmm128 = 4_145

VPSHLD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 96 /r

XOP

16/32/64-bit

§

XOP_Vpshlq_xmm_xmmm128_xmm = 4_146

VPSHLQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 97 /r

XOP

16/32/64-bit

§

XOP_Vpshlq_xmm_xmm_xmmm128 = 4_147

VPSHLQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 97 /r

XOP

16/32/64-bit

§

XOP_Vpshab_xmm_xmmm128_xmm = 4_148

VPSHAB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 98 /r

XOP

16/32/64-bit

§

XOP_Vpshab_xmm_xmm_xmmm128 = 4_149

VPSHAB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 98 /r

XOP

16/32/64-bit

§

XOP_Vpshaw_xmm_xmmm128_xmm = 4_150

VPSHAW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 99 /r

XOP

16/32/64-bit

§

XOP_Vpshaw_xmm_xmm_xmmm128 = 4_151

VPSHAW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 99 /r

XOP

16/32/64-bit

§

XOP_Vpshad_xmm_xmmm128_xmm = 4_152

VPSHAD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 9A /r

XOP

16/32/64-bit

§

XOP_Vpshad_xmm_xmm_xmmm128 = 4_153

VPSHAD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 9A /r

XOP

16/32/64-bit

§

XOP_Vpshaq_xmm_xmmm128_xmm = 4_154

VPSHAQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 9B /r

XOP

16/32/64-bit

§

XOP_Vpshaq_xmm_xmm_xmmm128 = 4_155

VPSHAQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 9B /r

XOP

16/32/64-bit

§

XOP_Vphaddbw_xmm_xmmm128 = 4_156

VPHADDBW xmm1, xmm2/m128

XOP.128.X9.W0 C1 /r

XOP

16/32/64-bit

§

XOP_Vphaddbd_xmm_xmmm128 = 4_157

VPHADDBD xmm1, xmm2/m128

XOP.128.X9.W0 C2 /r

XOP

16/32/64-bit

§

XOP_Vphaddbq_xmm_xmmm128 = 4_158

VPHADDBQ xmm1, xmm2/m128

XOP.128.X9.W0 C3 /r

XOP

16/32/64-bit

§

XOP_Vphaddwd_xmm_xmmm128 = 4_159

VPHADDWD xmm1, xmm2/m128

XOP.128.X9.W0 C6 /r

XOP

16/32/64-bit

§

XOP_Vphaddwq_xmm_xmmm128 = 4_160

VPHADDWQ xmm1, xmm2/m128

XOP.128.X9.W0 C7 /r

XOP

16/32/64-bit

§

XOP_Vphadddq_xmm_xmmm128 = 4_161

VPHADDDQ xmm1, xmm2/m128

XOP.128.X9.W0 CB /r

XOP

16/32/64-bit

§

XOP_Vphaddubw_xmm_xmmm128 = 4_162

VPHADDUBW xmm1, xmm2/m128

XOP.128.X9.W0 D1 /r

XOP

16/32/64-bit

§

XOP_Vphaddubd_xmm_xmmm128 = 4_163

VPHADDUBD xmm1, xmm2/m128

XOP.128.X9.W0 D2 /r

XOP

16/32/64-bit

§

XOP_Vphaddubq_xmm_xmmm128 = 4_164

VPHADDUBQ xmm1, xmm2/m128

XOP.128.X9.W0 D3 /r

XOP

16/32/64-bit

§

XOP_Vphadduwd_xmm_xmmm128 = 4_165

VPHADDUWD xmm1, xmm2/m128

XOP.128.X9.W0 D6 /r

XOP

16/32/64-bit

§

XOP_Vphadduwq_xmm_xmmm128 = 4_166

VPHADDUWQ xmm1, xmm2/m128

XOP.128.X9.W0 D7 /r

XOP

16/32/64-bit

§

XOP_Vphaddudq_xmm_xmmm128 = 4_167

VPHADDUDQ xmm1, xmm2/m128

XOP.128.X9.W0 DB /r

XOP

16/32/64-bit

§

XOP_Vphsubbw_xmm_xmmm128 = 4_168

VPHSUBBW xmm1, xmm2/m128

XOP.128.X9.W0 E1 /r

XOP

16/32/64-bit

§

XOP_Vphsubwd_xmm_xmmm128 = 4_169

VPHSUBWD xmm1, xmm2/m128

XOP.128.X9.W0 E2 /r

XOP

16/32/64-bit

§

XOP_Vphsubdq_xmm_xmmm128 = 4_170

VPHSUBDQ xmm1, xmm2/m128

XOP.128.X9.W0 E3 /r

XOP

16/32/64-bit

§

XOP_Bextr_r32_rm32_imm32 = 4_171

BEXTR r32, r/m32, imm32

XOP.L0.XA.W0 10 /r id

TBM

16/32/64-bit

§

XOP_Bextr_r64_rm64_imm32 = 4_172

BEXTR r64, r/m64, imm32

XOP.L0.XA.W1 10 /r id

TBM

64-bit

§

XOP_Lwpins_r32_rm32_imm32 = 4_173

LWPINS r32, r/m32, imm32

XOP.L0.XA.W0 12 /0 id

LWP

16/32/64-bit

§

XOP_Lwpins_r64_rm32_imm32 = 4_174

LWPINS r64, r/m32, imm32

XOP.L0.XA.W1 12 /0 id

LWP

64-bit

§

XOP_Lwpval_r32_rm32_imm32 = 4_175

LWPVAL r32, r/m32, imm32

XOP.L0.XA.W0 12 /1 id

LWP

16/32/64-bit

§

XOP_Lwpval_r64_rm32_imm32 = 4_176

LWPVAL r64, r/m32, imm32

XOP.L0.XA.W1 12 /1 id

LWP

64-bit

§

D3NOW_Pi2fw_mm_mmm64 = 4_177

PI2FW mm, mm/m64

0F 0F /r 0C

3DNOWEXT

16/32/64-bit

§

D3NOW_Pi2fd_mm_mmm64 = 4_178

PI2FD mm, mm/m64

0F 0F /r 0D

3DNOW

16/32/64-bit

§

D3NOW_Pf2iw_mm_mmm64 = 4_179

PF2IW mm, mm/m64

0F 0F /r 1C

3DNOWEXT

16/32/64-bit

§

D3NOW_Pf2id_mm_mmm64 = 4_180

PF2ID mm, mm/m64

0F 0F /r 1D

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpv_mm_mmm64 = 4_181

PFRCPV mm, mm/m64

0F 0F /r 86

AMD Geode GX/LX

16/32-bit

§

D3NOW_Pfrsqrtv_mm_mmm64 = 4_182

PFRSQRTV mm, mm/m64

0F 0F /r 87

AMD Geode GX/LX

16/32-bit

§

D3NOW_Pfnacc_mm_mmm64 = 4_183

PFNACC mm, mm/m64

0F 0F /r 8A

3DNOWEXT

16/32/64-bit

§

D3NOW_Pfpnacc_mm_mmm64 = 4_184

PFPNACC mm, mm/m64

0F 0F /r 8E

3DNOWEXT

16/32/64-bit

§

D3NOW_Pfcmpge_mm_mmm64 = 4_185

PFCMPGE mm, mm/m64

0F 0F /r 90

3DNOW

16/32/64-bit

§

D3NOW_Pfmin_mm_mmm64 = 4_186

PFMIN mm, mm/m64

0F 0F /r 94

3DNOW

16/32/64-bit

§

D3NOW_Pfrcp_mm_mmm64 = 4_187

PFRCP mm, mm/m64

0F 0F /r 96

3DNOW

16/32/64-bit

§

D3NOW_Pfrsqrt_mm_mmm64 = 4_188

PFRSQRT mm, mm/m64

0F 0F /r 97

3DNOW

16/32/64-bit

§

D3NOW_Pfsub_mm_mmm64 = 4_189

PFSUB mm, mm/m64

0F 0F /r 9A

3DNOW

16/32/64-bit

§

D3NOW_Pfadd_mm_mmm64 = 4_190

PFADD mm, mm/m64

0F 0F /r 9E

3DNOW

16/32/64-bit

§

D3NOW_Pfcmpgt_mm_mmm64 = 4_191

PFCMPGT mm, mm/m64

0F 0F /r A0

3DNOW

16/32/64-bit

§

D3NOW_Pfmax_mm_mmm64 = 4_192

PFMAX mm, mm/m64

0F 0F /r A4

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpit1_mm_mmm64 = 4_193

PFRCPIT1 mm, mm/m64

0F 0F /r A6

3DNOW

16/32/64-bit

§

D3NOW_Pfrsqit1_mm_mmm64 = 4_194

PFRSQIT1 mm, mm/m64

0F 0F /r A7

3DNOW

16/32/64-bit

§

D3NOW_Pfsubr_mm_mmm64 = 4_195

PFSUBR mm, mm/m64

0F 0F /r AA

3DNOW

16/32/64-bit

§

D3NOW_Pfacc_mm_mmm64 = 4_196

PFACC mm, mm/m64

0F 0F /r AE

3DNOW

16/32/64-bit

§

D3NOW_Pfcmpeq_mm_mmm64 = 4_197

PFCMPEQ mm, mm/m64

0F 0F /r B0

3DNOW

16/32/64-bit

§

D3NOW_Pfmul_mm_mmm64 = 4_198

PFMUL mm, mm/m64

0F 0F /r B4

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpit2_mm_mmm64 = 4_199

PFRCPIT2 mm, mm/m64

0F 0F /r B6

3DNOW

16/32/64-bit

§

D3NOW_Pmulhrw_mm_mmm64 = 4_200

PMULHRW mm, mm/m64

0F 0F /r B7

3DNOW

16/32/64-bit

§

D3NOW_Pswapd_mm_mmm64 = 4_201

PSWAPD mm, mm/m64

0F 0F /r BB

3DNOWEXT

16/32/64-bit

§

D3NOW_Pavgusb_mm_mmm64 = 4_202

PAVGUSB mm, mm/m64

0F 0F /r BF

3DNOW

16/32/64-bit

§
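The 3DNow! entries above all share the `0F 0F /r` form, with the actual operation selected by a trailing opcode byte after the ModRM byte. A minimal decoding sketch (bytes hand-assembled; this assumes the decoder handles 3DNow! without extra options, which is its default behavior):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

// 0F 0F /r 9E -> PFADD mm0, mm1 (trailing byte 9E selects PFADD)
let bytes = [0x0F, 0x0F, 0xC1, 0x9E];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
assert_eq!(decoder.decode().code(), Code::D3NOW_Pfadd_mm_mmm64);
```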

Rmpadjust = 4_203

RMPADJUST

F3 0F 01 FE

SEV-SNP

64-bit

§

Rmpupdate = 4_204

RMPUPDATE

F2 0F 01 FE

SEV-SNP

64-bit

§

Psmash = 4_205

PSMASH

F3 0F 01 FF

SEV-SNP

64-bit

§

Pvalidatew = 4_206

PVALIDATE

a16 F2 0F 01 FF

SEV-SNP

16/32-bit

§

Pvalidated = 4_207

PVALIDATE

a32 F2 0F 01 FF

SEV-SNP

16/32/64-bit

§

Pvalidateq = 4_208

PVALIDATE

a64 F2 0F 01 FF

SEV-SNP

64-bit

§

Serialize = 4_209

SERIALIZE

NP 0F 01 E8

SERIALIZE

16/32/64-bit

§

Xsusldtrk = 4_210

XSUSLDTRK

F2 0F 01 E8

TSXLDTRK

16/32/64-bit

§

Xresldtrk = 4_211

XRESLDTRK

F2 0F 01 E9

TSXLDTRK

16/32/64-bit

§

Invlpgbw = 4_212

INVLPGB

a16 NP 0F 01 FE

INVLPGB

16/32-bit

§

Invlpgbd = 4_213

INVLPGB

a32 NP 0F 01 FE

INVLPGB

16/32/64-bit

§

Invlpgbq = 4_214

INVLPGB

a64 NP 0F 01 FE

INVLPGB

64-bit

§

Tlbsync = 4_215

TLBSYNC

NP 0F 01 FF

INVLPGB

16/32/64-bit

§

Prefetchreserved3_m8 = 4_216

PREFETCHW m8

0F 0D /3

PREFETCHW

16/32/64-bit

§

Prefetchreserved4_m8 = 4_217

PREFETCH m8

0F 0D /4

PREFETCHW

16/32/64-bit

§

Prefetchreserved5_m8 = 4_218

PREFETCH m8

0F 0D /5

PREFETCHW

16/32/64-bit

§

Prefetchreserved6_m8 = 4_219

PREFETCH m8

0F 0D /6

PREFETCHW

16/32/64-bit

§

Prefetchreserved7_m8 = 4_220

PREFETCH m8

0F 0D /7

PREFETCHW

16/32/64-bit

§

Ud0 = 4_221

UD0

0F FF

286+

16/32/64-bit

§

Vmgexit = 4_222

VMGEXIT

F3 0F 01 D9

SEV-ES

16/32/64-bit

§

Getsecq = 4_223

GETSECQ

NP o64 0F 37

SMX

64-bit

§

VEX_Ldtilecfg_m512 = 4_224

LDTILECFG m512

VEX.128.0F38.W0 49 !(11):000:bbb

AMX-TILE

64-bit

§

VEX_Tilerelease = 4_225

TILERELEASE

VEX.128.0F38.W0 49 C0

AMX-TILE

64-bit

§

VEX_Sttilecfg_m512 = 4_226

STTILECFG m512

VEX.128.66.0F38.W0 49 !(11):000:bbb

AMX-TILE

64-bit

§

VEX_Tilezero_tmm = 4_227

TILEZERO tmm1

VEX.128.F2.0F38.W0 49 11:rrr:000

AMX-TILE

64-bit

§

VEX_Tileloaddt1_tmm_sibmem = 4_228

TILELOADDT1 tmm1, sibmem

VEX.128.66.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tilestored_sibmem_tmm = 4_229

TILESTORED sibmem, tmm1

VEX.128.F3.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tileloadd_tmm_sibmem = 4_230

TILELOADD tmm1, sibmem

VEX.128.F2.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tdpbf16ps_tmm_tmm_tmm = 4_231

TDPBF16PS tmm1, tmm2, tmm3

VEX.128.F3.0F38.W0 5C 11:rrr:bbb

AMX-BF16

64-bit

§

VEX_Tdpbuud_tmm_tmm_tmm = 4_232

TDPBUUD tmm1, tmm2, tmm3

VEX.128.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbusd_tmm_tmm_tmm = 4_233

TDPBUSD tmm1, tmm2, tmm3

VEX.128.66.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbsud_tmm_tmm_tmm = 4_234

TDPBSUD tmm1, tmm2, tmm3

VEX.128.F3.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbssd_tmm_tmm_tmm = 4_235

TDPBSSD tmm1, tmm2, tmm3

VEX.128.F2.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§
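The AMX entries above are 64-bit-only VEX encodings in the 0F38 map. A hedged decoding sketch for the simplest one, TILERELEASE (bytes hand-assembled for illustration; decoding must be done in 64-bit mode since these variants are not valid elsewhere):

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

// VEX.128.0F38.W0 49 C0 -> TILERELEASE (AMX-TILE, 64-bit only)
let bytes = [0xC4, 0xE2, 0x78, 0x49, 0xC0];
let mut decoder = Decoder::new(64, &bytes, DecoderOptions::NONE);
assert_eq!(decoder.decode().code(), Code::VEX_Tilerelease);
```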

Fnstdw_AX = 4_236

FNSTDW AX

DF E1

387 SL

16/32-bit

§

Fnstsg_AX = 4_237

FNSTSG AX

DF E2

387 SL

16/32-bit

§

Rdshr_rm32 = 4_238

RDSHR r/m32

0F 36 /0

Cyrix 6x86MX, M II, III

16/32-bit

§

Wrshr_rm32 = 4_239

WRSHR r/m32

0F 37 /0

Cyrix 6x86MX, M II, III

16/32-bit

§

Smint = 4_240

SMINT

0F 38

Cyrix 6x86MX+, AMD Geode GX/LX

16/32-bit

§

Dmint = 4_241

DMINT

0F 39

AMD Geode GX/LX

16/32-bit

§

Rdm = 4_242

RDM

0F 3A

AMD Geode GX/LX

16/32-bit

§

Svdc_m80_Sreg = 4_243

SVDC m80, Sreg

0F 78 /r

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsdc_Sreg_m80 = 4_244

RSDC Sreg, m80

0F 79 /r

Cyrix, AMD Geode GX/LX

16/32-bit

§

Svldt_m80 = 4_245

SVLDT m80

0F 7A /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsldt_m80 = 4_246

RSLDT m80

0F 7B /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Svts_m80 = 4_247

SVTS m80

0F 7C /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsts_m80 = 4_248

RSTS m80

0F 7D /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Smint_0F7E = 4_249

SMINT

0F 7E

Cyrix 6x86 or earlier

16/32-bit

§

Bb0_reset = 4_250

BB0_RESET

0F 3A

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Bb1_reset = 4_251

BB1_RESET

0F 3B

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Cpu_write = 4_252

CPU_WRITE

0F 3C

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Cpu_read = 4_253

CPU_READ

0F 3D

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Altinst = 4_254

ALTINST

0F 3F

Centaur AIS

16/32-bit

§

Paveb_mm_mmm64 = 4_255

PAVEB mm, mm/m64

0F 50 /r

CYRIX_EMMI

16/32-bit

§

Paddsiw_mm_mmm64 = 4_256

PADDSIW mm, mm/m64

0F 51 /r

CYRIX_EMMI

16/32-bit

§

Pmagw_mm_mmm64 = 4_257

PMAGW mm, mm/m64

0F 52 /r

CYRIX_EMMI

16/32-bit

§

Pdistib_mm_m64 = 4_258

PDISTIB mm, m64

0F 54 /r

CYRIX_EMMI

16/32-bit

§

Psubsiw_mm_mmm64 = 4_259

PSUBSIW mm, mm/m64

0F 55 /r

CYRIX_EMMI

16/32-bit

§

Pmvzb_mm_m64 = 4_260

PMVZB mm, m64

0F 58 /r

CYRIX_EMMI

16/32-bit

§

Pmulhrw_mm_mmm64 = 4_261

PMULHRW mm, mm/m64

0F 59 /r

CYRIX_EMMI

16/32-bit

§

Pmvnzb_mm_m64 = 4_262

PMVNZB mm, m64

0F 5A /r

CYRIX_EMMI

16/32-bit

§

Pmvlzb_mm_m64 = 4_263

PMVLZB mm, m64

0F 5B /r

CYRIX_EMMI

16/32-bit

§

Pmvgezb_mm_m64 = 4_264

PMVGEZB mm, m64

0F 5C /r

CYRIX_EMMI

16/32-bit

§

Pmulhriw_mm_mmm64 = 4_265

PMULHRIW mm, mm/m64

0F 5D /r

CYRIX_EMMI

16/32-bit

§

Pmachriw_mm_m64 = 4_266

PMACHRIW mm, m64

0F 5E /r

CYRIX_EMMI

16/32-bit

§

Cyrix_D9D7 = 4_267

UNDOC

D9 D7

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_D9E2 = 4_268

UNDOC

D9 E2

Cyrix, AMD Geode GX/LX

16/32-bit

§

Ftstp = 4_269

FTSTP

D9 E6

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_D9E7 = 4_270

UNDOC

D9 E7

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frint2 = 4_271

FRINT2

DB FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frichop = 4_272

FRICHOP

DD FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DED8 = 4_273

UNDOC

DE D8

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDA = 4_274

UNDOC

DE DA

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDC = 4_275

UNDOC

DE DC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDD = 4_276

UNDOC

DE DD

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDE = 4_277

UNDOC

DE DE

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frinear = 4_278

FRINEAR

DF FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Tdcall = 4_279

TDCALL

66 0F 01 CC

TDX

16/32/64-bit

§

Seamret = 4_280

SEAMRET

66 0F 01 CD

TDX

64-bit

§

Seamops = 4_281

SEAMOPS

66 0F 01 CE

TDX

64-bit

§

Seamcall = 4_282

SEAMCALL

66 0F 01 CF

TDX

64-bit

§

Aesencwide128kl_m384 = 4_283

AESENCWIDE128KL m384, <XMM0-7>

F3 0F 38 D8 !(11):000:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesdecwide128kl_m384 = 4_284

AESDECWIDE128KL m384, <XMM0-7>

F3 0F 38 D8 !(11):001:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesencwide256kl_m512 = 4_285

AESENCWIDE256KL m512, <XMM0-7>

F3 0F 38 D8 !(11):010:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesdecwide256kl_m512 = 4_286

AESDECWIDE256KL m512, <XMM0-7>

F3 0F 38 D8 !(11):011:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Loadiwkey_xmm_xmm = 4_287

LOADIWKEY xmm1, xmm2, <EAX>, <XMM0>

F3 0F 38 DC 11:rrr:bbb

KL

16/32/64-bit

§

Aesenc128kl_xmm_m384 = 4_288

AESENC128KL xmm, m384

F3 0F 38 DC !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesdec128kl_xmm_m384 = 4_289

AESDEC128KL xmm, m384

F3 0F 38 DD !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesenc256kl_xmm_m512 = 4_290

AESENC256KL xmm, m512

F3 0F 38 DE !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesdec256kl_xmm_m512 = 4_291

AESDEC256KL xmm, m512

F3 0F 38 DF !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Encodekey128_r32_r32 = 4_292

ENCODEKEY128 r32, r32, <XMM0-2>, <XMM4-6>

F3 0F 38 FA 11:rrr:bbb

AESKLE

16/32/64-bit

§

Encodekey256_r32_r32 = 4_293

ENCODEKEY256 r32, r32, <XMM0-6>

F3 0F 38 FB 11:rrr:bbb

AESKLE

16/32/64-bit

§

VEX_Vbroadcastss_xmm_xmm = 4_294

VBROADCASTSS xmm1, xmm2

VEX.128.66.0F38.W0 18 /r

AVX2

16/32/64-bit

§

VEX_Vbroadcastss_ymm_xmm = 4_295

VBROADCASTSS ymm1, xmm2

VEX.256.66.0F38.W0 18 /r

AVX2

16/32/64-bit

§

VEX_Vbroadcastsd_ymm_xmm = 4_296

VBROADCASTSD ymm1, xmm2

VEX.256.66.0F38.W0 19 /r

AVX2

16/32/64-bit

§

Vmgexit_F2 = 4_297

VMGEXIT

F2 0F 01 D9

SEV-ES

16/32/64-bit

§

Uiret = 4_298

UIRET

F3 0F 01 EC

UINTR

64-bit

§

Testui = 4_299

TESTUI

F3 0F 01 ED

UINTR

64-bit

§

Clui = 4_300

CLUI

F3 0F 01 EE

UINTR

64-bit

§

Stui = 4_301

STUI

F3 0F 01 EF

UINTR

64-bit

§

Senduipi_r64 = 4_302

SENDUIPI r64

F3 0F C7 /6

UINTR

64-bit

§

Hreset_imm8 = 4_303

HRESET imm8, <EAX>

F3 0F 3A F0 C0 ib

HRESET

16/32/64-bit

§

VEX_Vpdpbusd_xmm_xmm_xmmm128 = 4_304

VPDPBUSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 50 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusd_ymm_ymm_ymmm256 = 4_305

VPDPBUSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 50 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusds_xmm_xmm_xmmm128 = 4_306

VPDPBUSDS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 51 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusds_ymm_ymm_ymmm256 = 4_307

VPDPBUSDS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 51 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssd_xmm_xmm_xmmm128 = 4_308

VPDPWSSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 52 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssd_ymm_ymm_ymmm256 = 4_309

VPDPWSSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 52 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssds_xmm_xmm_xmmm128 = 4_310

VPDPWSSDS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 53 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssds_ymm_ymm_ymmm256 = 4_311

VPDPWSSDS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 53 /r

AVX-VNNI

16/32/64-bit

§

Ccs_hash_16 = 4_312

CCS_HASH

a16 F3 0F A6 E8

PADLOCK_GMI

16/32-bit

§

Ccs_hash_32 = 4_313

CCS_HASH

a32 F3 0F A6 E8

PADLOCK_GMI

16/32/64-bit

§

Ccs_hash_64 = 4_314

CCS_HASH

a64 F3 0F A6 E8

PADLOCK_GMI

64-bit

§

Ccs_encrypt_16 = 4_315

CCS_ENCRYPT

a16 F3 0F A7 F0

PADLOCK_GMI

16/32-bit

§

Ccs_encrypt_32 = 4_316

CCS_ENCRYPT

a32 F3 0F A7 F0

PADLOCK_GMI

16/32/64-bit

§

Ccs_encrypt_64 = 4_317

CCS_ENCRYPT

a64 F3 0F A7 F0

PADLOCK_GMI

64-bit

§

Lkgs_rm16 = 4_318

LKGS r/m16

o16 F2 0F 00 /6

LKGS

64-bit

§

Lkgs_r32m16 = 4_319

LKGS r32/m16

o32 F2 0F 00 /6

LKGS

64-bit

§

Lkgs_r64m16 = 4_320

LKGS r64/m16

F2 o64 0F 00 /6

LKGS

64-bit

§

Eretu = 4_321

ERETU

F3 0F 01 CA

FRED

64-bit

§

Erets = 4_322

ERETS

F2 0F 01 CA

FRED

64-bit

§

EVEX_Vaddph_xmm_k1z_xmm_xmmm128b16 = 4_323

VADDPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 58 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vaddph_ymm_k1z_ymm_ymmm256b16 = 4_324

VADDPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 58 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vaddph_zmm_k1z_zmm_zmmm512b16_er = 4_325

VADDPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 58 /r

AVX512-FP16

16/32/64-bit

§
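Among the VADDPH variants above, only the 512-bit `{er}` form supports embedded rounding. A sketch of how that distinction surfaces in the metadata, assuming the `op_code_info` feature (enabled by default):

```rust
use iced_x86::Code;

// The {er} variant advertises rounding-control support; the 128-bit form does not.
let er = Code::EVEX_Vaddph_zmm_k1z_zmm_zmmm512b16_er.op_code();
let no_er = Code::EVEX_Vaddph_xmm_k1z_xmm_xmmm128b16.op_code();
assert!(er.can_use_rounding_control());
assert!(!no_er.can_use_rounding_control());
```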

EVEX_Vaddsh_xmm_k1z_xmm_xmmm16_er = 4_326

VADDSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 58 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_xmm_xmmm128b16_imm8 = 4_327

VCMPPH k1 {k2}, xmm2, xmm3/m128/m16bcst, imm8

EVEX.128.0F3A.W0 C2 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_ymm_ymmm256b16_imm8 = 4_328

VCMPPH k1 {k2}, ymm2, ymm3/m256/m16bcst, imm8

EVEX.256.0F3A.W0 C2 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_zmm_zmmm512b16_imm8_sae = 4_329

VCMPPH k1 {k2}, zmm2, zmm3/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 C2 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpsh_kr_k1_xmm_xmmm16_imm8_sae = 4_330

VCMPSH k1 {k2}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.F3.0F3A.W0 C2 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vcomish_xmm_xmmm16_sae = 4_331

VCOMISH xmm1, xmm2/m16{sae}

EVEX.LIG.MAP5.W0 2F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_xmm_k1z_xmmm128b32 = 4_332

VCVTDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_xmm_k1z_ymmm256b32 = 4_333

VCVTDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_ymm_k1z_zmmm512b32_er = 4_334

VCVTDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_xmmm128b64 = 4_335

VCVTPD2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.MAP5.W1 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_ymmm256b64 = 4_336

VCVTPD2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.MAP5.W1 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_zmmm512b64_er = 4_337

VCVTPD2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.MAP5.W1 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_xmm_k1z_xmmm64b16 = 4_338

VCVTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.66.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_ymm_k1z_xmmm128b16 = 4_339

VCVTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.66.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_zmm_k1z_ymmm256b16_er = 4_340

VCVTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}

EVEX.512.66.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_xmm_k1z_xmmm32b16 = 4_341

VCVTPH2PD xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.MAP5.W0 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_ymm_k1z_xmmm64b16 = 4_342

VCVTPH2PD ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.MAP5.W0 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_zmm_k1z_xmmm128b16_sae = 4_343

VCVTPH2PD zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.MAP5.W0 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_xmm_k1z_xmmm64b16 = 4_344

VCVTPH2PSX xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.66.MAP6.W0 13 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_ymm_k1z_xmmm128b16 = 4_345

VCVTPH2PSX ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.66.MAP6.W0 13 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_zmm_k1z_ymmm256b16_sae = 4_346

VCVTPH2PSX zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.66.MAP6.W0 13 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_xmm_k1z_xmmm32b16 = 4_347

VCVTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 7B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_ymm_k1z_xmmm64b16 = 4_348

VCVTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 7B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_zmm_k1z_xmmm128b16_er = 4_349

VCVTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}

EVEX.512.66.MAP5.W0 7B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_xmm_k1z_xmmm64b16 = 4_350

VCVTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_ymm_k1z_xmmm128b16 = 4_351

VCVTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_zmm_k1z_ymmm256b16_er = 4_352

VCVTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}

EVEX.512.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_xmm_k1z_xmmm32b16 = 4_353

VCVTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_ymm_k1z_xmmm64b16 = 4_354

VCVTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_zmm_k1z_xmmm128b16_er = 4_355

VCVTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}

EVEX.512.66.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_xmm_k1z_xmmm128b16 = 4_356

VCVTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_ymm_k1z_ymmm256b16 = 4_357

VCVTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_zmm_k1z_zmmm512b16_er = 4_358

VCVTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_xmm_k1z_xmmm128b16 = 4_359

VCVTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_ymm_k1z_ymmm256b16 = 4_360

VCVTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_zmm_k1z_zmmm512b16_er = 4_361

VCVTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.66.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_xmm_k1z_xmmm128b32 = 4_362

VCVTPS2PHX xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.MAP5.W0 1D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_xmm_k1z_ymmm256b32 = 4_363

VCVTPS2PHX xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.MAP5.W0 1D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_ymm_k1z_zmmm512b32_er = 4_364

VCVTPS2PHX ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.66.MAP5.W0 1D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_xmmm128b64 = 4_365

VCVTQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.MAP5.W1 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_ymmm256b64 = 4_366

VCVTQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.MAP5.W1 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_zmmm512b64_er = 4_367

VCVTQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.MAP5.W1 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsd2sh_xmm_k1z_xmm_xmmm64_er = 4_368

VCVTSD2SH xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.MAP5.W1 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2sd_xmm_k1z_xmm_xmmm16_sae = 4_369

VCVTSH2SD xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2si_r32_xmmm16_er = 4_370

VCVTSH2SI r32, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W0 2D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2si_r64_xmmm16_er = 4_371

VCVTSH2SI r64, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W1 2D /r

AVX512-FP16

64-bit

§

EVEX_Vcvtsh2ss_xmm_k1z_xmm_xmmm16_sae = 4_372

VCVTSH2SS xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.MAP6.W0 13 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2usi_r32_xmmm16_er = 4_373

VCVTSH2USI r32, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2usi_r64_xmmm16_er = 4_374

VCVTSH2USI r64, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W1 79 /r

AVX512-FP16

64-bit

§

EVEX_Vcvtsi2sh_xmm_xmm_rm32_er = 4_375

VCVTSI2SH xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.MAP5.W0 2A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsi2sh_xmm_xmm_rm64_er = 4_376

VCVTSI2SH xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.MAP5.W1 2A /r

AVX512-FP16

64-bit

§

EVEX_Vcvtss2sh_xmm_k1z_xmm_xmmm32_er = 4_377

VCVTSS2SH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.MAP5.W0 1D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_xmm_k1z_xmmm64b16 = 4_378

VCVTTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.F3.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_ymm_k1z_xmmm128b16 = 4_379

VCVTTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.F3.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_zmm_k1z_ymmm256b16_sae = 4_380

VCVTTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.F3.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_xmm_k1z_xmmm32b16 = 4_381

VCVTTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_ymm_k1z_xmmm64b16 = 4_382

VCVTTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_zmm_k1z_xmmm128b16_sae = 4_383

VCVTTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.66.MAP5.W0 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_xmm_k1z_xmmm64b16 = 4_384

VCVTTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_ymm_k1z_xmmm128b16 = 4_385

VCVTTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_zmm_k1z_ymmm256b16_sae = 4_386

VCVTTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_xmm_k1z_xmmm32b16 = 4_387

VCVTTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_ymm_k1z_xmmm64b16 = 4_388

VCVTTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_zmm_k1z_xmmm128b16_sae = 4_389

VCVTTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.66.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_xmm_k1z_xmmm128b16 = 4_390

VCVTTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_ymm_k1z_ymmm256b16 = 4_391

VCVTTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_zmm_k1z_zmmm512b16_sae = 4_392

VCVTTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.MAP5.W0 7C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_xmm_k1z_xmmm128b16 = 4_393

VCVTTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_ymm_k1z_ymmm256b16 = 4_394

VCVTTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_zmm_k1z_zmmm512b16_sae = 4_395

VCVTTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.66.MAP5.W0 7C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2si_r32_xmmm16_sae = 4_396

VCVTTSH2SI r32, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W0 2C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2si_r64_xmmm16_sae = 4_397

VCVTTSH2SI r64, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W1 2C /r

AVX512-FP16

64-bit

§

EVEX_Vcvttsh2usi_r32_xmmm16_sae = 4_398

VCVTTSH2USI r32, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2usi_r64_xmmm16_sae = 4_399

VCVTTSH2USI r64, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W1 78 /r

AVX512-FP16

64-bit

§

EVEX_Vcvtudq2ph_xmm_k1z_xmmm128b32 = 4_400

VCVTUDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F2.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtudq2ph_xmm_k1z_ymmm256b32 = 4_401

VCVTUDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F2.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtudq2ph_ymm_k1z_zmmm512b32_er = 4_402

VCVTUDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.F2.MAP5.W0 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_xmmm128b64 = 4_403

VCVTUQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.MAP5.W1 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_ymmm256b64 = 4_404

VCVTUQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.MAP5.W1 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_zmmm512b64_er = 4_405

VCVTUQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.MAP5.W1 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtusi2sh_xmm_xmm_rm32_er = 4_406

VCVTUSI2SH xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.MAP5.W0 7B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtusi2sh_xmm_xmm_rm64_er = 4_407

VCVTUSI2SH xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.MAP5.W1 7B /r

AVX512-FP16

64-bit

§

EVEX_Vcvtuw2ph_xmm_k1z_xmmm128b16 = 4_408

VCVTUW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.F2.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuw2ph_ymm_k1z_ymmm256b16 = 4_409

VCVTUW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.F2.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuw2ph_zmm_k1z_zmmm512b16_er = 4_410

VCVTUW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.F2.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_xmm_k1z_xmmm128b16 = 4_411

VCVTW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.F3.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_ymm_k1z_ymmm256b16 = 4_412

VCVTW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.F3.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_zmm_k1z_zmmm512b16_er = 4_413

VCVTW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.F3.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_xmm_k1z_xmm_xmmm128b16 = 4_414

VDIVPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_ymm_k1z_ymm_ymmm256b16 = 4_415

VDIVPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_zmm_k1z_zmm_zmmm512b16_er = 4_416

VDIVPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 5E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vdivsh_xmm_k1z_xmm_xmmm16_er = 4_417

VDIVSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 5E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_xmm_k1z_xmm_xmmm128b32 = 4_418

VFCMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_ymm_k1z_ymm_ymmm256b32 = 4_419

VFCMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_zmm_k1z_zmm_zmmm512b32_er = 4_420

VFCMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F2.MAP6.W0 56 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_xmm_k1z_xmm_xmmm128b32 = 4_421

VFMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_ymm_k1z_ymm_ymmm256b32 = 4_422

VFMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_zmm_k1z_zmm_zmmm512b32_er = 4_423

VFMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F3.MAP6.W0 56 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcsh_xmm_k1z_xmm_xmmm32_er = 4_424

VFCMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F2.MAP6.W0 57 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcsh_xmm_k1z_xmm_xmmm32_er = 4_425

VFMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.MAP6.W0 57 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_xmm_k1z_xmm_xmmm128b32 = 4_426

VFCMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_ymm_k1z_ymm_ymmm256b32 = 4_427

VFCMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_zmm_k1z_zmm_zmmm512b32_er = 4_428

VFCMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F2.MAP6.W0 D6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_xmm_k1z_xmm_xmmm128b32 = 4_429

VFMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_ymm_k1z_ymm_ymmm256b32 = 4_430

VFMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_zmm_k1z_zmm_zmmm512b32_er = 4_431

VFMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F3.MAP6.W0 D6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcsh_xmm_k1z_xmm_xmmm32_er = 4_432

VFCMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F2.MAP6.W0 D7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcsh_xmm_k1z_xmm_xmmm32_er = 4_433

VFMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.MAP6.W0 D7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_434

VFMADDSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 96 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_435

VFMADDSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 96 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_436

VFMADDSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 96 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_437

VFMADDSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_438

VFMADDSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_439

VFMADDSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_440

VFMADDSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_441

VFMADDSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_442

VFMADDSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_443

VFMSUBADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 97 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_444

VFMSUBADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 97 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_445

VFMSUBADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 97 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_446

VFMSUBADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_447

VFMSUBADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_448

VFMSUBADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_449

VFMSUBADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_450

VFMSUBADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_451

VFMSUBADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_452

VFMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 98 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_453

VFMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 98 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_454

VFMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 98 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_455

VFMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_456

VFMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_457

VFMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A8 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_458

VFMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_459

VFMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_460

VFMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B8 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_xmm_k1z_xmm_xmmm128b16 = 4_461

VFNMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_ymm_k1z_ymm_ymmm256b16 = 4_462

VFNMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_zmm_k1z_zmm_zmmm512b16_er = 4_463

VFNMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_xmm_k1z_xmm_xmmm128b16 = 4_464

VFNMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_ymm_k1z_ymm_ymmm256b16 = 4_465

VFNMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_zmm_k1z_zmm_zmmm512b16_er = 4_466

VFNMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AC /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_xmm_k1z_xmm_xmmm128b16 = 4_467

VFNMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_ymm_k1z_ymm_ymmm256b16 = 4_468

VFNMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_zmm_k1z_zmm_zmmm512b16_er = 4_469

VFNMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BC /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132sh_xmm_k1z_xmm_xmmm16_er = 4_470

VFMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 99 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213sh_xmm_k1z_xmm_xmmm16_er = 4_471

VFMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 A9 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231sh_xmm_k1z_xmm_xmmm16_er = 4_472

VFMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 B9 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132sh_xmm_k1z_xmm_xmmm16_er = 4_473

VFNMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213sh_xmm_k1z_xmm_xmmm16_er = 4_474

VFNMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AD /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231sh_xmm_k1z_xmm_xmmm16_er = 4_475

VFNMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BD /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_476

VFMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_477

VFMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_478

VFMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_479

VFMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_480

VFMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_481

VFMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AA /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_482

VFMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_483

VFMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_484

VFMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BA /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_xmm_k1z_xmm_xmmm128b16 = 4_485

VFNMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_ymm_k1z_ymm_ymmm256b16 = 4_486

VFNMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_zmm_k1z_zmm_zmmm512b16_er = 4_487

VFNMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_xmm_k1z_xmm_xmmm128b16 = 4_488

VFNMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_ymm_k1z_ymm_ymmm256b16 = 4_489

VFNMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_zmm_k1z_zmm_zmmm512b16_er = 4_490

VFNMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AE /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_xmm_k1z_xmm_xmmm128b16 = 4_491

VFNMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_ymm_k1z_ymm_ymmm256b16 = 4_492

VFNMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_zmm_k1z_zmm_zmmm512b16_er = 4_493

VFNMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BE /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132sh_xmm_k1z_xmm_xmmm16_er = 4_494

VFMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213sh_xmm_k1z_xmm_xmmm16_er = 4_495

VFMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AB /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231sh_xmm_k1z_xmm_xmmm16_er = 4_496

VFMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BB /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132sh_xmm_k1z_xmm_xmmm16_er = 4_497

VFNMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213sh_xmm_k1z_xmm_xmmm16_er = 4_498

VFNMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AF /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231sh_xmm_k1z_xmm_xmmm16_er = 4_499

VFNMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BF /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_xmmm128b16_imm8 = 4_500

VFPCLASSPH k1 {k2}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 66 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_ymmm256b16_imm8 = 4_501

VFPCLASSPH k1 {k2}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 66 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_zmmm512b16_imm8 = 4_502

VFPCLASSPH k1 {k2}, zmm2/m512/m16bcst, imm8

EVEX.512.0F3A.W0 66 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclasssh_kr_k1_xmmm16_imm8 = 4_503

VFPCLASSSH k1 {k2}, xmm2/m16, imm8

EVEX.LIG.0F3A.W0 67 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_xmm_k1z_xmmm128b16 = 4_504

VGETEXPPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 42 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_ymm_k1z_ymmm256b16 = 4_505

VGETEXPPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 42 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_zmm_k1z_zmmm512b16_sae = 4_506

VGETEXPPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.66.MAP6.W0 42 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpsh_xmm_k1z_xmm_xmmm16_sae = 4_507

VGETEXPSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.66.MAP6.W0 43 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_xmm_k1z_xmmm128b16_imm8 = 4_508

VGETMANTPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 26 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_ymm_k1z_ymmm256b16_imm8 = 4_509

VGETMANTPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 26 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_zmm_k1z_zmmm512b16_imm8_sae = 4_510

VGETMANTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 26 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantsh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_511

VGETMANTSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 27 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_xmm_k1z_xmm_xmmm128b16 = 4_512

VMAXPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5F /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_ymm_k1z_ymm_ymmm256b16 = 4_513

VMAXPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5F /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_zmm_k1z_zmm_zmmm512b16_sae = 4_514

VMAXPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}

EVEX.512.MAP5.W0 5F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxsh_xmm_k1z_xmm_xmmm16_sae = 4_515

VMAXSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_xmm_k1z_xmm_xmmm128b16 = 4_516

VMINPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_ymm_k1z_ymm_ymmm256b16 = 4_517

VMINPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_zmm_k1z_zmm_zmmm512b16_sae = 4_518

VMINPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}

EVEX.512.MAP5.W0 5D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vminsh_xmm_k1z_xmm_xmmm16_sae = 4_519

VMINSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_m16 = 4_520

VMOVSH xmm1 {k1}{z}, m16

EVEX.LIG.F3.MAP5.W0 10 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_m16_k1_xmm = 4_521

VMOVSH m16 {k1}, xmm1

EVEX.LIG.F3.MAP5.W0 11 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_xmm_xmm = 4_522

VMOVSH xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.MAP5.W0 10 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_xmm_xmm_MAP5_11 = 4_523

VMOVSH xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.MAP5.W0 11 /r

AVX512-FP16

16/32/64-bit

§
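
Register-to-register VMOVSH has two encodings (MAP5 opcode `10` and opcode `11`), so two `Code` values map to the same assembly text; `OpCodeInfo` can tell them apart. A sketch, assuming the `instruction_string()` and `op_code_string()` accessors of `OpCodeInfo`:

```rust
use iced_x86::Code;

fn main() {
    let a = Code::EVEX_Vmovsh_xmm_k1z_xmm_xmm.op_code();
    let b = Code::EVEX_Vmovsh_xmm_k1z_xmm_xmm_MAP5_11.op_code();

    // Same assembly syntax...
    assert_eq!(a.instruction_string(), b.instruction_string());
    // ...but different opcodes (MAP5 10 /r vs MAP5 11 /r).
    assert_ne!(a.op_code_string(), b.op_code_string());
}
```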

EVEX_Vmovw_xmm_r32m16 = 4_524

VMOVW xmm1, r32/m16

EVEX.128.66.MAP5.W0 6E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovw_xmm_r64m16 = 4_525

VMOVW xmm1, r64/m16

EVEX.128.66.MAP5.W1 6E /r

AVX512-FP16

64-bit

§

EVEX_Vmovw_r32m16_xmm = 4_526

VMOVW r32/m16, xmm1

EVEX.128.66.MAP5.W0 7E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovw_r64m16_xmm = 4_527

VMOVW r64/m16, xmm1

EVEX.128.66.MAP5.W1 7E /r

AVX512-FP16

64-bit

§

EVEX_Vmulph_xmm_k1z_xmm_xmmm128b16 = 4_528

VMULPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 59 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmulph_ymm_k1z_ymm_ymmm256b16 = 4_529

VMULPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 59 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmulph_zmm_k1z_zmm_zmmm512b16_er = 4_530

VMULPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 59 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmulsh_xmm_k1z_xmm_xmmm16_er = 4_531

VMULSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 59 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_xmm_k1z_xmmm128b16 = 4_532

VRCPPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 4C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_ymm_k1z_ymmm256b16 = 4_533

VRCPPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 4C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_zmm_k1z_zmmm512b16 = 4_534

VRCPPH zmm1 {k1}{z}, zmm2/m512/m16bcst

EVEX.512.66.MAP6.W0 4C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpsh_xmm_k1z_xmm_xmmm16 = 4_535

VRCPSH xmm1 {k1}{z}, xmm2, xmm3/m16

EVEX.LIG.66.MAP6.W0 4D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_xmm_k1z_xmmm128b16_imm8 = 4_536

VREDUCEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 56 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_ymm_k1z_ymmm256b16_imm8 = 4_537

VREDUCEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 56 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_zmm_k1z_zmmm512b16_imm8_sae = 4_538

VREDUCEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 56 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vreducesh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_539

VREDUCESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 57 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_xmm_k1z_xmmm128b16_imm8 = 4_540

VRNDSCALEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 08 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_ymm_k1z_ymmm256b16_imm8 = 4_541

VRNDSCALEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 08 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_zmm_k1z_zmmm512b16_imm8_sae = 4_542

VRNDSCALEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 08 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscalesh_xmm_k1z_xmm_xmmm16_imm8_sae = 4_543

VRNDSCALESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 0A /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_xmm_k1z_xmmm128b16 = 4_544

VRSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 4E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_ymm_k1z_ymmm256b16 = 4_545

VRSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 4E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_zmm_k1z_zmmm512b16 = 4_546

VRSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst

EVEX.512.66.MAP6.W0 4E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtsh_xmm_k1z_xmm_xmmm16 = 4_547

VRSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16

EVEX.LIG.66.MAP6.W0 4F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_xmm_k1z_xmm_xmmm128b16 = 4_548

VSCALEFPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 2C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_ymm_k1z_ymm_ymmm256b16 = 4_549

VSCALEFPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 2C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_zmm_k1z_zmm_zmmm512b16_er = 4_550

VSCALEFPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 2C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefsh_xmm_k1z_xmm_xmmm16_er = 4_551

VSCALEFSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 2D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_xmm_k1z_xmmm128b16 = 4_552

VSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 51 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_ymm_k1z_ymmm256b16 = 4_553

VSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 51 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_zmm_k1z_zmmm512b16_er = 4_554

VSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.MAP5.W0 51 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtsh_xmm_k1z_xmm_xmmm16_er = 4_555

VSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 51 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_xmm_k1z_xmm_xmmm128b16 = 4_556

VSUBPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_ymm_k1z_ymm_ymmm256b16 = 4_557

VSUBPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_zmm_k1z_zmm_zmmm512b16_er = 4_558

VSUBPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 5C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsubsh_xmm_k1z_xmm_xmmm16_er = 4_559

VSUBSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 5C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vucomish_xmm_xmmm16_sae = 4_560

VUCOMISH xmm1, xmm2/m16{sae}

EVEX.LIG.MAP5.W0 2E /r

AVX512-FP16

16/32/64-bit

§

Rdudbg = 4_561

RDUDBG

0F 0E

UDBG

16/32/64-bit

§

Wrudbg = 4_562

WRUDBG

0F 0F

UDBG

16/32/64-bit

§
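
RDUDBG and WRUDBG are undocumented opcodes, and the decoder only maps `0F 0E`/`0F 0F` to them when explicitly asked; by default those bytes keep their documented meanings. A sketch, assuming `DecoderOptions::UDBG` is the flag that enables them:

```rust
use iced_x86::{Code, Decoder, DecoderOptions};

fn main() {
    let bytes = b"\x0F\x0E"; // RDUDBG per the listing above

    // Without the option this byte pair is not decoded as RDUDBG.
    let plain = Decoder::with_ip(64, bytes, 0, DecoderOptions::NONE).decode();
    assert_ne!(plain.code(), Code::Rdudbg);

    // With DecoderOptions::UDBG it is.
    let udbg = Decoder::with_ip(64, bytes, 0, DecoderOptions::UDBG).decode();
    assert_eq!(udbg.code(), Code::Rdudbg);
}
```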

VEX_KNC_Jkzd_kr_rel8_64 = 4_563

JKZD k1, rel8

VEX.128.W0 74 cb

KNC

64-bit

§

VEX_KNC_Jknzd_kr_rel8_64 = 4_564

JKNZD k1, rel8

VEX.128.W0 75 cb

KNC

64-bit

§
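
All of the KNC entries from here on (the `VEX_KNC_*` and `MVEX_*` codes) reuse encodings that mean something else, or nothing at all, on mainstream CPUs, so the decoder only produces them when asked. A sketch, assuming `DecoderOptions::KNC` is the relevant flag; the byte buffer is a placeholder standing in for real Xeon Phi code:

```rust
use iced_x86::{Decoder, DecoderOptions};

fn main() {
    // Placeholder buffer: substitute bytes dumped from an actual KNC binary.
    let knc_code: &[u8] = &[0x90, 0x90];

    // KNC is 64-bit only, and the KNC interpretations of these encodings
    // are only selected when DecoderOptions::KNC is set.
    let mut decoder = Decoder::with_ip(64, knc_code, 0x1000, DecoderOptions::KNC);
    for instr in &mut decoder {
        println!("{:016X} {:?}", instr.ip(), instr.code());
    }
}
```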

VEX_KNC_Vprefetchnta_m8 = 4_565

VPREFETCHNTA m8

VEX.128.0F.WIG 18 /0

KNC

64-bit

§

VEX_KNC_Vprefetch0_m8 = 4_566

VPREFETCH0 m8

VEX.128.0F.WIG 18 /1

KNC

64-bit

§

VEX_KNC_Vprefetch1_m8 = 4_567

VPREFETCH1 m8

VEX.128.0F.WIG 18 /2

KNC

64-bit

§

VEX_KNC_Vprefetch2_m8 = 4_568

VPREFETCH2 m8

VEX.128.0F.WIG 18 /3

KNC

64-bit

§

VEX_KNC_Vprefetchenta_m8 = 4_569

VPREFETCHENTA m8

VEX.128.0F.WIG 18 /4

KNC

64-bit

§

VEX_KNC_Vprefetche0_m8 = 4_570

VPREFETCHE0 m8

VEX.128.0F.WIG 18 /5

KNC

64-bit

§

VEX_KNC_Vprefetche1_m8 = 4_571

VPREFETCHE1 m8

VEX.128.0F.WIG 18 /6

KNC

64-bit

§

VEX_KNC_Vprefetche2_m8 = 4_572

VPREFETCHE2 m8

VEX.128.0F.WIG 18 /7

KNC

64-bit

§

VEX_KNC_Kand_kr_kr = 4_573

KAND k1, k2

VEX.128.0F.W0 41 /r

KNC

64-bit

§

VEX_KNC_Kandn_kr_kr = 4_574

KANDN k1, k2

VEX.128.0F.W0 42 /r

KNC

64-bit

§

VEX_KNC_Kandnr_kr_kr = 4_575

KANDNR k1, k2

VEX.128.0F.W0 43 /r

KNC

64-bit

§

VEX_KNC_Knot_kr_kr = 4_576

KNOT k1, k2

VEX.128.0F.W0 44 /r

KNC

64-bit

§

VEX_KNC_Kor_kr_kr = 4_577

KOR k1, k2

VEX.128.0F.W0 45 /r

KNC

64-bit

§

VEX_KNC_Kxnor_kr_kr = 4_578

KXNOR k1, k2

VEX.128.0F.W0 46 /r

KNC

64-bit

§

VEX_KNC_Kxor_kr_kr = 4_579

KXOR k1, k2

VEX.128.0F.W0 47 /r

KNC

64-bit

§

VEX_KNC_Kmerge2l1h_kr_kr = 4_580

KMERGE2L1H k1, k2

VEX.128.0F.W0 48 /r

KNC

64-bit

§

VEX_KNC_Kmerge2l1l_kr_kr = 4_581

KMERGE2L1L k1, k2

VEX.128.0F.W0 49 /r

KNC

64-bit

§

VEX_KNC_Jkzd_kr_rel32_64 = 4_582

JKZD k1, rel32

VEX.128.0F.W0 84 cd

KNC

64-bit

§

VEX_KNC_Jknzd_kr_rel32_64 = 4_583

JKNZD k1, rel32

VEX.128.0F.W0 85 cd

KNC

64-bit

§

VEX_KNC_Kmov_kr_kr = 4_584

KMOV k1, k2

VEX.128.0F.W0 90 /r

KNC

64-bit

§

VEX_KNC_Kmov_kr_r32 = 4_585

KMOV k1, r32

VEX.128.0F.W0 92 /r

KNC

64-bit

§

VEX_KNC_Kmov_r32_kr = 4_586

KMOV r32, k1

VEX.128.0F.W0 93 /r

KNC

64-bit

§

VEX_KNC_Kconcath_r64_kr_kr = 4_587

KCONCATH r64, k1, k2

VEX.128.0F.W0 95 /r

KNC

64-bit

§

VEX_KNC_Kconcatl_r64_kr_kr = 4_588

KCONCATL r64, k1, k2

VEX.128.0F.W0 97 /r

KNC

64-bit

§

VEX_KNC_Kortest_kr_kr = 4_589

KORTEST k1, k2

VEX.128.0F.W0 98 /r

KNC

64-bit

§

VEX_KNC_Delay_r32 = 4_590

DELAY r32

VEX.128.F3.0F.W0 AE /6

KNC

64-bit

§

VEX_KNC_Delay_r64 = 4_591

DELAY r64

VEX.128.F3.0F.W1 AE /6

KNC

64-bit

§

VEX_KNC_Spflt_r32 = 4_592

SPFLT r32

VEX.128.F2.0F.W0 AE /6

KNC

64-bit

§

VEX_KNC_Spflt_r64 = 4_593

SPFLT r64

VEX.128.F2.0F.W1 AE /6

KNC

64-bit

§

VEX_KNC_Clevict1_m8 = 4_594

CLEVICT1 m8

VEX.128.F3.0F.WIG AE /7

KNC

64-bit

§

VEX_KNC_Clevict0_m8 = 4_595

CLEVICT0 m8

VEX.128.F2.0F.WIG AE /7

KNC

64-bit

§

VEX_KNC_Popcnt_r32_r32 = 4_596

POPCNT r32, r32

VEX.128.F3.0F.W0 B8 /r

KNC

64-bit

§

VEX_KNC_Popcnt_r64_r64 = 4_597

POPCNT r64, r64

VEX.128.F3.0F.W1 B8 /r

KNC

64-bit

§

VEX_KNC_Tzcnt_r32_r32 = 4_598

TZCNT r32, r32

VEX.128.F3.0F.W0 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnt_r64_r64 = 4_599

TZCNT r64, r64

VEX.128.F3.0F.W1 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnti_r32_r32 = 4_600

TZCNTI r32, r32

VEX.128.F2.0F.W0 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnti_r64_r64 = 4_601

TZCNTI r64, r64

VEX.128.F2.0F.W1 BC /r

KNC

64-bit

§

VEX_KNC_Lzcnt_r32_r32 = 4_602

LZCNT r32, r32

VEX.128.F3.0F.W0 BD /r

KNC

64-bit

§

VEX_KNC_Lzcnt_r64_r64 = 4_603

LZCNT r64, r64

VEX.128.F3.0F.W1 BD /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F3_0F38_W0_F0 = 4_604

UNDOC r32, r/m32

VEX.128.F3.0F38.W0 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F3_0F38_W1_F0 = 4_605

UNDOC r64, r/m64

VEX.128.F3.0F38.W1 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F0 = 4_606

UNDOC r32, r/m32

VEX.128.F2.0F38.W0 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F0 = 4_607

UNDOC r64, r/m64

VEX.128.F2.0F38.W1 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F1 = 4_608

UNDOC r32, r/m32

VEX.128.F2.0F38.W0 F1 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F1 = 4_609

UNDOC r64, r/m64

VEX.128.F2.0F38.W1 F1 /r

KNC

64-bit

§

VEX_KNC_Kextract_kr_r64_imm8 = 4_610

KEXTRACT k1, r64, imm8

VEX.128.66.0F3A.W0 3E /r ib

KNC

64-bit

§

MVEX_Vprefetchnta_m = 4_611

VPREFETCHNTA m

MVEX.512.0F.WIG 18 /0

KNC

64-bit

§

MVEX_Vprefetch0_m = 4_612

VPREFETCH0 m

MVEX.512.0F.WIG 18 /1

KNC

64-bit

§

MVEX_Vprefetch1_m = 4_613

VPREFETCH1 m

MVEX.512.0F.WIG 18 /2

KNC

64-bit

§

MVEX_Vprefetch2_m = 4_614

VPREFETCH2 m

MVEX.512.0F.WIG 18 /3

KNC

64-bit

§

MVEX_Vprefetchenta_m = 4_615

VPREFETCHENTA m

MVEX.512.0F.WIG 18 /4

KNC

64-bit

§

MVEX_Vprefetche0_m = 4_616

VPREFETCHE0 m

MVEX.512.0F.WIG 18 /5

KNC

64-bit

§

MVEX_Vprefetche1_m = 4_617

VPREFETCHE1 m

MVEX.512.0F.WIG 18 /6

KNC

64-bit

§

MVEX_Vprefetche2_m = 4_618

VPREFETCHE2 m

MVEX.512.0F.WIG 18 /7

KNC

64-bit

§

MVEX_Vmovaps_zmm_k1_zmmmt = 4_619

VMOVAPS zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.0F.W0 28 /r

KNC

64-bit

§

MVEX_Vmovapd_zmm_k1_zmmmt = 4_620

VMOVAPD zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F.W1 28 /r

KNC

64-bit

§

MVEX_Vmovaps_mt_k1_zmm = 4_621

VMOVAPS mt {k1}, Df32(zmm1)

MVEX.512.0F.W0 29 /r

KNC

64-bit

§

MVEX_Vmovapd_mt_k1_zmm = 4_622

VMOVAPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F.W1 29 /r

KNC

64-bit

§

MVEX_Vmovnrapd_m_k1_zmm = 4_623

VMOVNRAPD m {k1}, Df64(zmm1)

MVEX.512.F3.0F.W1.EH0 29 /r

KNC

64-bit

§

MVEX_Vmovnrngoapd_m_k1_zmm = 4_624

VMOVNRNGOAPD m {k1}, Df64(zmm1)

MVEX.512.F3.0F.W1.EH1 29 /r

KNC

64-bit

§

MVEX_Vmovnraps_m_k1_zmm = 4_625

VMOVNRAPS m {k1}, Df32(zmm1)

MVEX.512.F2.0F.W0.EH0 29 /r

KNC

64-bit

§

MVEX_Vmovnrngoaps_m_k1_zmm = 4_626

VMOVNRNGOAPS m {k1}, Df32(zmm1)

MVEX.512.F2.0F.W0.EH1 29 /r

KNC

64-bit

§

MVEX_Vaddps_zmm_k1_zmm_zmmmt = 4_627

VADDPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 58 /r

KNC

64-bit

§

MVEX_Vaddpd_zmm_k1_zmm_zmmmt = 4_628

VADDPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 58 /r

KNC

64-bit

§

MVEX_Vmulps_zmm_k1_zmm_zmmmt = 4_629

VMULPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 59 /r

KNC

64-bit

§

MVEX_Vmulpd_zmm_k1_zmm_zmmmt = 4_630

VMULPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 59 /r

KNC

64-bit

§

MVEX_Vcvtps2pd_zmm_k1_zmmmt = 4_631

VCVTPS2PD zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.0F.W0 5A /r

KNC

64-bit

§

MVEX_Vcvtpd2ps_zmm_k1_zmmmt = 4_632

VCVTPD2PS zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F.W1 5A /r

KNC

64-bit

§

MVEX_Vsubps_zmm_k1_zmm_zmmmt = 4_633

VSUBPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 5C /r

KNC

64-bit

§

MVEX_Vsubpd_zmm_k1_zmm_zmmmt = 4_634

VSUBPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 5C /r

KNC

64-bit

§

MVEX_Vpcmpgtd_kr_k1_zmm_zmmmt = 4_635

VPCMPGTD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F.W0 66 /r

KNC

64-bit

§

MVEX_Vmovdqa32_zmm_k1_zmmmt = 4_636

VMOVDQA32 zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.66.0F.W0 6F /r

KNC

64-bit

§

MVEX_Vmovdqa64_zmm_k1_zmmmt = 4_637

VMOVDQA64 zmm1 {k1}, Si64(zmm2/mt)

MVEX.512.66.0F.W1 6F /r

KNC

64-bit

§

MVEX_Vpshufd_zmm_k1_zmmmt_imm8 = 4_638

VPSHUFD zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F.W0 70 /r ib

KNC

64-bit

§

MVEX_Vpsrld_zmm_k1_zmmmt_imm8 = 4_639

VPSRLD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /2 ib

KNC

64-bit

§

MVEX_Vpsrad_zmm_k1_zmmmt_imm8 = 4_640

VPSRAD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /4 ib

KNC

64-bit

§

MVEX_Vpslld_zmm_k1_zmmmt_imm8 = 4_641

VPSLLD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /6 ib

KNC

64-bit

§

MVEX_Vpcmpeqd_kr_k1_zmm_zmmmt = 4_642

VPCMPEQD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F.W0 76 /r

KNC

64-bit

§

MVEX_Vcvtudq2pd_zmm_k1_zmmmt = 4_643

VCVTUDQ2PD zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.F3.0F.W0 7A /r

KNC

64-bit

§

MVEX_Vmovdqa32_mt_k1_zmm = 4_644

VMOVDQA32 mt {k1}, Di32(zmm1)

MVEX.512.66.0F.W0 7F /r

KNC

64-bit

§

MVEX_Vmovdqa64_mt_k1_zmm = 4_645

VMOVDQA64 mt {k1}, Di64(zmm1)

MVEX.512.66.0F.W1 7F /r

KNC

64-bit

§

MVEX_Clevict1_m = 4_646

CLEVICT1 m

MVEX.512.F3.0F.WIG AE /7

KNC

64-bit

§

MVEX_Clevict0_m = 4_647

CLEVICT0 m

MVEX.512.F2.0F.WIG AE /7

KNC

64-bit

§

MVEX_Vcmpps_kr_k1_zmm_zmmmt_imm8 = 4_648

VCMPPS k2 {k1}, zmm1, Sf32(zmm2/mt), imm8

MVEX.NDS.512.0F.W0 C2 /r ib

KNC

64-bit

§

MVEX_Vcmppd_kr_k1_zmm_zmmmt_imm8 = 4_649

VCMPPD k2 {k1}, zmm1, Sf64(zmm2/mt), imm8

MVEX.NDS.512.66.0F.W1 C2 /r ib

KNC

64-bit

§

MVEX_Vpandd_zmm_k1_zmm_zmmmt = 4_650

VPANDD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 DB /r

KNC

64-bit

§

MVEX_Vpandq_zmm_k1_zmm_zmmmt = 4_651

VPANDQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 DB /r

KNC

64-bit

§

MVEX_Vpandnd_zmm_k1_zmm_zmmmt = 4_652

VPANDND zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 DF /r

KNC

64-bit

§

MVEX_Vpandnq_zmm_k1_zmm_zmmmt = 4_653

VPANDNQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 DF /r

KNC

64-bit

§

MVEX_Vcvtdq2pd_zmm_k1_zmmmt = 4_654

VCVTDQ2PD zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.F3.0F.W0 E6 /r

KNC

64-bit

§

MVEX_Vpord_zmm_k1_zmm_zmmmt = 4_655

VPORD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 EB /r

KNC

64-bit

§

MVEX_Vporq_zmm_k1_zmm_zmmmt = 4_656

VPORQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 EB /r

KNC

64-bit

§

MVEX_Vpxord_zmm_k1_zmm_zmmmt = 4_657

VPXORD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 EF /r

KNC

64-bit

§

MVEX_Vpxorq_zmm_k1_zmm_zmmmt = 4_658

VPXORQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 EF /r

KNC

64-bit

§

MVEX_Vpsubd_zmm_k1_zmm_zmmmt = 4_659

VPSUBD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 FA /r

KNC

64-bit

§

MVEX_Vpaddd_zmm_k1_zmm_zmmmt = 4_660

VPADDD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 FE /r

KNC

64-bit

§

MVEX_Vbroadcastss_zmm_k1_mt = 4_661

VBROADCASTSS zmm1 {k1}, Uf32(mt)

MVEX.512.66.0F38.W0 18 /r

KNC

64-bit

§

MVEX_Vbroadcastsd_zmm_k1_mt = 4_662

VBROADCASTSD zmm1 {k1}, Uf64(mt)

MVEX.512.66.0F38.W1 19 /r

KNC

64-bit

§

MVEX_Vbroadcastf32x4_zmm_k1_mt = 4_663

VBROADCASTF32X4 zmm1 {k1}, Uf32(mt)

MVEX.512.66.0F38.W0 1A /r

KNC

64-bit

§

MVEX_Vbroadcastf64x4_zmm_k1_mt = 4_664

VBROADCASTF64X4 zmm1 {k1}, Uf64(mt)

MVEX.512.66.0F38.W1 1B /r

KNC

64-bit

§

MVEX_Vptestmd_kr_k1_zmm_zmmmt = 4_665

VPTESTMD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F38.W0 27 /r

KNC

64-bit

§

MVEX_Vpermd_zmm_k1_zmm_zmmmt = 4_666

VPERMD zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 36 /r

KNC

64-bit

§

MVEX_Vpminsd_zmm_k1_zmm_zmmmt = 4_667

VPMINSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 39 /r

KNC

64-bit

§

MVEX_Vpminud_zmm_k1_zmm_zmmmt = 4_668

VPMINUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3B /r

KNC

64-bit

§

MVEX_Vpmaxsd_zmm_k1_zmm_zmmmt = 4_669

VPMAXSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3D /r

KNC

64-bit

§

MVEX_Vpmaxud_zmm_k1_zmm_zmmmt = 4_670

VPMAXUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3F /r

KNC

64-bit

§

MVEX_Vpmulld_zmm_k1_zmm_zmmmt = 4_671

VPMULLD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 40 /r

KNC

64-bit

§

MVEX_Vgetexpps_zmm_k1_zmmmt = 4_672

VGETEXPPS zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.66.0F38.W0 42 /r

KNC

64-bit

§

MVEX_Vgetexppd_zmm_k1_zmmmt = 4_673

VGETEXPPD zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F38.W1 42 /r

KNC

64-bit

§

MVEX_Vpsrlvd_zmm_k1_zmm_zmmmt = 4_674

VPSRLVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 45 /r

KNC

64-bit

§

MVEX_Vpsravd_zmm_k1_zmm_zmmmt = 4_675

VPSRAVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 46 /r

KNC

64-bit

§

MVEX_Vpsllvd_zmm_k1_zmm_zmmmt = 4_676

VPSLLVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 47 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_48 = 4_677

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 48 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_49 = 4_678

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 49 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4A = 4_679

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 4A /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4B = 4_680

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 4B /r

KNC

64-bit

§

MVEX_Vaddnps_zmm_k1_zmm_zmmmt = 4_681

VADDNPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 50 /r

KNC

64-bit

§

MVEX_Vaddnpd_zmm_k1_zmm_zmmmt = 4_682

VADDNPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 50 /r

KNC

64-bit

§

MVEX_Vgmaxabsps_zmm_k1_zmm_zmmmt = 4_683

VGMAXABSPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 51 /r

KNC

64-bit

§

MVEX_Vgminps_zmm_k1_zmm_zmmmt = 4_684

VGMINPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 52 /r

KNC

64-bit

§

MVEX_Vgminpd_zmm_k1_zmm_zmmmt = 4_685

VGMINPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 52 /r

KNC

64-bit

§

MVEX_Vgmaxps_zmm_k1_zmm_zmmmt = 4_686

VGMAXPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 53 /r

KNC

64-bit

§

MVEX_Vgmaxpd_zmm_k1_zmm_zmmmt = 4_687

VGMAXPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 53 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_54 = 4_688

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 54 /r

KNC

64-bit

§

MVEX_Vfixupnanps_zmm_k1_zmm_zmmmt = 4_689

VFIXUPNANPS zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 55 /r

KNC

64-bit

§

MVEX_Vfixupnanpd_zmm_k1_zmm_zmmmt = 4_690

VFIXUPNANPD zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 55 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_56 = 4_691

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 56 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_57 = 4_692

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 57 /r

KNC

64-bit

§

MVEX_Vpbroadcastd_zmm_k1_mt = 4_693

VPBROADCASTD zmm1 {k1}, Ui32(mt)

MVEX.512.66.0F38.W0 58 /r

KNC

64-bit

§

MVEX_Vpbroadcastq_zmm_k1_mt = 4_694

VPBROADCASTQ zmm1 {k1}, Ui64(mt)

MVEX.512.66.0F38.W1 59 /r

KNC

64-bit

§

MVEX_Vbroadcasti32x4_zmm_k1_mt = 4_695

VBROADCASTI32X4 zmm1 {k1}, Ui32(mt)

MVEX.512.66.0F38.W0 5A /r

KNC

64-bit

§

MVEX_Vbroadcasti64x4_zmm_k1_mt = 4_696

VBROADCASTI64X4 zmm1 {k1}, Ui64(mt)

MVEX.512.66.0F38.W1 5B /r

KNC

64-bit

§

MVEX_Vpadcd_zmm_k1_kr_zmmmt = 4_697

VPADCD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5C /r

KNC

64-bit

§

MVEX_Vpaddsetcd_zmm_k1_kr_zmmmt = 4_698

VPADDSETCD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5D /r

KNC

64-bit

§

MVEX_Vpsbbd_zmm_k1_kr_zmmmt = 4_699

VPSBBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5E /r

KNC

64-bit

§

MVEX_Vpsubsetbd_zmm_k1_kr_zmmmt = 4_700

VPSUBSETBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5F /r

KNC

64-bit

§

MVEX_Vpblendmd_zmm_k1_zmm_zmmmt = 4_701

VPBLENDMD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 64 /r

KNC

64-bit

§

MVEX_Vpblendmq_zmm_k1_zmm_zmmmt = 4_702

VPBLENDMQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 64 /r

KNC

64-bit

§

MVEX_Vblendmps_zmm_k1_zmm_zmmmt = 4_703

VBLENDMPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 65 /r

KNC

64-bit

§

MVEX_Vblendmpd_zmm_k1_zmm_zmmmt = 4_704

VBLENDMPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 65 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_67 = 4_705

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 67 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_68 = 4_706

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 68 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_69 = 4_707

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 69 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6A = 4_708

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 6A /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6B = 4_709

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 6B /r

KNC

64-bit

§

MVEX_Vpsubrd_zmm_k1_zmm_zmmmt = 4_710

VPSUBRD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6C /r

KNC

64-bit

§

MVEX_Vsubrps_zmm_k1_zmm_zmmmt = 4_711

VSUBRPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6D /r

KNC

64-bit

§

MVEX_Vsubrpd_zmm_k1_zmm_zmmmt = 4_712

VSUBRPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 6D /r

KNC

64-bit

§

MVEX_Vpsbbrd_zmm_k1_kr_zmmmt = 4_713

VPSBBRD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6E /r

KNC

64-bit

§

MVEX_Vpsubrsetbd_zmm_k1_kr_zmmmt = 4_714

VPSUBRSETBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6F /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_70 = 4_715

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 70 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_71 = 4_716

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 71 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_72 = 4_717

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 72 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_73 = 4_718

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 73 /r

KNC

64-bit

§

MVEX_Vpcmpltd_kr_k1_zmm_zmmmt = 4_719

VPCMPLTD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F38.W0 74 /r

KNC

64-bit

§

MVEX_Vscaleps_zmm_k1_zmm_zmmmt = 4_720

VSCALEPS zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 84 /r

KNC

64-bit

§

MVEX_Vpmulhud_zmm_k1_zmm_zmmmt = 4_721

VPMULHUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 86 /r

KNC

64-bit

§

MVEX_Vpmulhd_zmm_k1_zmm_zmmmt = 4_722

VPMULHD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 87 /r

KNC

64-bit

§

MVEX_Vpgatherdd_zmm_k1_mvt = 4_723

VPGATHERDD zmm1 {k1}, Ui32(mvt)

MVEX.512.66.0F38.W0 90 /vsib

KNC

64-bit

§

MVEX_Vpgatherdq_zmm_k1_mvt = 4_724

VPGATHERDQ zmm1 {k1}, Ui64(mvt)

MVEX.512.66.0F38.W1 90 /vsib

KNC

64-bit

§

MVEX_Vgatherdps_zmm_k1_mvt = 4_725

VGATHERDPS zmm1 {k1}, Uf32(mvt)

MVEX.512.66.0F38.W0 92 /vsib

KNC

64-bit

§

MVEX_Vgatherdpd_zmm_k1_mvt = 4_726

VGATHERDPD zmm1 {k1}, Uf64(mvt)

MVEX.512.66.0F38.W1 92 /vsib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_94 = 4_727

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 94 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_94 = 4_728

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W1 94 /r

KNC

64-bit

§

MVEX_Vfmadd132ps_zmm_k1_zmm_zmmmt = 4_729

VFMADD132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 98 /r

KNC

64-bit

§

MVEX_Vfmadd132pd_zmm_k1_zmm_zmmmt = 4_730

VFMADD132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 98 /r

KNC

64-bit

§

MVEX_Vfmsub132ps_zmm_k1_zmm_zmmmt = 4_731

VFMSUB132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9A /r

KNC

64-bit

§

MVEX_Vfmsub132pd_zmm_k1_zmm_zmmmt = 4_732

VFMSUB132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9A /r

KNC

64-bit

§

MVEX_Vfnmadd132ps_zmm_k1_zmm_zmmmt = 4_733

VFNMADD132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9C /r

KNC

64-bit

§

MVEX_Vfnmadd132pd_zmm_k1_zmm_zmmmt = 4_734

VFNMADD132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9C /r

KNC

64-bit

§

MVEX_Vfnmsub132ps_zmm_k1_zmm_zmmmt = 4_735

VFNMSUB132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9E /r

KNC

64-bit

§

MVEX_Vfnmsub132pd_zmm_k1_zmm_zmmmt = 4_736

VFNMSUB132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9E /r

KNC

64-bit

§

MVEX_Vpscatterdd_mvt_k1_zmm = 4_737

VPSCATTERDD mvt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 A0 /vsib

KNC

64-bit

§

MVEX_Vpscatterdq_mvt_k1_zmm = 4_738

VPSCATTERDQ mvt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 A0 /vsib

KNC

64-bit

§

MVEX_Vscatterdps_mvt_k1_zmm = 4_739

VSCATTERDPS mvt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 A2 /vsib

KNC

64-bit

§

MVEX_Vscatterdpd_mvt_k1_zmm = 4_740

VSCATTERDPD mvt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 A2 /vsib

KNC

64-bit

§

MVEX_Vfmadd233ps_zmm_k1_zmm_zmmmt = 4_741

VFMADD233PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 A4 /r

KNC

64-bit

§

MVEX_Vfmadd213ps_zmm_k1_zmm_zmmmt = 4_742

VFMADD213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 A8 /r

KNC

64-bit

§

MVEX_Vfmadd213pd_zmm_k1_zmm_zmmmt = 4_743

VFMADD213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 A8 /r

KNC

64-bit

§

MVEX_Vfmsub213ps_zmm_k1_zmm_zmmmt = 4_744

VFMSUB213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AA /r

KNC

64-bit

§

MVEX_Vfmsub213pd_zmm_k1_zmm_zmmmt = 4_745

VFMSUB213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AA /r

KNC

64-bit

§

MVEX_Vfnmadd213ps_zmm_k1_zmm_zmmmt = 4_746

VFNMADD213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AC /r

KNC

64-bit

§

MVEX_Vfnmadd213pd_zmm_k1_zmm_zmmmt = 4_747

VFNMADD213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AC /r

KNC

64-bit

§

MVEX_Vfnmsub213ps_zmm_k1_zmm_zmmmt = 4_748

VFNMSUB213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AE /r

KNC

64-bit

§

MVEX_Vfnmsub213pd_zmm_k1_zmm_zmmmt = 4_749

VFNMSUB213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B0 = 4_750

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 B0 /vsib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B2 = 4_751

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 B2 /vsib

KNC

64-bit

§

MVEX_Vpmadd233d_zmm_k1_zmm_zmmmt = 4_752

VPMADD233D zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B4 /r

KNC

64-bit

§

MVEX_Vpmadd231d_zmm_k1_zmm_zmmmt = 4_753

VPMADD231D zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B5 /r

KNC

64-bit

§

MVEX_Vfmadd231ps_zmm_k1_zmm_zmmmt = 4_754

VFMADD231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B8 /r

KNC

64-bit

§

MVEX_Vfmadd231pd_zmm_k1_zmm_zmmmt = 4_755

VFMADD231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 B8 /r

KNC

64-bit

§

MVEX_Vfmsub231ps_zmm_k1_zmm_zmmmt = 4_756

VFMSUB231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BA /r

KNC

64-bit

§

MVEX_Vfmsub231pd_zmm_k1_zmm_zmmmt = 4_757

VFMSUB231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BA /r

KNC

64-bit

§

MVEX_Vfnmadd231ps_zmm_k1_zmm_zmmmt = 4_758

VFNMADD231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BC /r

KNC

64-bit

§

MVEX_Vfnmadd231pd_zmm_k1_zmm_zmmmt = 4_759

VFNMADD231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BC /r

KNC

64-bit

§

MVEX_Vfnmsub231ps_zmm_k1_zmm_zmmmt = 4_760

VFNMSUB231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BE /r

KNC

64-bit

§

MVEX_Vfnmsub231pd_zmm_k1_zmm_zmmmt = 4_761

VFNMSUB231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_C0 = 4_762

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 C0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0hintdps_mvt_k1 = 4_763

VGATHERPF0HINTDPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0hintdpd_mvt_k1 = 4_764

VGATHERPF0HINTDPD Uf64(mvt) {k1}

MVEX.512.66.0F38.W1 C6 /0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0dps_mvt_k1 = 4_765

VGATHERPF0DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /1 /vsib

KNC

64-bit

§

MVEX_Vgatherpf1dps_mvt_k1 = 4_766

VGATHERPF1DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /2 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0hintdps_mvt_k1 = 4_767

VSCATTERPF0HINTDPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /4 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0hintdpd_mvt_k1 = 4_768

VSCATTERPF0HINTDPD Uf64(mvt) {k1}

MVEX.512.66.0F38.W1 C6 /4 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0dps_mvt_k1 = 4_769

VSCATTERPF0DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /5 /vsib

KNC

64-bit

§

MVEX_Vscatterpf1dps_mvt_k1 = 4_770

VSCATTERPF1DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /6 /vsib

KNC

64-bit

§

MVEX_Vexp223ps_zmm_k1_zmmmt = 4_771

VEXP223PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 C8 /r

KNC

64-bit

§

MVEX_Vlog2ps_zmm_k1_zmmmt = 4_772

VLOG2PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 C9 /r

KNC

64-bit

§

MVEX_Vrcp23ps_zmm_k1_zmmmt = 4_773

VRCP23PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 CA /r

KNC

64-bit

§

MVEX_Vrsqrt23ps_zmm_k1_zmmmt = 4_774

VRSQRT23PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 CB /r

KNC

64-bit

§

MVEX_Vaddsetsps_zmm_k1_zmm_zmmmt = 4_775

VADDSETSPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 CC /r

KNC

64-bit

§

MVEX_Vpaddsetsd_zmm_k1_zmm_zmmmt = 4_776

VPADDSETSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 CD /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CE = 4_777

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 CE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_CE = 4_778

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W1 CE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CF = 4_779

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 CF /r

KNC

64-bit

§

MVEX_Vloadunpackld_zmm_k1_mt = 4_780

VLOADUNPACKLD zmm1 {k1}, Ui32(mt)

MVEX.512.0F38.W0 D0 /r

KNC

64-bit

§

MVEX_Vloadunpacklq_zmm_k1_mt = 4_781

VLOADUNPACKLQ zmm1 {k1}, Ui64(mt)

MVEX.512.0F38.W1 D0 /r

KNC

64-bit

§

MVEX_Vpackstoreld_mt_k1_zmm = 4_782

VPACKSTORELD mt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 D0 /r

KNC

64-bit

§

MVEX_Vpackstorelq_mt_k1_zmm = 4_783

VPACKSTORELQ mt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 D0 /r

KNC

64-bit

§

MVEX_Vloadunpacklps_zmm_k1_mt = 4_784

VLOADUNPACKLPS zmm1 {k1}, Uf32(mt)

MVEX.512.0F38.W0 D1 /r

KNC

64-bit

§

MVEX_Vloadunpacklpd_zmm_k1_mt = 4_785

VLOADUNPACKLPD zmm1 {k1}, Uf64(mt)

MVEX.512.0F38.W1 D1 /r

KNC

64-bit

§

MVEX_Vpackstorelps_mt_k1_zmm = 4_786

VPACKSTORELPS mt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 D1 /r

KNC

64-bit

§

MVEX_Vpackstorelpd_mt_k1_zmm = 4_787

VPACKSTORELPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 D1 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D2 = 4_788

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D2 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D2 = 4_789

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 D2 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D3 = 4_790

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D3 /r

KNC

64-bit

§

MVEX_Vloadunpackhd_zmm_k1_mt = 4_791

VLOADUNPACKHD zmm1 {k1}, Ui32(mt)

MVEX.512.0F38.W0 D4 /r

KNC

64-bit

§

MVEX_Vloadunpackhq_zmm_k1_mt = 4_792

VLOADUNPACKHQ zmm1 {k1}, Ui64(mt)

MVEX.512.0F38.W1 D4 /r

KNC

64-bit

§

MVEX_Vpackstorehd_mt_k1_zmm = 4_793

VPACKSTOREHD mt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 D4 /r

KNC

64-bit

§

MVEX_Vpackstorehq_mt_k1_zmm = 4_794

VPACKSTOREHQ mt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 D4 /r

KNC

64-bit

§

MVEX_Vloadunpackhps_zmm_k1_mt = 4_795

VLOADUNPACKHPS zmm1 {k1}, Uf32(mt)

MVEX.512.0F38.W0 D5 /r

KNC

64-bit

§

MVEX_Vloadunpackhpd_zmm_k1_mt = 4_796

VLOADUNPACKHPD zmm1 {k1}, Uf64(mt)

MVEX.512.0F38.W1 D5 /r

KNC

64-bit

§

MVEX_Vpackstorehps_mt_k1_zmm = 4_797

VPACKSTOREHPS mt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 D5 /r

KNC

64-bit

§

MVEX_Vpackstorehpd_mt_k1_zmm = 4_798

VPACKSTOREHPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 D5 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D6 = 4_799

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D6 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D6 = 4_800

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 D6 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D7 = 4_801

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D7 /r

KNC

64-bit

§

MVEX_Valignd_zmm_k1_zmm_zmmmt_imm8 = 4_802

VALIGND zmm1 {k1}, zmm2, zmm3/mt, imm8

MVEX.NDS.512.66.0F3A.W0 03 /r ib

KNC

64-bit

§

MVEX_Vpermf32x4_zmm_k1_zmmmt_imm8 = 4_803

VPERMF32X4 zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 07 /r ib

KNC

64-bit

§

MVEX_Vpcmpud_kr_k1_zmm_zmmmt_imm8 = 4_804

VPCMPUD k2 {k1}, zmm1, Si32(zmm2/mt), imm8

MVEX.NDS.512.66.0F3A.W0 1E /r ib

KNC

64-bit

§

MVEX_Vpcmpd_kr_k1_zmm_zmmmt_imm8 = 4_805

VPCMPD k2 {k1}, zmm1, Si32(zmm2/mt), imm8

MVEX.NDS.512.66.0F3A.W0 1F /r ib

KNC

64-bit

§

MVEX_Vgetmantps_zmm_k1_zmmmt_imm8 = 4_806

VGETMANTPS zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 26 /r ib

KNC

64-bit

§

MVEX_Vgetmantpd_zmm_k1_zmmmt_imm8 = 4_807

VGETMANTPD zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.66.0F3A.W1 26 /r ib

KNC

64-bit

§

MVEX_Vrndfxpntps_zmm_k1_zmmmt_imm8 = 4_808

VRNDFXPNTPS zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 52 /r ib

KNC

64-bit

§

MVEX_Vrndfxpntpd_zmm_k1_zmmmt_imm8 = 4_809

VRNDFXPNTPD zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.66.0F3A.W1 52 /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntudq2ps_zmm_k1_zmmmt_imm8 = 4_810

VCVTFXPNTUDQ2PS zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.512.0F3A.W0 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntps2udq_zmm_k1_zmmmt_imm8 = 4_811

VCVTFXPNTPS2UDQ zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntpd2udq_zmm_k1_zmmmt_imm8 = 4_812

VCVTFXPNTPD2UDQ zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.F2.0F3A.W1 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntdq2ps_zmm_k1_zmmmt_imm8 = 4_813

VCVTFXPNTDQ2PS zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.512.0F3A.W0 CB /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntps2dq_zmm_k1_zmmmt_imm8 = 4_814

VCVTFXPNTPS2DQ zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 CB /r ib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D0 = 4_815

UNDOC zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 D0 /r ib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D1 = 4_816

UNDOC zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 D1 /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntpd2dq_zmm_k1_zmmmt_imm8 = 4_817

VCVTFXPNTPD2DQ zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.F2.0F3A.W1 E6 /r ib

KNC

64-bit

§

Via_undoc_F30FA6F0_16 = 4_818

UNDOC

a16 F3 0F A6 F0

PADLOCK_UNDOC

16/32-bit

§

Via_undoc_F30FA6F0_32 = 4_819

UNDOC

a32 F3 0F A6 F0

PADLOCK_UNDOC

16/32/64-bit

§

Via_undoc_F30FA6F0_64 = 4_820

UNDOC

a64 F3 0F A6 F0

PADLOCK_UNDOC

64-bit

§

Via_undoc_F30FA6F8_16 = 4_821

UNDOC

a16 F3 0F A6 F8

PADLOCK_UNDOC

16/32-bit

§

Via_undoc_F30FA6F8_32 = 4_822

UNDOC

a32 F3 0F A6 F8

PADLOCK_UNDOC

16/32/64-bit

§

Via_undoc_F30FA6F8_64 = 4_823

UNDOC

a64 F3 0F A6 F8

PADLOCK_UNDOC

64-bit

§

Xsha512_16 = 4_824

XSHA512

a16 F3 0F A6 E0

PADLOCK_PHE

16/32-bit

§

Xsha512_32 = 4_825

XSHA512

a32 F3 0F A6 E0

PADLOCK_PHE

16/32/64-bit

§

Xsha512_64 = 4_826

XSHA512

a64 F3 0F A6 E0

PADLOCK_PHE

64-bit

§

Xstore_alt_16 = 4_827

XSTORE_ALT

a16 F3 0F A7 F8

PADLOCK_RNG

16/32-bit

§

Xstore_alt_32 = 4_828

XSTORE_ALT

a32 F3 0F A7 F8

PADLOCK_RNG

16/32/64-bit

§

Xstore_alt_64 = 4_829

XSTORE_ALT

a64 F3 0F A7 F8

PADLOCK_RNG

64-bit

§

Xsha512_alt_16 = 4_830

XSHA512_ALT

a16 F3 0F A6 D8

PADLOCK_PHE

16/32-bit

§

Xsha512_alt_32 = 4_831

XSHA512_ALT

a32 F3 0F A6 D8

PADLOCK_PHE

16/32/64-bit

§

Xsha512_alt_64 = 4_832

XSHA512_ALT

a64 F3 0F A6 D8

PADLOCK_PHE

64-bit

§

Zero_bytes = 4_833

A zero-sized instruction. Can be used as a label.

§

Wrmsrns = 4_834

WRMSRNS

NP 0F 01 C6

WRMSRNS

16/32/64-bit

§

Wrmsrlist = 4_835

WRMSRLIST

F3 0F 01 C6

MSRLIST

64-bit

§

Rdmsrlist = 4_836

RDMSRLIST

F2 0F 01 C6

MSRLIST

64-bit

§

Rmpquery = 4_837

RMPQUERY

F3 0F 01 FD

RMPQUERY

64-bit

§

Prefetchit1_m8 = 4_838

PREFETCHIT1 m8

0F 18 /6

PREFETCHITI

16/32/64-bit

§

Prefetchit0_m8 = 4_839

PREFETCHIT0 m8

0F 18 /7

PREFETCHITI

16/32/64-bit

§

Aadd_m32_r32 = 4_840

AADD m32, r32

NP 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aadd_m64_r64 = 4_841

AADD m64, r64

NP o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Aand_m32_r32 = 4_842

AAND m32, r32

66 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aand_m64_r64 = 4_843

AAND m64, r64

66 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Axor_m32_r32 = 4_844

AXOR m32, r32

F3 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Axor_m64_r64 = 4_845

AXOR m64, r64

F3 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Aor_m32_r32 = 4_846

AOR m32, r32

F2 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aor_m64_r64 = 4_847

AOR m64, r64

F2 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

VEX_Vpdpbuud_xmm_xmm_xmmm128 = 4_848

VPDPBUUD xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuud_ymm_ymm_ymmm256 = 4_849

VPDPBUUD ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsud_xmm_xmm_xmmm128 = 4_850

VPDPBSUD xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsud_ymm_ymm_ymmm256 = 4_851

VPDPBSUD ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssd_xmm_xmm_xmmm128 = 4_852

VPDPBSSD xmm1, xmm2, xmm3/m128

VEX.128.F2.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssd_ymm_ymm_ymmm256 = 4_853

VPDPBSSD ymm1, ymm2, ymm3/m256

VEX.256.F2.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuuds_xmm_xmm_xmmm128 = 4_854

VPDPBUUDS xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuuds_ymm_ymm_ymmm256 = 4_855

VPDPBUUDS ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsuds_xmm_xmm_xmmm128 = 4_856

VPDPBSUDS xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsuds_ymm_ymm_ymmm256 = 4_857

VPDPBSUDS ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssds_xmm_xmm_xmmm128 = 4_858

VPDPBSSDS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssds_ymm_ymm_ymmm256 = 4_859

VPDPBSSDS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Tdpfp16ps_tmm_tmm_tmm = 4_860

TDPFP16PS tmm1, tmm2, tmm3

VEX.128.F2.0F38.W0 5C 11:rrr:bbb

AMX-FP16

64-bit

§

VEX_Vcvtneps2bf16_xmm_xmmm128 = 4_861

VCVTNEPS2BF16 xmm1, xmm2/m128

VEX.128.F3.0F38.W0 72 /r

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneps2bf16_xmm_ymmm256 = 4_862

VCVTNEPS2BF16 xmm1, ymm2/m256

VEX.256.F3.0F38.W0 72 /r

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneoph2ps_xmm_m128 = 4_863

VCVTNEOPH2PS xmm1, m128

VEX.128.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneoph2ps_ymm_m256 = 4_864

VCVTNEOPH2PS ymm1, m256

VEX.256.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneeph2ps_xmm_m128 = 4_865

VCVTNEEPH2PS xmm1, m128

VEX.128.66.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneeph2ps_ymm_m256 = 4_866

VCVTNEEPH2PS ymm1, m256

VEX.256.66.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneebf162ps_xmm_m128 = 4_867

VCVTNEEBF162PS xmm1, m128

VEX.128.F3.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneebf162ps_ymm_m256 = 4_868

VCVTNEEBF162PS ymm1, m256

VEX.256.F3.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneobf162ps_xmm_m128 = 4_869

VCVTNEOBF162PS xmm1, m128

VEX.128.F2.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneobf162ps_ymm_m256 = 4_870

VCVTNEOBF162PS ymm1, m256

VEX.256.F2.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnesh2ps_xmm_m16 = 4_871

VBCSTNESH2PS xmm1, m16

VEX.128.66.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnesh2ps_ymm_m16 = 4_872

VBCSTNESH2PS ymm1, m16

VEX.256.66.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnebf162ps_xmm_m16 = 4_873

VBCSTNEBF162PS xmm1, m16

VEX.128.F3.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnebf162ps_ymm_m16 = 4_874

VBCSTNEBF162PS ymm1, m16

VEX.256.F3.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vpmadd52luq_xmm_xmm_xmmm128 = 4_875

VPMADD52LUQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B4 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52luq_ymm_ymm_ymmm256 = 4_876

VPMADD52LUQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B4 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52huq_xmm_xmm_xmmm128 = 4_877

VPMADD52HUQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B5 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52huq_ymm_ymm_ymmm256 = 4_878

VPMADD52HUQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B5 /r

AVX-IFMA

16/32/64-bit

§

VEX_Cmpoxadd_m32_r32_r32 = 4_879

CMPOXADD m32, r32, r32

VEX.128.66.0F38.W0 E0 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpoxadd_m64_r64_r64 = 4_880

CMPOXADD m64, r64, r64

VEX.128.66.0F38.W1 E0 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnoxadd_m32_r32_r32 = 4_881

CMPNOXADD m32, r32, r32

VEX.128.66.0F38.W0 E1 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnoxadd_m64_r64_r64 = 4_882

CMPNOXADD m64, r64, r64

VEX.128.66.0F38.W1 E1 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbxadd_m32_r32_r32 = 4_883

CMPBXADD m32, r32, r32

VEX.128.66.0F38.W0 E2 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbxadd_m64_r64_r64 = 4_884

CMPBXADD m64, r64, r64

VEX.128.66.0F38.W1 E2 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbxadd_m32_r32_r32 = 4_885

CMPNBXADD m32, r32, r32

VEX.128.66.0F38.W0 E3 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbxadd_m64_r64_r64 = 4_886

CMPNBXADD m64, r64, r64

VEX.128.66.0F38.W1 E3 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpzxadd_m32_r32_r32 = 4_887

CMPZXADD m32, r32, r32

VEX.128.66.0F38.W0 E4 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpzxadd_m64_r64_r64 = 4_888

CMPZXADD m64, r64, r64

VEX.128.66.0F38.W1 E4 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnzxadd_m32_r32_r32 = 4_889

CMPNZXADD m32, r32, r32

VEX.128.66.0F38.W0 E5 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnzxadd_m64_r64_r64 = 4_890

CMPNZXADD m64, r64, r64

VEX.128.66.0F38.W1 E5 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbexadd_m32_r32_r32 = 4_891

CMPBEXADD m32, r32, r32

VEX.128.66.0F38.W0 E6 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbexadd_m64_r64_r64 = 4_892

CMPBEXADD m64, r64, r64

VEX.128.66.0F38.W1 E6 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbexadd_m32_r32_r32 = 4_893

CMPNBEXADD m32, r32, r32

VEX.128.66.0F38.W0 E7 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbexadd_m64_r64_r64 = 4_894

CMPNBEXADD m64, r64, r64

VEX.128.66.0F38.W1 E7 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpsxadd_m32_r32_r32 = 4_895

CMPSXADD m32, r32, r32

VEX.128.66.0F38.W0 E8 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpsxadd_m64_r64_r64 = 4_896

CMPSXADD m64, r64, r64

VEX.128.66.0F38.W1 E8 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnsxadd_m32_r32_r32 = 4_897

CMPNSXADD m32, r32, r32

VEX.128.66.0F38.W0 E9 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnsxadd_m64_r64_r64 = 4_898

CMPNSXADD m64, r64, r64

VEX.128.66.0F38.W1 E9 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmppxadd_m32_r32_r32 = 4_899

CMPPXADD m32, r32, r32

VEX.128.66.0F38.W0 EA !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmppxadd_m64_r64_r64 = 4_900

CMPPXADD m64, r64, r64

VEX.128.66.0F38.W1 EA !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnpxadd_m32_r32_r32 = 4_901

CMPNPXADD m32, r32, r32

VEX.128.66.0F38.W0 EB !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnpxadd_m64_r64_r64 = 4_902

CMPNPXADD m64, r64, r64

VEX.128.66.0F38.W1 EB !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplxadd_m32_r32_r32 = 4_903

CMPLXADD m32, r32, r32

VEX.128.66.0F38.W0 EC !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplxadd_m64_r64_r64 = 4_904

CMPLXADD m64, r64, r64

VEX.128.66.0F38.W1 EC !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlxadd_m32_r32_r32 = 4_905

CMPNLXADD m32, r32, r32

VEX.128.66.0F38.W0 ED !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlxadd_m64_r64_r64 = 4_906

CMPNLXADD m64, r64, r64

VEX.128.66.0F38.W1 ED !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplexadd_m32_r32_r32 = 4_907

CMPLEXADD m32, r32, r32

VEX.128.66.0F38.W0 EE !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplexadd_m64_r64_r64 = 4_908

CMPLEXADD m64, r64, r64

VEX.128.66.0F38.W1 EE !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlexadd_m32_r32_r32 = 4_909

CMPNLEXADD m32, r32, r32

VEX.128.66.0F38.W0 EF !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlexadd_m64_r64_r64 = 4_910

CMPNLEXADD m64, r64, r64

VEX.128.66.0F38.W1 EF !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Tcmmrlfp16ps_tmm_tmm_tmm = 4_911

TCMMRLFP16PS tmm1, tmm2, tmm3

VEX.128.0F38.W0 6C 11:rrr:bbb

AMX-COMPLEX

64-bit

§

VEX_Tcmmimfp16ps_tmm_tmm_tmm = 4_912

TCMMIMFP16PS tmm1, tmm2, tmm3

VEX.128.66.0F38.W0 6C 11:rrr:bbb

AMX-COMPLEX

64-bit

§

Pbndkb = 4_913

PBNDKB

NP 0F 01 C7

TSE

64-bit

§

VEX_Vsha512rnds2_ymm_ymm_xmm = 4_914

VSHA512RNDS2 ymm1, ymm2, xmm3

VEX.256.F2.0F38.W0 CB 11:rrr:bbb

AVX and SHA512

16/32/64-bit

§

VEX_Vsha512msg1_ymm_xmm = 4_915

VSHA512MSG1 ymm1, xmm2

VEX.256.F2.0F38.W0 CC 11:rrr:bbb

AVX and SHA512

16/32/64-bit

§

VEX_Vsha512msg2_ymm_ymm = 4_916

VSHA512MSG2 ymm1, ymm2

VEX.256.F2.0F38.W0 CD 11:rrr:bbb

AVX and SHA512

16/32/64-bit

§

VEX_Vpdpwuud_xmm_xmm_xmmm128 = 4_917

VPDPWUUD xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwuud_ymm_ymm_ymmm256 = 4_918

VPDPWUUD ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwusd_xmm_xmm_xmmm128 = 4_919

VPDPWUSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwusd_ymm_ymm_ymmm256 = 4_920

VPDPWUSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwsud_xmm_xmm_xmmm128 = 4_921

VPDPWSUD xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwsud_ymm_ymm_ymmm256 = 4_922

VPDPWSUD ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 D2 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwuuds_xmm_xmm_xmmm128 = 4_923

VPDPWUUDS xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwuuds_ymm_ymm_ymmm256 = 4_924

VPDPWUUDS ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwusds_xmm_xmm_xmmm128 = 4_925

VPDPWUSDS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwusds_ymm_ymm_ymmm256 = 4_926

VPDPWUSDS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwsuds_xmm_xmm_xmmm128 = 4_927

VPDPWSUDS xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vpdpwsuds_ymm_ymm_ymmm256 = 4_928

VPDPWSUDS ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 D3 /r

AVX-VNNI-INT16

16/32/64-bit

§

VEX_Vsm3msg1_xmm_xmm_xmmm128 = 4_929

VSM3MSG1 xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 DA /r

AVX and SM3

16/32/64-bit

§

VEX_Vsm3msg2_xmm_xmm_xmmm128 = 4_930

VSM3MSG2 xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 DA /r

AVX and SM3

16/32/64-bit

§

VEX_Vsm4key4_xmm_xmm_xmmm128 = 4_931

VSM4KEY4 xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 DA /r

AVX and SM4

16/32/64-bit

§

VEX_Vsm4key4_ymm_ymm_ymmm256 = 4_932

VSM4KEY4 ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 DA /r

AVX and SM4

16/32/64-bit

§

VEX_Vsm4rnds4_xmm_xmm_xmmm128 = 4_933

VSM4RNDS4 xmm1, xmm2, xmm3/m128

VEX.128.F2.0F38.W0 DA /r

AVX and SM4

16/32/64-bit

§

VEX_Vsm4rnds4_ymm_ymm_ymmm256 = 4_934

VSM4RNDS4 ymm1, ymm2, ymm3/m256

VEX.256.F2.0F38.W0 DA /r

AVX and SM4

16/32/64-bit

§

VEX_Vsm3rnds2_xmm_xmm_xmmm128_imm8 = 4_935

VSM3RNDS2 xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W0 DE /r ib

AVX and SM3

16/32/64-bit

Implementations§

source§

impl Code

source

pub fn values() -> impl Iterator<Item = Code> + DoubleEndedIterator + ExactSizeIterator + FusedIterator

Iterates over all Code enum values
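
For illustration, a minimal sketch of what the iterator yields; the only assumptions are that iteration starts at Code::INVALID (value 0) and that the count matches the 4936 variants listed on this page:
use iced_x86::*;
// Iteration starts at the first variant, Code::INVALID (= 0).
let mut iter = Code::values();
assert_eq!(iter.next(), Some(Code::INVALID));
// Every variant is visited exactly once.
assert_eq!(Code::values().count(), 4936);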

source§

impl Code

source

pub fn mnemonic(self) -> Mnemonic

Gets the mnemonic

Examples
use iced_x86::*;
assert_eq!(Code::Add_rm32_r32.mnemonic(), Mnemonic::Add);
source§

impl Code

source

pub fn op_code(self) -> &'static OpCodeInfo

Gets an OpCodeInfo
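
A small sketch of using the returned value; it assumes OpCodeInfo exposes encoding() and mnemonic() accessors (not documented on this page):
use iced_x86::*;
// Static opcode info for an EVEX instruction.
let info = Code::EVEX_Vmovups_xmm_k1z_xmmm128.op_code();
assert_eq!(info.encoding(), EncodingKind::EVEX);
assert_eq!(info.mnemonic(), Mnemonic::Vmovups);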

source§

impl Code

source

pub fn encoding(self) -> EncodingKind

Gets the encoding, e.g. Legacy, 3DNow!, VEX, EVEX, XOP

Examples
use iced_x86::*;
assert_eq!(Code::Add_rm32_r32.encoding(), EncodingKind::Legacy);
assert_eq!(Code::VEX_Vmovups_xmm_xmmm128.encoding(), EncodingKind::VEX);
assert_eq!(Code::EVEX_Vmovups_xmm_k1z_xmmm128.encoding(), EncodingKind::EVEX);
assert_eq!(Code::XOP_Vpmacssww_xmm_xmm_xmmm128_xmm.encoding(), EncodingKind::XOP);
assert_eq!(Code::D3NOW_Pi2fw_mm_mmm64.encoding(), EncodingKind::D3NOW);
assert_eq!(Code::MVEX_Vpackstoreld_mt_k1_zmm.encoding(), EncodingKind::MVEX);
source

pub fn cpuid_features(self) -> &'static [CpuidFeature]

Gets the CPU or CPUID feature flags

Examples
use iced_x86::*;

let cpuid = Code::VEX_Vmovups_xmm_xmmm128.cpuid_features();
assert_eq!(cpuid.len(), 1);
assert_eq!(cpuid[0], CpuidFeature::AVX);

let cpuid = Code::EVEX_Vmovaps_xmm_k1z_xmmm128.cpuid_features();
assert_eq!(cpuid.len(), 2);
assert_eq!(cpuid[0], CpuidFeature::AVX512VL);
assert_eq!(cpuid[1], CpuidFeature::AVX512F);
source

pub fn flow_control(self) -> FlowControl

Gets control flow info

Examples
use iced_x86::*;
assert_eq!(Code::Or_r32_rm32.flow_control(), FlowControl::Next);
assert_eq!(Code::Ud0_r64_rm64.flow_control(), FlowControl::Exception);
assert_eq!(Code::Call_rm64.flow_control(), FlowControl::IndirectCall);
source

pub fn is_privileged(self) -> bool

Checks if it’s a privileged instruction: all CPL=0 instructions except VMCALL, plus the IOPL-sensitive instructions IN, INS, OUT, OUTS, CLI and STI
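
A brief sketch, assuming Code::Cli is the variant name of the CLI instruction (it does not appear on this page):
use iced_x86::*;
// CLI is IOPL-sensitive and therefore reported as privileged; a plain OR is not.
assert!(Code::Cli.is_privileged());
assert!(!Code::Or_r32_rm32.is_privileged());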

source

pub fn is_stack_instruction(self) -> bool

Checks if this is an instruction that implicitly uses the stack pointer (SP/ESP/RSP), e.g. CALL, PUSH, POP, RET, etc. See also Instruction::stack_pointer_increment()

Examples
use iced_x86::*;
assert!(!Code::Or_r32_rm32.is_stack_instruction());
assert!(Code::Push_r64.is_stack_instruction());
assert!(Code::Call_rm64.is_stack_instruction());
source

pub fn is_save_restore_instruction(self) -> bool

Checks if it’s an instruction that saves or restores too many registers (e.g. FXRSTOR, XSAVE, etc.).

source

pub const fn is_jcc_near(self) -> bool

Checks if it’s a Jcc NEAR instruction

source

pub const fn is_jcc_short(self) -> bool

Checks if it’s a Jcc SHORT instruction
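
A short illustration of this predicate and is_jcc_near() above, using the rel8 (SHORT) and rel32 (NEAR) forms of JBE that appear elsewhere on this page:
use iced_x86::*;
// rel8 forms are SHORT branches, rel32 forms are NEAR branches.
assert!(Code::Jbe_rel8_64.is_jcc_short());
assert!(!Code::Jbe_rel8_64.is_jcc_near());
assert!(Code::Jbe_rel32_64.is_jcc_near());
assert!(!Code::Jbe_rel32_64.is_jcc_short());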

source

pub const fn is_jmp_short(self) -> bool

Checks if it’s a JMP SHORT instruction

source

pub const fn is_jmp_near(self) -> bool

Checks if it’s a JMP NEAR instruction

source

pub const fn is_jmp_short_or_near(self) -> bool

Checks if it’s a JMP SHORT or a JMP NEAR instruction
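
A sketch for the unconditional-JMP predicates above, assuming Jmp_rel8_64 and Jmp_rel32_64 are the 64-bit SHORT and NEAR JMP variants (they follow the same naming pattern as the Jcc variants on this page):
use iced_x86::*;
// JMP rel8 is SHORT, JMP rel32 is NEAR; both satisfy is_jmp_short_or_near().
assert!(Code::Jmp_rel8_64.is_jmp_short());
assert!(Code::Jmp_rel32_64.is_jmp_near());
assert!(Code::Jmp_rel8_64.is_jmp_short_or_near());
assert!(Code::Jmp_rel32_64.is_jmp_short_or_near());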

source

pub const fn is_jmp_far(self) -> bool

Checks if it’s a JMP FAR instruction

source

pub const fn is_call_near(self) -> bool

Checks if it’s a CALL NEAR instruction

source

pub const fn is_call_far(self) -> bool

Checks if it’s a CALL FAR instruction

source

pub const fn is_jmp_near_indirect(self) -> bool

Checks if it’s a JMP NEAR reg/[mem] instruction

source

pub const fn is_jmp_far_indirect(self) -> bool

Checks if it’s a JMP FAR [mem] instruction

source

pub const fn is_call_near_indirect(self) -> bool

Checks if it’s a CALL NEAR reg/[mem] instruction
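
For illustration, using Call_rm64 from the flow_control() example above:
use iced_x86::*;
// CALL r/m64 is a near indirect call, not a far one.
assert!(Code::Call_rm64.is_call_near_indirect());
assert!(!Code::Call_rm64.is_call_far_indirect());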

source

pub const fn is_call_far_indirect(self) -> bool

Checks if it’s a CALL FAR [mem] instruction

source

pub const fn is_jkcc_short_or_near(self) -> bool

Checks if it’s a JKccD SHORT or JKccD NEAR instruction

source

pub const fn is_jkcc_near(self) -> bool

Checks if it’s a JKccD NEAR instruction

source

pub const fn is_jkcc_short(self) -> bool

Checks if it’s a JKccD SHORT instruction

source

pub fn condition_code(self) -> ConditionCode

Gets the condition code if it’s a Jcc, SETcc, CMOVcc, CMPccXADD or LOOPcc instruction; otherwise ConditionCode::None is returned

Examples
use iced_x86::*;
assert_eq!(Code::Jbe_rel8_64.condition_code(), ConditionCode::be);
assert_eq!(Code::Cmovo_r64_rm64.condition_code(), ConditionCode::o);
assert_eq!(Code::Setne_rm8.condition_code(), ConditionCode::ne);
assert_eq!(Code::Pause.condition_code(), ConditionCode::None);
source

pub const fn is_string_instruction(self) -> bool

Returns true if this Code corresponds to a “string” operation, such as MOVS, LODS, STOS, etc.
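
A small sketch; Insb_m8_DX is a variant listed earlier in this enum, and it is assumed here that INS counts as a string operation for this check:
use iced_x86::*;
// INSB is a string operation; PAUSE is not.
assert!(Code::Insb_m8_DX.is_string_instruction());
assert!(!Code::Pause.is_string_instruction());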

source

pub const fn is_jcx_short(self) -> bool

Checks if it’s a JCXZ SHORT, JECXZ SHORT or JRCXZ SHORT instruction

source

pub const fn is_loopcc(self) -> bool

Checks if it’s a LOOPcc SHORT instruction

source

pub const fn is_loop(self) -> bool

Checks if it’s a LOOP SHORT instruction

source§

impl Code

source

pub const fn is_jcc_short_or_near(self) -> bool

Checks if it’s a Jcc SHORT or Jcc NEAR instruction
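
For illustration, with variants that appear elsewhere on this page:
use iced_x86::*;
// Both the SHORT (rel8) and NEAR (rel32) forms return true; PAUSE is not a Jcc.
assert!(Code::Jbe_rel8_64.is_jcc_short_or_near());
assert!(Code::Jbe_rel32_64.is_jcc_short_or_near());
assert!(!Code::Pause.is_jcc_short_or_near());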

source§

impl Code

source

pub fn negate_condition_code(self) -> Self

Negates the condition code, e.g. JE -> JNE. Can be used if it’s a Jcc, SETcc, CMOVcc, CMPccXADD or LOOPcc instruction; returns the original value if it’s none of those.

Examples
use iced_x86::*;
assert_eq!(Code::Setbe_rm8.negate_condition_code(), Code::Seta_rm8);
assert_eq!(Code::Seta_rm8.negate_condition_code(), Code::Setbe_rm8);
source

pub fn as_short_branch(self) -> Self

Converts Jcc/JMP NEAR to Jcc/JMP SHORT. Returns the input if it’s not a Jcc/JMP NEAR instruction.

Examples
use iced_x86::*;
assert_eq!(Code::Jbe_rel32_64.as_short_branch(), Code::Jbe_rel8_64);
assert_eq!(Code::Jbe_rel8_64.as_short_branch(), Code::Jbe_rel8_64);
assert_eq!(Code::Pause.as_short_branch(), Code::Pause);
source

pub fn as_near_branch(self) -> Self

Converts Jcc/JMP SHORT to Jcc/JMP NEAR. Returns the input if it’s not a Jcc/JMP SHORT instruction.

Examples
use iced_x86::*;
assert_eq!(Code::Jbe_rel8_64.as_near_branch(), Code::Jbe_rel32_64);
assert_eq!(Code::Jbe_rel32_64.as_near_branch(), Code::Jbe_rel32_64);
assert_eq!(Code::Pause.as_near_branch(), Code::Pause);

Trait Implementations§

source§

impl Clone for Code

source§

fn clone(&self) -> Code

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for Code

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Default for Code

source§

fn default() -> Self

Returns the “default value” for a type. Read more
source§

impl<'de> Deserialize<'de> for Code

source§

fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer. Read more
source§

impl Hash for Code

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where H: Hasher, Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl Ord for Code

source§

fn cmp(&self, other: &Code) -> Ordering

This method returns an Ordering between self and other. Read more
1.21.0 · source§

fn max(self, other: Self) -> Self
where Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl PartialEq for Code

source§

fn eq(&self, other: &Code) -> bool

This method tests for self and other values to be equal, and is used by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.
source§

impl PartialOrd for Code

source§

fn partial_cmp(&self, other: &Code) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= operator. Read more
source§

impl Serialize for Code

source§

fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,

Serialize this value into the given Serde serializer. Read more
source§

impl TryFrom<usize> for Code

§

type Error = IcedError

The type returned in the event of a conversion error.
source§

fn try_from(value: usize) -> Result<Self, Self::Error>

Performs the conversion.
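
A minimal sketch of the fallible conversion; it assumes only that out-of-range values are rejected (the Error type above is IcedError):
use iced_x86::*;
use std::convert::TryFrom;
// 0 converts to the first variant, Code::INVALID; an out-of-range value is an error.
assert_eq!(Code::try_from(0usize).unwrap(), Code::INVALID);
assert!(Code::try_from(usize::MAX).is_err());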
source§

impl Copy for Code

source§

impl Eq for Code

source§

impl StructuralEq for Code

source§

impl StructuralPartialEq for Code

Auto Trait Implementations§

§

impl RefUnwindSafe for Code

§

impl Send for Code

§

impl Sync for Code

§

impl Unpin for Code

§

impl UnwindSafe for Code

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

source§

impl<T, U> Into<U> for T
where U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

source§

impl<T> ToOwned for T
where T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
source§

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,