Enum iced_x86::Code

[source]
#[non_exhaustive]
pub enum Code {
INVALID, DeclareByte, DeclareWord, DeclareDword, DeclareQword, Add_rm8_r8, Add_rm16_r16, Add_rm32_r32, Add_rm64_r64, Add_r8_rm8, Add_r16_rm16, Add_r32_rm32, Add_r64_rm64, Add_AL_imm8, Add_AX_imm16, Add_EAX_imm32, Add_RAX_imm32, Pushw_ES, Pushd_ES, Popw_ES, Popd_ES, Or_rm8_r8, Or_rm16_r16, Or_rm32_r32, Or_rm64_r64, Or_r8_rm8, Or_r16_rm16, Or_r32_rm32, Or_r64_rm64, Or_AL_imm8, Or_AX_imm16, Or_EAX_imm32, Or_RAX_imm32, Pushw_CS, Pushd_CS, Popw_CS, Adc_rm8_r8, Adc_rm16_r16, Adc_rm32_r32, Adc_rm64_r64, Adc_r8_rm8, Adc_r16_rm16, Adc_r32_rm32, Adc_r64_rm64, Adc_AL_imm8, Adc_AX_imm16, Adc_EAX_imm32, Adc_RAX_imm32, Pushw_SS, Pushd_SS, Popw_SS, Popd_SS, Sbb_rm8_r8, Sbb_rm16_r16, Sbb_rm32_r32, Sbb_rm64_r64, Sbb_r8_rm8, Sbb_r16_rm16, Sbb_r32_rm32, Sbb_r64_rm64, Sbb_AL_imm8, Sbb_AX_imm16, Sbb_EAX_imm32, Sbb_RAX_imm32, Pushw_DS, Pushd_DS, Popw_DS, Popd_DS, And_rm8_r8, And_rm16_r16, And_rm32_r32, And_rm64_r64, And_r8_rm8, And_r16_rm16, And_r32_rm32, And_r64_rm64, And_AL_imm8, And_AX_imm16, And_EAX_imm32, And_RAX_imm32, Daa, Sub_rm8_r8, Sub_rm16_r16, Sub_rm32_r32, Sub_rm64_r64, Sub_r8_rm8, Sub_r16_rm16, Sub_r32_rm32, Sub_r64_rm64, Sub_AL_imm8, Sub_AX_imm16, Sub_EAX_imm32, Sub_RAX_imm32, Das, Xor_rm8_r8, Xor_rm16_r16, Xor_rm32_r32, Xor_rm64_r64, Xor_r8_rm8, Xor_r16_rm16, Xor_r32_rm32, Xor_r64_rm64, Xor_AL_imm8, Xor_AX_imm16, Xor_EAX_imm32, Xor_RAX_imm32, Aaa, Cmp_rm8_r8, Cmp_rm16_r16, Cmp_rm32_r32, Cmp_rm64_r64, Cmp_r8_rm8, Cmp_r16_rm16, Cmp_r32_rm32, Cmp_r64_rm64, Cmp_AL_imm8, Cmp_AX_imm16, Cmp_EAX_imm32, Cmp_RAX_imm32, Aas, Inc_r16, Inc_r32, Dec_r16, Dec_r32, Push_r16, Push_r32, Push_r64, Pop_r16, Pop_r32, Pop_r64, Pushaw, Pushad, Popaw, Popad, Bound_r16_m1616, Bound_r32_m3232, Arpl_rm16_r16, Arpl_r32m16_r32, Movsxd_r16_rm16, Movsxd_r32_rm32, Movsxd_r64_rm32, Push_imm16, Pushd_imm32, Pushq_imm32, Imul_r16_rm16_imm16, Imul_r32_rm32_imm32, Imul_r64_rm64_imm32, Pushw_imm8, Pushd_imm8, Pushq_imm8, Imul_r16_rm16_imm8, Imul_r32_rm32_imm8, Imul_r64_rm64_imm8, 
Insb_m8_DX, Insw_m16_DX, Insd_m32_DX, Outsb_DX_m8, Outsw_DX_m16, Outsd_DX_m32, Jo_rel8_16, Jo_rel8_32, Jo_rel8_64, Jno_rel8_16, Jno_rel8_32, Jno_rel8_64, Jb_rel8_16, Jb_rel8_32, Jb_rel8_64, Jae_rel8_16, Jae_rel8_32, Jae_rel8_64, Je_rel8_16, Je_rel8_32, Je_rel8_64, Jne_rel8_16, Jne_rel8_32, Jne_rel8_64, Jbe_rel8_16, Jbe_rel8_32, Jbe_rel8_64, Ja_rel8_16, Ja_rel8_32, Ja_rel8_64, Js_rel8_16, Js_rel8_32, Js_rel8_64, Jns_rel8_16, Jns_rel8_32, Jns_rel8_64, Jp_rel8_16, Jp_rel8_32, Jp_rel8_64, Jnp_rel8_16, Jnp_rel8_32, Jnp_rel8_64, Jl_rel8_16, Jl_rel8_32, Jl_rel8_64, Jge_rel8_16, Jge_rel8_32, Jge_rel8_64, Jle_rel8_16, Jle_rel8_32, Jle_rel8_64, Jg_rel8_16, Jg_rel8_32, Jg_rel8_64, Add_rm8_imm8, Or_rm8_imm8, Adc_rm8_imm8, Sbb_rm8_imm8, And_rm8_imm8, Sub_rm8_imm8, Xor_rm8_imm8, Cmp_rm8_imm8, Add_rm16_imm16, Add_rm32_imm32, Add_rm64_imm32, Or_rm16_imm16, Or_rm32_imm32, Or_rm64_imm32, Adc_rm16_imm16, Adc_rm32_imm32, Adc_rm64_imm32, Sbb_rm16_imm16, Sbb_rm32_imm32, Sbb_rm64_imm32, And_rm16_imm16, And_rm32_imm32, And_rm64_imm32, Sub_rm16_imm16, Sub_rm32_imm32, Sub_rm64_imm32, Xor_rm16_imm16, Xor_rm32_imm32, Xor_rm64_imm32, Cmp_rm16_imm16, Cmp_rm32_imm32, Cmp_rm64_imm32, Add_rm8_imm8_82, Or_rm8_imm8_82, Adc_rm8_imm8_82, Sbb_rm8_imm8_82, And_rm8_imm8_82, Sub_rm8_imm8_82, Xor_rm8_imm8_82, Cmp_rm8_imm8_82, Add_rm16_imm8, Add_rm32_imm8, Add_rm64_imm8, Or_rm16_imm8, Or_rm32_imm8, Or_rm64_imm8, Adc_rm16_imm8, Adc_rm32_imm8, Adc_rm64_imm8, Sbb_rm16_imm8, Sbb_rm32_imm8, Sbb_rm64_imm8, And_rm16_imm8, And_rm32_imm8, And_rm64_imm8, Sub_rm16_imm8, Sub_rm32_imm8, Sub_rm64_imm8, Xor_rm16_imm8, Xor_rm32_imm8, Xor_rm64_imm8, Cmp_rm16_imm8, Cmp_rm32_imm8, Cmp_rm64_imm8, Test_rm8_r8, Test_rm16_r16, Test_rm32_r32, Test_rm64_r64, Xchg_rm8_r8, Xchg_rm16_r16, Xchg_rm32_r32, Xchg_rm64_r64, Mov_rm8_r8, Mov_rm16_r16, Mov_rm32_r32, Mov_rm64_r64, Mov_r8_rm8, Mov_r16_rm16, Mov_r32_rm32, Mov_r64_rm64, Mov_rm16_Sreg, Mov_r32m16_Sreg, Mov_r64m16_Sreg, Lea_r16_m, Lea_r32_m, Lea_r64_m, Mov_Sreg_rm16, 
Mov_Sreg_r32m16, Mov_Sreg_r64m16, Pop_rm16, Pop_rm32, Pop_rm64, Nopw, Nopd, Nopq, Xchg_r16_AX, Xchg_r32_EAX, Xchg_r64_RAX, Pause, Cbw, Cwde, Cdqe, Cwd, Cdq, Cqo, Call_ptr1616, Call_ptr1632, Wait, Pushfw, Pushfd, Pushfq, Popfw, Popfd, Popfq, Sahf, Lahf, Mov_AL_moffs8, Mov_AX_moffs16, Mov_EAX_moffs32, Mov_RAX_moffs64, Mov_moffs8_AL, Mov_moffs16_AX, Mov_moffs32_EAX, Mov_moffs64_RAX, Movsb_m8_m8, Movsw_m16_m16, Movsd_m32_m32, Movsq_m64_m64, Cmpsb_m8_m8, Cmpsw_m16_m16, Cmpsd_m32_m32, Cmpsq_m64_m64, Test_AL_imm8, Test_AX_imm16, Test_EAX_imm32, Test_RAX_imm32, Stosb_m8_AL, Stosw_m16_AX, Stosd_m32_EAX, Stosq_m64_RAX, Lodsb_AL_m8, Lodsw_AX_m16, Lodsd_EAX_m32, Lodsq_RAX_m64, Scasb_AL_m8, Scasw_AX_m16, Scasd_EAX_m32, Scasq_RAX_m64, Mov_r8_imm8, Mov_r16_imm16, Mov_r32_imm32, Mov_r64_imm64, Rol_rm8_imm8, Ror_rm8_imm8, Rcl_rm8_imm8, Rcr_rm8_imm8, Shl_rm8_imm8, Shr_rm8_imm8, Sal_rm8_imm8, Sar_rm8_imm8, Rol_rm16_imm8, Rol_rm32_imm8, Rol_rm64_imm8, Ror_rm16_imm8, Ror_rm32_imm8, Ror_rm64_imm8, Rcl_rm16_imm8, Rcl_rm32_imm8, Rcl_rm64_imm8, Rcr_rm16_imm8, Rcr_rm32_imm8, Rcr_rm64_imm8, Shl_rm16_imm8, Shl_rm32_imm8, Shl_rm64_imm8, Shr_rm16_imm8, Shr_rm32_imm8, Shr_rm64_imm8, Sal_rm16_imm8, Sal_rm32_imm8, Sal_rm64_imm8, Sar_rm16_imm8, Sar_rm32_imm8, Sar_rm64_imm8, Retnw_imm16, Retnd_imm16, Retnq_imm16, Retnw, Retnd, Retnq, Les_r16_m1616, Les_r32_m1632, Lds_r16_m1616, Lds_r32_m1632, Mov_rm8_imm8, Xabort_imm8, Mov_rm16_imm16, Mov_rm32_imm32, Mov_rm64_imm32, Xbegin_rel16, Xbegin_rel32, Enterw_imm16_imm8, Enterd_imm16_imm8, Enterq_imm16_imm8, Leavew, Leaved, Leaveq, Retfw_imm16, Retfd_imm16, Retfq_imm16, Retfw, Retfd, Retfq, Int3, Int_imm8, Into, Iretw, Iretd, Iretq, Rol_rm8_1, Ror_rm8_1, Rcl_rm8_1, Rcr_rm8_1, Shl_rm8_1, Shr_rm8_1, Sal_rm8_1, Sar_rm8_1, Rol_rm16_1, Rol_rm32_1, Rol_rm64_1, Ror_rm16_1, Ror_rm32_1, Ror_rm64_1, Rcl_rm16_1, Rcl_rm32_1, Rcl_rm64_1, Rcr_rm16_1, Rcr_rm32_1, Rcr_rm64_1, Shl_rm16_1, Shl_rm32_1, Shl_rm64_1, Shr_rm16_1, Shr_rm32_1, Shr_rm64_1, Sal_rm16_1, Sal_rm32_1, 
Sal_rm64_1, Sar_rm16_1, Sar_rm32_1, Sar_rm64_1, Rol_rm8_CL, Ror_rm8_CL, Rcl_rm8_CL, Rcr_rm8_CL, Shl_rm8_CL, Shr_rm8_CL, Sal_rm8_CL, Sar_rm8_CL, Rol_rm16_CL, Rol_rm32_CL, Rol_rm64_CL, Ror_rm16_CL, Ror_rm32_CL, Ror_rm64_CL, Rcl_rm16_CL, Rcl_rm32_CL, Rcl_rm64_CL, Rcr_rm16_CL, Rcr_rm32_CL, Rcr_rm64_CL, Shl_rm16_CL, Shl_rm32_CL, Shl_rm64_CL, Shr_rm16_CL, Shr_rm32_CL, Shr_rm64_CL, Sal_rm16_CL, Sal_rm32_CL, Sal_rm64_CL, Sar_rm16_CL, Sar_rm32_CL, Sar_rm64_CL, Aam_imm8, Aad_imm8, Salc, Xlat_m8, Fadd_m32fp, Fmul_m32fp, Fcom_m32fp, Fcomp_m32fp, Fsub_m32fp, Fsubr_m32fp, Fdiv_m32fp, Fdivr_m32fp, Fadd_st0_sti, Fmul_st0_sti, Fcom_st0_sti, Fcomp_st0_sti, Fsub_st0_sti, Fsubr_st0_sti, Fdiv_st0_sti, Fdivr_st0_sti, Fld_m32fp, Fst_m32fp, Fstp_m32fp, Fldenv_m14byte, Fldenv_m28byte, Fldcw_m2byte, Fnstenv_m14byte, Fstenv_m14byte, Fnstenv_m28byte, Fstenv_m28byte, Fnstcw_m2byte, Fstcw_m2byte, Fld_sti, Fxch_st0_sti, Fnop, Fstpnce_sti, Fchs, Fabs, Ftst, Fxam, Fld1, Fldl2t, Fldl2e, Fldpi, Fldlg2, Fldln2, Fldz, F2xm1, Fyl2x, Fptan, Fpatan, Fxtract, Fprem1, Fdecstp, Fincstp, Fprem, Fyl2xp1, Fsqrt, Fsincos, Frndint, Fscale, Fsin, Fcos, Fiadd_m32int, Fimul_m32int, Ficom_m32int, Ficomp_m32int, Fisub_m32int, Fisubr_m32int, Fidiv_m32int, Fidivr_m32int, Fcmovb_st0_sti, Fcmove_st0_sti, Fcmovbe_st0_sti, Fcmovu_st0_sti, Fucompp, Fild_m32int, Fisttp_m32int, Fist_m32int, Fistp_m32int, Fld_m80fp, Fstp_m80fp, Fcmovnb_st0_sti, Fcmovne_st0_sti, Fcmovnbe_st0_sti, Fcmovnu_st0_sti, Fneni, Feni, Fndisi, Fdisi, Fnclex, Fclex, Fninit, Finit, Fnsetpm, Fsetpm, Frstpm, Fucomi_st0_sti, Fcomi_st0_sti, Fadd_m64fp, Fmul_m64fp, Fcom_m64fp, Fcomp_m64fp, Fsub_m64fp, Fsubr_m64fp, Fdiv_m64fp, Fdivr_m64fp, Fadd_sti_st0, Fmul_sti_st0, Fcom_st0_sti_DCD0, Fcomp_st0_sti_DCD8, Fsubr_sti_st0, Fsub_sti_st0, Fdivr_sti_st0, Fdiv_sti_st0, Fld_m64fp, Fisttp_m64int, Fst_m64fp, Fstp_m64fp, Frstor_m94byte, Frstor_m108byte, Fnsave_m94byte, Fsave_m94byte, Fnsave_m108byte, Fsave_m108byte, Fnstsw_m2byte, Fstsw_m2byte, Ffree_sti, 
Fxch_st0_sti_DDC8, Fst_sti, Fstp_sti, Fucom_st0_sti, Fucomp_st0_sti, Fiadd_m16int, Fimul_m16int, Ficom_m16int, Ficomp_m16int, Fisub_m16int, Fisubr_m16int, Fidiv_m16int, Fidivr_m16int, Faddp_sti_st0, Fmulp_sti_st0, Fcomp_st0_sti_DED0, Fcompp, Fsubrp_sti_st0, Fsubp_sti_st0, Fdivrp_sti_st0, Fdivp_sti_st0, Fild_m16int, Fisttp_m16int, Fist_m16int, Fistp_m16int, Fbld_m80bcd, Fild_m64int, Fbstp_m80bcd, Fistp_m64int, Ffreep_sti, Fxch_st0_sti_DFC8, Fstp_sti_DFD0, Fstp_sti_DFD8, Fnstsw_AX, Fstsw_AX, Fstdw_AX, Fstsg_AX, Fucomip_st0_sti, Fcomip_st0_sti, Loopne_rel8_16_CX, Loopne_rel8_32_CX, Loopne_rel8_16_ECX, Loopne_rel8_32_ECX, Loopne_rel8_64_ECX, Loopne_rel8_16_RCX, Loopne_rel8_64_RCX, Loope_rel8_16_CX, Loope_rel8_32_CX, Loope_rel8_16_ECX, Loope_rel8_32_ECX, Loope_rel8_64_ECX, Loope_rel8_16_RCX, Loope_rel8_64_RCX, Loop_rel8_16_CX, Loop_rel8_32_CX, Loop_rel8_16_ECX, Loop_rel8_32_ECX, Loop_rel8_64_ECX, Loop_rel8_16_RCX, Loop_rel8_64_RCX, Jcxz_rel8_16, Jcxz_rel8_32, Jecxz_rel8_16, Jecxz_rel8_32, Jecxz_rel8_64, Jrcxz_rel8_16, Jrcxz_rel8_64, In_AL_imm8, In_AX_imm8, In_EAX_imm8, Out_imm8_AL, Out_imm8_AX, Out_imm8_EAX, Call_rel16, Call_rel32_32, Call_rel32_64, Jmp_rel16, Jmp_rel32_32, Jmp_rel32_64, Jmp_ptr1616, Jmp_ptr1632, Jmp_rel8_16, Jmp_rel8_32, Jmp_rel8_64, In_AL_DX, In_AX_DX, In_EAX_DX, Out_DX_AL, Out_DX_AX, Out_DX_EAX, Int1, Hlt, Cmc, Test_rm8_imm8, Test_rm8_imm8_F6r1, Not_rm8, Neg_rm8, Mul_rm8, Imul_rm8, Div_rm8, Idiv_rm8, Test_rm16_imm16, Test_rm32_imm32, Test_rm64_imm32, Test_rm16_imm16_F7r1, Test_rm32_imm32_F7r1, Test_rm64_imm32_F7r1, Not_rm16, Not_rm32, Not_rm64, Neg_rm16, Neg_rm32, Neg_rm64, Mul_rm16, Mul_rm32, Mul_rm64, Imul_rm16, Imul_rm32, Imul_rm64, Div_rm16, Div_rm32, Div_rm64, Idiv_rm16, Idiv_rm32, Idiv_rm64, Clc, Stc, Cli, Sti, Cld, Std, Inc_rm8, Dec_rm8, Inc_rm16, Inc_rm32, Inc_rm64, Dec_rm16, Dec_rm32, Dec_rm64, Call_rm16, Call_rm32, Call_rm64, Call_m1616, Call_m1632, Call_m1664, Jmp_rm16, Jmp_rm32, Jmp_rm64, Jmp_m1616, Jmp_m1632, Jmp_m1664, Push_rm16, 
Push_rm32, Push_rm64, Sldt_rm16, Sldt_r32m16, Sldt_r64m16, Str_rm16, Str_r32m16, Str_r64m16, Lldt_rm16, Lldt_r32m16, Lldt_r64m16, Ltr_rm16, Ltr_r32m16, Ltr_r64m16, Verr_rm16, Verr_r32m16, Verr_r64m16, Verw_rm16, Verw_r32m16, Verw_r64m16, Jmpe_rm16, Jmpe_rm32, Sgdt_m1632_16, Sgdt_m1632, Sgdt_m1664, Sidt_m1632_16, Sidt_m1632, Sidt_m1664, Lgdt_m1632_16, Lgdt_m1632, Lgdt_m1664, Lidt_m1632_16, Lidt_m1632, Lidt_m1664, Smsw_rm16, Smsw_r32m16, Smsw_r64m16, Rstorssp_m64, Lmsw_rm16, Lmsw_r32m16, Lmsw_r64m16, Invlpg_m, Enclv, Vmcall, Vmlaunch, Vmresume, Vmxoff, Pconfig, Monitorw, Monitord, Monitorq, Mwait, Clac, Stac, Encls, Xgetbv, Xsetbv, Vmfunc, Xend, Xtest, Enclu, Vmrunw, Vmrund, Vmrunq, Vmmcall, Vmloadw, Vmloadd, Vmloadq, Vmsavew, Vmsaved, Vmsaveq, Stgi, Clgi, Skinit, Invlpgaw, Invlpgad, Invlpgaq, Setssbsy, Saveprevssp, Rdpkru, Wrpkru, Swapgs, Rdtscp, Monitorxw, Monitorxd, Monitorxq, Mcommit, Mwaitx, Clzerow, Clzerod, Clzeroq, Rdpru, Lar_r16_rm16, Lar_r32_r32m16, Lar_r64_r64m16, Lsl_r16_rm16, Lsl_r32_r32m16, Lsl_r64_r64m16, Storeall, Loadall286, Syscall, Clts, Loadall386, Sysretd, Sysretq, Invd, Wbinvd, Wbnoinvd, Cl1invmb, Ud2, Reservednop_rm16_r16_0F0D, Reservednop_rm32_r32_0F0D, Reservednop_rm64_r64_0F0D, Prefetch_m8, Prefetchw_m8, Prefetchwt1_m8, Femms, Umov_rm8_r8, Umov_rm16_r16, Umov_rm32_r32, Umov_r8_rm8, Umov_r16_rm16, Umov_r32_rm32, Movups_xmm_xmmm128, VEX_Vmovups_xmm_xmmm128, VEX_Vmovups_ymm_ymmm256, EVEX_Vmovups_xmm_k1z_xmmm128, EVEX_Vmovups_ymm_k1z_ymmm256, EVEX_Vmovups_zmm_k1z_zmmm512, Movupd_xmm_xmmm128, VEX_Vmovupd_xmm_xmmm128, VEX_Vmovupd_ymm_ymmm256, EVEX_Vmovupd_xmm_k1z_xmmm128, EVEX_Vmovupd_ymm_k1z_ymmm256, EVEX_Vmovupd_zmm_k1z_zmmm512, Movss_xmm_xmmm32, VEX_Vmovss_xmm_xmm_xmm, VEX_Vmovss_xmm_m32, EVEX_Vmovss_xmm_k1z_xmm_xmm, EVEX_Vmovss_xmm_k1z_m32, Movsd_xmm_xmmm64, VEX_Vmovsd_xmm_xmm_xmm, VEX_Vmovsd_xmm_m64, EVEX_Vmovsd_xmm_k1z_xmm_xmm, EVEX_Vmovsd_xmm_k1z_m64, Movups_xmmm128_xmm, VEX_Vmovups_xmmm128_xmm, VEX_Vmovups_ymmm256_ymm, 
EVEX_Vmovups_xmmm128_k1z_xmm, EVEX_Vmovups_ymmm256_k1z_ymm, EVEX_Vmovups_zmmm512_k1z_zmm, Movupd_xmmm128_xmm, VEX_Vmovupd_xmmm128_xmm, VEX_Vmovupd_ymmm256_ymm, EVEX_Vmovupd_xmmm128_k1z_xmm, EVEX_Vmovupd_ymmm256_k1z_ymm, EVEX_Vmovupd_zmmm512_k1z_zmm, Movss_xmmm32_xmm, VEX_Vmovss_xmm_xmm_xmm_0F11, VEX_Vmovss_m32_xmm, EVEX_Vmovss_xmm_k1z_xmm_xmm_0F11, EVEX_Vmovss_m32_k1_xmm, Movsd_xmmm64_xmm, VEX_Vmovsd_xmm_xmm_xmm_0F11, VEX_Vmovsd_m64_xmm, EVEX_Vmovsd_xmm_k1z_xmm_xmm_0F11, EVEX_Vmovsd_m64_k1_xmm, Movhlps_xmm_xmm, Movlps_xmm_m64, VEX_Vmovhlps_xmm_xmm_xmm, VEX_Vmovlps_xmm_xmm_m64, EVEX_Vmovhlps_xmm_xmm_xmm, EVEX_Vmovlps_xmm_xmm_m64, Movlpd_xmm_m64, VEX_Vmovlpd_xmm_xmm_m64, EVEX_Vmovlpd_xmm_xmm_m64, Movsldup_xmm_xmmm128, VEX_Vmovsldup_xmm_xmmm128, VEX_Vmovsldup_ymm_ymmm256, EVEX_Vmovsldup_xmm_k1z_xmmm128, EVEX_Vmovsldup_ymm_k1z_ymmm256, EVEX_Vmovsldup_zmm_k1z_zmmm512, Movddup_xmm_xmmm64, VEX_Vmovddup_xmm_xmmm64, VEX_Vmovddup_ymm_ymmm256, EVEX_Vmovddup_xmm_k1z_xmmm64, EVEX_Vmovddup_ymm_k1z_ymmm256, EVEX_Vmovddup_zmm_k1z_zmmm512, Movlps_m64_xmm, VEX_Vmovlps_m64_xmm, EVEX_Vmovlps_m64_xmm, Movlpd_m64_xmm, VEX_Vmovlpd_m64_xmm, EVEX_Vmovlpd_m64_xmm, Unpcklps_xmm_xmmm128, VEX_Vunpcklps_xmm_xmm_xmmm128, VEX_Vunpcklps_ymm_ymm_ymmm256, EVEX_Vunpcklps_xmm_k1z_xmm_xmmm128b32, EVEX_Vunpcklps_ymm_k1z_ymm_ymmm256b32, EVEX_Vunpcklps_zmm_k1z_zmm_zmmm512b32, Unpcklpd_xmm_xmmm128, VEX_Vunpcklpd_xmm_xmm_xmmm128, VEX_Vunpcklpd_ymm_ymm_ymmm256, EVEX_Vunpcklpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vunpcklpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vunpcklpd_zmm_k1z_zmm_zmmm512b64, Unpckhps_xmm_xmmm128, VEX_Vunpckhps_xmm_xmm_xmmm128, VEX_Vunpckhps_ymm_ymm_ymmm256, EVEX_Vunpckhps_xmm_k1z_xmm_xmmm128b32, EVEX_Vunpckhps_ymm_k1z_ymm_ymmm256b32, EVEX_Vunpckhps_zmm_k1z_zmm_zmmm512b32, Unpckhpd_xmm_xmmm128, VEX_Vunpckhpd_xmm_xmm_xmmm128, VEX_Vunpckhpd_ymm_ymm_ymmm256, EVEX_Vunpckhpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vunpckhpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vunpckhpd_zmm_k1z_zmm_zmmm512b64, Movlhps_xmm_xmm, 
VEX_Vmovlhps_xmm_xmm_xmm, EVEX_Vmovlhps_xmm_xmm_xmm, Movhps_xmm_m64, VEX_Vmovhps_xmm_xmm_m64, EVEX_Vmovhps_xmm_xmm_m64, Movhpd_xmm_m64, VEX_Vmovhpd_xmm_xmm_m64, EVEX_Vmovhpd_xmm_xmm_m64, Movshdup_xmm_xmmm128, VEX_Vmovshdup_xmm_xmmm128, VEX_Vmovshdup_ymm_ymmm256, EVEX_Vmovshdup_xmm_k1z_xmmm128, EVEX_Vmovshdup_ymm_k1z_ymmm256, EVEX_Vmovshdup_zmm_k1z_zmmm512, Movhps_m64_xmm, VEX_Vmovhps_m64_xmm, EVEX_Vmovhps_m64_xmm, Movhpd_m64_xmm, VEX_Vmovhpd_m64_xmm, EVEX_Vmovhpd_m64_xmm, Reservednop_rm16_r16_0F18, Reservednop_rm32_r32_0F18, Reservednop_rm64_r64_0F18, Reservednop_rm16_r16_0F19, Reservednop_rm32_r32_0F19, Reservednop_rm64_r64_0F19, Reservednop_rm16_r16_0F1A, Reservednop_rm32_r32_0F1A, Reservednop_rm64_r64_0F1A, Reservednop_rm16_r16_0F1B, Reservednop_rm32_r32_0F1B, Reservednop_rm64_r64_0F1B, Reservednop_rm16_r16_0F1C, Reservednop_rm32_r32_0F1C, Reservednop_rm64_r64_0F1C, Reservednop_rm16_r16_0F1D, Reservednop_rm32_r32_0F1D, Reservednop_rm64_r64_0F1D, Reservednop_rm16_r16_0F1E, Reservednop_rm32_r32_0F1E, Reservednop_rm64_r64_0F1E, Reservednop_rm16_r16_0F1F, Reservednop_rm32_r32_0F1F, Reservednop_rm64_r64_0F1F, Prefetchnta_m8, Prefetcht0_m8, Prefetcht1_m8, Prefetcht2_m8, Bndldx_bnd_mib, Bndmov_bnd_bndm64, Bndmov_bnd_bndm128, Bndcl_bnd_rm32, Bndcl_bnd_rm64, Bndcu_bnd_rm32, Bndcu_bnd_rm64, Bndstx_mib_bnd, Bndmov_bndm64_bnd, Bndmov_bndm128_bnd, Bndmk_bnd_m32, Bndmk_bnd_m64, Bndcn_bnd_rm32, Bndcn_bnd_rm64, Cldemote_m8, Rdsspd_r32, Rdsspq_r64, Endbr64, Endbr32, Nop_rm16, Nop_rm32, Nop_rm64, Mov_r32_cr, Mov_r64_cr, Mov_r32_dr, Mov_r64_dr, Mov_cr_r32, Mov_cr_r64, Mov_dr_r32, Mov_dr_r64, Mov_r32_tr, Mov_tr_r32, Movaps_xmm_xmmm128, VEX_Vmovaps_xmm_xmmm128, VEX_Vmovaps_ymm_ymmm256, EVEX_Vmovaps_xmm_k1z_xmmm128, EVEX_Vmovaps_ymm_k1z_ymmm256, EVEX_Vmovaps_zmm_k1z_zmmm512, Movapd_xmm_xmmm128, VEX_Vmovapd_xmm_xmmm128, VEX_Vmovapd_ymm_ymmm256, EVEX_Vmovapd_xmm_k1z_xmmm128, EVEX_Vmovapd_ymm_k1z_ymmm256, EVEX_Vmovapd_zmm_k1z_zmmm512, Movaps_xmmm128_xmm, VEX_Vmovaps_xmmm128_xmm, 
VEX_Vmovaps_ymmm256_ymm, EVEX_Vmovaps_xmmm128_k1z_xmm, EVEX_Vmovaps_ymmm256_k1z_ymm, EVEX_Vmovaps_zmmm512_k1z_zmm, Movapd_xmmm128_xmm, VEX_Vmovapd_xmmm128_xmm, VEX_Vmovapd_ymmm256_ymm, EVEX_Vmovapd_xmmm128_k1z_xmm, EVEX_Vmovapd_ymmm256_k1z_ymm, EVEX_Vmovapd_zmmm512_k1z_zmm, Cvtpi2ps_xmm_mmm64, Cvtpi2pd_xmm_mmm64, Cvtsi2ss_xmm_rm32, Cvtsi2ss_xmm_rm64, VEX_Vcvtsi2ss_xmm_xmm_rm32, VEX_Vcvtsi2ss_xmm_xmm_rm64, EVEX_Vcvtsi2ss_xmm_xmm_rm32_er, EVEX_Vcvtsi2ss_xmm_xmm_rm64_er, Cvtsi2sd_xmm_rm32, Cvtsi2sd_xmm_rm64, VEX_Vcvtsi2sd_xmm_xmm_rm32, VEX_Vcvtsi2sd_xmm_xmm_rm64, EVEX_Vcvtsi2sd_xmm_xmm_rm32_er, EVEX_Vcvtsi2sd_xmm_xmm_rm64_er, Movntps_m128_xmm, VEX_Vmovntps_m128_xmm, VEX_Vmovntps_m256_ymm, EVEX_Vmovntps_m128_xmm, EVEX_Vmovntps_m256_ymm, EVEX_Vmovntps_m512_zmm, Movntpd_m128_xmm, VEX_Vmovntpd_m128_xmm, VEX_Vmovntpd_m256_ymm, EVEX_Vmovntpd_m128_xmm, EVEX_Vmovntpd_m256_ymm, EVEX_Vmovntpd_m512_zmm, Movntss_m32_xmm, Movntsd_m64_xmm, Cvttps2pi_mm_xmmm64, Cvttpd2pi_mm_xmmm128, Cvttss2si_r32_xmmm32, Cvttss2si_r64_xmmm32, VEX_Vcvttss2si_r32_xmmm32, VEX_Vcvttss2si_r64_xmmm32, EVEX_Vcvttss2si_r32_xmmm32_sae, EVEX_Vcvttss2si_r64_xmmm32_sae, Cvttsd2si_r32_xmmm64, Cvttsd2si_r64_xmmm64, VEX_Vcvttsd2si_r32_xmmm64, VEX_Vcvttsd2si_r64_xmmm64, EVEX_Vcvttsd2si_r32_xmmm64_sae, EVEX_Vcvttsd2si_r64_xmmm64_sae, Cvtps2pi_mm_xmmm64, Cvtpd2pi_mm_xmmm128, Cvtss2si_r32_xmmm32, Cvtss2si_r64_xmmm32, VEX_Vcvtss2si_r32_xmmm32, VEX_Vcvtss2si_r64_xmmm32, EVEX_Vcvtss2si_r32_xmmm32_er, EVEX_Vcvtss2si_r64_xmmm32_er, Cvtsd2si_r32_xmmm64, Cvtsd2si_r64_xmmm64, VEX_Vcvtsd2si_r32_xmmm64, VEX_Vcvtsd2si_r64_xmmm64, EVEX_Vcvtsd2si_r32_xmmm64_er, EVEX_Vcvtsd2si_r64_xmmm64_er, Ucomiss_xmm_xmmm32, VEX_Vucomiss_xmm_xmmm32, EVEX_Vucomiss_xmm_xmmm32_sae, Ucomisd_xmm_xmmm64, VEX_Vucomisd_xmm_xmmm64, EVEX_Vucomisd_xmm_xmmm64_sae, Comiss_xmm_xmmm32, Comisd_xmm_xmmm64, VEX_Vcomiss_xmm_xmmm32, VEX_Vcomisd_xmm_xmmm64, EVEX_Vcomiss_xmm_xmmm32_sae, EVEX_Vcomisd_xmm_xmmm64_sae, Wrmsr, Rdtsc, Rdmsr, Rdpmc, Sysenter, Sysexitd, 
Sysexitq, Getsecd, Cmovo_r16_rm16, Cmovo_r32_rm32, Cmovo_r64_rm64, Cmovno_r16_rm16, Cmovno_r32_rm32, Cmovno_r64_rm64, Cmovb_r16_rm16, Cmovb_r32_rm32, Cmovb_r64_rm64, Cmovae_r16_rm16, Cmovae_r32_rm32, Cmovae_r64_rm64, Cmove_r16_rm16, Cmove_r32_rm32, Cmove_r64_rm64, Cmovne_r16_rm16, Cmovne_r32_rm32, Cmovne_r64_rm64, Cmovbe_r16_rm16, Cmovbe_r32_rm32, Cmovbe_r64_rm64, Cmova_r16_rm16, Cmova_r32_rm32, Cmova_r64_rm64, Cmovs_r16_rm16, Cmovs_r32_rm32, Cmovs_r64_rm64, Cmovns_r16_rm16, Cmovns_r32_rm32, Cmovns_r64_rm64, Cmovp_r16_rm16, Cmovp_r32_rm32, Cmovp_r64_rm64, Cmovnp_r16_rm16, Cmovnp_r32_rm32, Cmovnp_r64_rm64, Cmovl_r16_rm16, Cmovl_r32_rm32, Cmovl_r64_rm64, Cmovge_r16_rm16, Cmovge_r32_rm32, Cmovge_r64_rm64, Cmovle_r16_rm16, Cmovle_r32_rm32, Cmovle_r64_rm64, Cmovg_r16_rm16, Cmovg_r32_rm32, Cmovg_r64_rm64, VEX_Kandw_kr_kr_kr, VEX_Kandq_kr_kr_kr, VEX_Kandb_kr_kr_kr, VEX_Kandd_kr_kr_kr, VEX_Kandnw_kr_kr_kr, VEX_Kandnq_kr_kr_kr, VEX_Kandnb_kr_kr_kr, VEX_Kandnd_kr_kr_kr, VEX_Knotw_kr_kr, VEX_Knotq_kr_kr, VEX_Knotb_kr_kr, VEX_Knotd_kr_kr, VEX_Korw_kr_kr_kr, VEX_Korq_kr_kr_kr, VEX_Korb_kr_kr_kr, VEX_Kord_kr_kr_kr, VEX_Kxnorw_kr_kr_kr, VEX_Kxnorq_kr_kr_kr, VEX_Kxnorb_kr_kr_kr, VEX_Kxnord_kr_kr_kr, VEX_Kxorw_kr_kr_kr, VEX_Kxorq_kr_kr_kr, VEX_Kxorb_kr_kr_kr, VEX_Kxord_kr_kr_kr, VEX_Kaddw_kr_kr_kr, VEX_Kaddq_kr_kr_kr, VEX_Kaddb_kr_kr_kr, VEX_Kaddd_kr_kr_kr, VEX_Kunpckwd_kr_kr_kr, VEX_Kunpckdq_kr_kr_kr, VEX_Kunpckbw_kr_kr_kr, Movmskps_r32_xmm, Movmskps_r64_xmm, VEX_Vmovmskps_r32_xmm, VEX_Vmovmskps_r64_xmm, VEX_Vmovmskps_r32_ymm, VEX_Vmovmskps_r64_ymm, Movmskpd_r32_xmm, Movmskpd_r64_xmm, VEX_Vmovmskpd_r32_xmm, VEX_Vmovmskpd_r64_xmm, VEX_Vmovmskpd_r32_ymm, VEX_Vmovmskpd_r64_ymm, Sqrtps_xmm_xmmm128, VEX_Vsqrtps_xmm_xmmm128, VEX_Vsqrtps_ymm_ymmm256, EVEX_Vsqrtps_xmm_k1z_xmmm128b32, EVEX_Vsqrtps_ymm_k1z_ymmm256b32, EVEX_Vsqrtps_zmm_k1z_zmmm512b32_er, Sqrtpd_xmm_xmmm128, VEX_Vsqrtpd_xmm_xmmm128, VEX_Vsqrtpd_ymm_ymmm256, EVEX_Vsqrtpd_xmm_k1z_xmmm128b64, EVEX_Vsqrtpd_ymm_k1z_ymmm256b64, 
EVEX_Vsqrtpd_zmm_k1z_zmmm512b64_er, Sqrtss_xmm_xmmm32, VEX_Vsqrtss_xmm_xmm_xmmm32, EVEX_Vsqrtss_xmm_k1z_xmm_xmmm32_er, Sqrtsd_xmm_xmmm64, VEX_Vsqrtsd_xmm_xmm_xmmm64, EVEX_Vsqrtsd_xmm_k1z_xmm_xmmm64_er, Rsqrtps_xmm_xmmm128, VEX_Vrsqrtps_xmm_xmmm128, VEX_Vrsqrtps_ymm_ymmm256, Rsqrtss_xmm_xmmm32, VEX_Vrsqrtss_xmm_xmm_xmmm32, Rcpps_xmm_xmmm128, VEX_Vrcpps_xmm_xmmm128, VEX_Vrcpps_ymm_ymmm256, Rcpss_xmm_xmmm32, VEX_Vrcpss_xmm_xmm_xmmm32, Andps_xmm_xmmm128, VEX_Vandps_xmm_xmm_xmmm128, VEX_Vandps_ymm_ymm_ymmm256, EVEX_Vandps_xmm_k1z_xmm_xmmm128b32, EVEX_Vandps_ymm_k1z_ymm_ymmm256b32, EVEX_Vandps_zmm_k1z_zmm_zmmm512b32, Andpd_xmm_xmmm128, VEX_Vandpd_xmm_xmm_xmmm128, VEX_Vandpd_ymm_ymm_ymmm256, EVEX_Vandpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vandpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vandpd_zmm_k1z_zmm_zmmm512b64, Andnps_xmm_xmmm128, VEX_Vandnps_xmm_xmm_xmmm128, VEX_Vandnps_ymm_ymm_ymmm256, EVEX_Vandnps_xmm_k1z_xmm_xmmm128b32, EVEX_Vandnps_ymm_k1z_ymm_ymmm256b32, EVEX_Vandnps_zmm_k1z_zmm_zmmm512b32, Andnpd_xmm_xmmm128, VEX_Vandnpd_xmm_xmm_xmmm128, VEX_Vandnpd_ymm_ymm_ymmm256, EVEX_Vandnpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vandnpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vandnpd_zmm_k1z_zmm_zmmm512b64, Orps_xmm_xmmm128, VEX_Vorps_xmm_xmm_xmmm128, VEX_Vorps_ymm_ymm_ymmm256, EVEX_Vorps_xmm_k1z_xmm_xmmm128b32, EVEX_Vorps_ymm_k1z_ymm_ymmm256b32, EVEX_Vorps_zmm_k1z_zmm_zmmm512b32, Orpd_xmm_xmmm128, VEX_Vorpd_xmm_xmm_xmmm128, VEX_Vorpd_ymm_ymm_ymmm256, EVEX_Vorpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vorpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vorpd_zmm_k1z_zmm_zmmm512b64, Xorps_xmm_xmmm128, VEX_Vxorps_xmm_xmm_xmmm128, VEX_Vxorps_ymm_ymm_ymmm256, EVEX_Vxorps_xmm_k1z_xmm_xmmm128b32, EVEX_Vxorps_ymm_k1z_ymm_ymmm256b32, EVEX_Vxorps_zmm_k1z_zmm_zmmm512b32, Xorpd_xmm_xmmm128, VEX_Vxorpd_xmm_xmm_xmmm128, VEX_Vxorpd_ymm_ymm_ymmm256, EVEX_Vxorpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vxorpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vxorpd_zmm_k1z_zmm_zmmm512b64, Addps_xmm_xmmm128, VEX_Vaddps_xmm_xmm_xmmm128, VEX_Vaddps_ymm_ymm_ymmm256, 
EVEX_Vaddps_xmm_k1z_xmm_xmmm128b32, EVEX_Vaddps_ymm_k1z_ymm_ymmm256b32, EVEX_Vaddps_zmm_k1z_zmm_zmmm512b32_er, Addpd_xmm_xmmm128, VEX_Vaddpd_xmm_xmm_xmmm128, VEX_Vaddpd_ymm_ymm_ymmm256, EVEX_Vaddpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vaddpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vaddpd_zmm_k1z_zmm_zmmm512b64_er, Addss_xmm_xmmm32, VEX_Vaddss_xmm_xmm_xmmm32, EVEX_Vaddss_xmm_k1z_xmm_xmmm32_er, Addsd_xmm_xmmm64, VEX_Vaddsd_xmm_xmm_xmmm64, EVEX_Vaddsd_xmm_k1z_xmm_xmmm64_er, Mulps_xmm_xmmm128, VEX_Vmulps_xmm_xmm_xmmm128, VEX_Vmulps_ymm_ymm_ymmm256, EVEX_Vmulps_xmm_k1z_xmm_xmmm128b32, EVEX_Vmulps_ymm_k1z_ymm_ymmm256b32, EVEX_Vmulps_zmm_k1z_zmm_zmmm512b32_er, Mulpd_xmm_xmmm128, VEX_Vmulpd_xmm_xmm_xmmm128, VEX_Vmulpd_ymm_ymm_ymmm256, EVEX_Vmulpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vmulpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vmulpd_zmm_k1z_zmm_zmmm512b64_er, Mulss_xmm_xmmm32, VEX_Vmulss_xmm_xmm_xmmm32, EVEX_Vmulss_xmm_k1z_xmm_xmmm32_er, Mulsd_xmm_xmmm64, VEX_Vmulsd_xmm_xmm_xmmm64, EVEX_Vmulsd_xmm_k1z_xmm_xmmm64_er, Cvtps2pd_xmm_xmmm64, VEX_Vcvtps2pd_xmm_xmmm64, VEX_Vcvtps2pd_ymm_xmmm128, EVEX_Vcvtps2pd_xmm_k1z_xmmm64b32, EVEX_Vcvtps2pd_ymm_k1z_xmmm128b32, EVEX_Vcvtps2pd_zmm_k1z_ymmm256b32_sae, Cvtpd2ps_xmm_xmmm128, VEX_Vcvtpd2ps_xmm_xmmm128, VEX_Vcvtpd2ps_xmm_ymmm256, EVEX_Vcvtpd2ps_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2ps_xmm_k1z_ymmm256b64, EVEX_Vcvtpd2ps_ymm_k1z_zmmm512b64_er, Cvtss2sd_xmm_xmmm32, VEX_Vcvtss2sd_xmm_xmm_xmmm32, EVEX_Vcvtss2sd_xmm_k1z_xmm_xmmm32_sae, Cvtsd2ss_xmm_xmmm64, VEX_Vcvtsd2ss_xmm_xmm_xmmm64, EVEX_Vcvtsd2ss_xmm_k1z_xmm_xmmm64_er, Cvtdq2ps_xmm_xmmm128, VEX_Vcvtdq2ps_xmm_xmmm128, VEX_Vcvtdq2ps_ymm_ymmm256, EVEX_Vcvtdq2ps_xmm_k1z_xmmm128b32, EVEX_Vcvtdq2ps_ymm_k1z_ymmm256b32, EVEX_Vcvtdq2ps_zmm_k1z_zmmm512b32_er, EVEX_Vcvtqq2ps_xmm_k1z_xmmm128b64, EVEX_Vcvtqq2ps_xmm_k1z_ymmm256b64, EVEX_Vcvtqq2ps_ymm_k1z_zmmm512b64_er, Cvtps2dq_xmm_xmmm128, VEX_Vcvtps2dq_xmm_xmmm128, VEX_Vcvtps2dq_ymm_ymmm256, EVEX_Vcvtps2dq_xmm_k1z_xmmm128b32, EVEX_Vcvtps2dq_ymm_k1z_ymmm256b32, 
EVEX_Vcvtps2dq_zmm_k1z_zmmm512b32_er, Cvttps2dq_xmm_xmmm128, VEX_Vcvttps2dq_xmm_xmmm128, VEX_Vcvttps2dq_ymm_ymmm256, EVEX_Vcvttps2dq_xmm_k1z_xmmm128b32, EVEX_Vcvttps2dq_ymm_k1z_ymmm256b32, EVEX_Vcvttps2dq_zmm_k1z_zmmm512b32_sae, Subps_xmm_xmmm128, VEX_Vsubps_xmm_xmm_xmmm128, VEX_Vsubps_ymm_ymm_ymmm256, EVEX_Vsubps_xmm_k1z_xmm_xmmm128b32, EVEX_Vsubps_ymm_k1z_ymm_ymmm256b32, EVEX_Vsubps_zmm_k1z_zmm_zmmm512b32_er, Subpd_xmm_xmmm128, VEX_Vsubpd_xmm_xmm_xmmm128, VEX_Vsubpd_ymm_ymm_ymmm256, EVEX_Vsubpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vsubpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vsubpd_zmm_k1z_zmm_zmmm512b64_er, Subss_xmm_xmmm32, VEX_Vsubss_xmm_xmm_xmmm32, EVEX_Vsubss_xmm_k1z_xmm_xmmm32_er, Subsd_xmm_xmmm64, VEX_Vsubsd_xmm_xmm_xmmm64, EVEX_Vsubsd_xmm_k1z_xmm_xmmm64_er, Minps_xmm_xmmm128, VEX_Vminps_xmm_xmm_xmmm128, VEX_Vminps_ymm_ymm_ymmm256, EVEX_Vminps_xmm_k1z_xmm_xmmm128b32, EVEX_Vminps_ymm_k1z_ymm_ymmm256b32, EVEX_Vminps_zmm_k1z_zmm_zmmm512b32_sae, Minpd_xmm_xmmm128, VEX_Vminpd_xmm_xmm_xmmm128, VEX_Vminpd_ymm_ymm_ymmm256, EVEX_Vminpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vminpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vminpd_zmm_k1z_zmm_zmmm512b64_sae, Minss_xmm_xmmm32, VEX_Vminss_xmm_xmm_xmmm32, EVEX_Vminss_xmm_k1z_xmm_xmmm32_sae, Minsd_xmm_xmmm64, VEX_Vminsd_xmm_xmm_xmmm64, EVEX_Vminsd_xmm_k1z_xmm_xmmm64_sae, Divps_xmm_xmmm128, VEX_Vdivps_xmm_xmm_xmmm128, VEX_Vdivps_ymm_ymm_ymmm256, EVEX_Vdivps_xmm_k1z_xmm_xmmm128b32, EVEX_Vdivps_ymm_k1z_ymm_ymmm256b32, EVEX_Vdivps_zmm_k1z_zmm_zmmm512b32_er, Divpd_xmm_xmmm128, VEX_Vdivpd_xmm_xmm_xmmm128, VEX_Vdivpd_ymm_ymm_ymmm256, EVEX_Vdivpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vdivpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vdivpd_zmm_k1z_zmm_zmmm512b64_er, Divss_xmm_xmmm32, VEX_Vdivss_xmm_xmm_xmmm32, EVEX_Vdivss_xmm_k1z_xmm_xmmm32_er, Divsd_xmm_xmmm64, VEX_Vdivsd_xmm_xmm_xmmm64, EVEX_Vdivsd_xmm_k1z_xmm_xmmm64_er, Maxps_xmm_xmmm128, VEX_Vmaxps_xmm_xmm_xmmm128, VEX_Vmaxps_ymm_ymm_ymmm256, EVEX_Vmaxps_xmm_k1z_xmm_xmmm128b32, EVEX_Vmaxps_ymm_k1z_ymm_ymmm256b32, 
EVEX_Vmaxps_zmm_k1z_zmm_zmmm512b32_sae, Maxpd_xmm_xmmm128, VEX_Vmaxpd_xmm_xmm_xmmm128, VEX_Vmaxpd_ymm_ymm_ymmm256, EVEX_Vmaxpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vmaxpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vmaxpd_zmm_k1z_zmm_zmmm512b64_sae, Maxss_xmm_xmmm32, VEX_Vmaxss_xmm_xmm_xmmm32, EVEX_Vmaxss_xmm_k1z_xmm_xmmm32_sae, Maxsd_xmm_xmmm64, VEX_Vmaxsd_xmm_xmm_xmmm64, EVEX_Vmaxsd_xmm_k1z_xmm_xmmm64_sae, Punpcklbw_mm_mmm32, Punpcklbw_xmm_xmmm128, VEX_Vpunpcklbw_xmm_xmm_xmmm128, VEX_Vpunpcklbw_ymm_ymm_ymmm256, EVEX_Vpunpcklbw_xmm_k1z_xmm_xmmm128, EVEX_Vpunpcklbw_ymm_k1z_ymm_ymmm256, EVEX_Vpunpcklbw_zmm_k1z_zmm_zmmm512, Punpcklwd_mm_mmm32, Punpcklwd_xmm_xmmm128, VEX_Vpunpcklwd_xmm_xmm_xmmm128, VEX_Vpunpcklwd_ymm_ymm_ymmm256, EVEX_Vpunpcklwd_xmm_k1z_xmm_xmmm128, EVEX_Vpunpcklwd_ymm_k1z_ymm_ymmm256, EVEX_Vpunpcklwd_zmm_k1z_zmm_zmmm512, Punpckldq_mm_mmm32, Punpckldq_xmm_xmmm128, VEX_Vpunpckldq_xmm_xmm_xmmm128, VEX_Vpunpckldq_ymm_ymm_ymmm256, EVEX_Vpunpckldq_xmm_k1z_xmm_xmmm128b32, EVEX_Vpunpckldq_ymm_k1z_ymm_ymmm256b32, EVEX_Vpunpckldq_zmm_k1z_zmm_zmmm512b32, Packsswb_mm_mmm64, Packsswb_xmm_xmmm128, VEX_Vpacksswb_xmm_xmm_xmmm128, VEX_Vpacksswb_ymm_ymm_ymmm256, EVEX_Vpacksswb_xmm_k1z_xmm_xmmm128, EVEX_Vpacksswb_ymm_k1z_ymm_ymmm256, EVEX_Vpacksswb_zmm_k1z_zmm_zmmm512, Pcmpgtb_mm_mmm64, Pcmpgtb_xmm_xmmm128, VEX_Vpcmpgtb_xmm_xmm_xmmm128, VEX_Vpcmpgtb_ymm_ymm_ymmm256, EVEX_Vpcmpgtb_kr_k1_xmm_xmmm128, EVEX_Vpcmpgtb_kr_k1_ymm_ymmm256, EVEX_Vpcmpgtb_kr_k1_zmm_zmmm512, Pcmpgtw_mm_mmm64, Pcmpgtw_xmm_xmmm128, VEX_Vpcmpgtw_xmm_xmm_xmmm128, VEX_Vpcmpgtw_ymm_ymm_ymmm256, EVEX_Vpcmpgtw_kr_k1_xmm_xmmm128, EVEX_Vpcmpgtw_kr_k1_ymm_ymmm256, EVEX_Vpcmpgtw_kr_k1_zmm_zmmm512, Pcmpgtd_mm_mmm64, Pcmpgtd_xmm_xmmm128, VEX_Vpcmpgtd_xmm_xmm_xmmm128, VEX_Vpcmpgtd_ymm_ymm_ymmm256, EVEX_Vpcmpgtd_kr_k1_xmm_xmmm128b32, EVEX_Vpcmpgtd_kr_k1_ymm_ymmm256b32, EVEX_Vpcmpgtd_kr_k1_zmm_zmmm512b32, Packuswb_mm_mmm64, Packuswb_xmm_xmmm128, VEX_Vpackuswb_xmm_xmm_xmmm128, VEX_Vpackuswb_ymm_ymm_ymmm256, 
EVEX_Vpackuswb_xmm_k1z_xmm_xmmm128, EVEX_Vpackuswb_ymm_k1z_ymm_ymmm256, EVEX_Vpackuswb_zmm_k1z_zmm_zmmm512, Punpckhbw_mm_mmm64, Punpckhbw_xmm_xmmm128, VEX_Vpunpckhbw_xmm_xmm_xmmm128, VEX_Vpunpckhbw_ymm_ymm_ymmm256, EVEX_Vpunpckhbw_xmm_k1z_xmm_xmmm128, EVEX_Vpunpckhbw_ymm_k1z_ymm_ymmm256, EVEX_Vpunpckhbw_zmm_k1z_zmm_zmmm512, Punpckhwd_mm_mmm64, Punpckhwd_xmm_xmmm128, VEX_Vpunpckhwd_xmm_xmm_xmmm128, VEX_Vpunpckhwd_ymm_ymm_ymmm256, EVEX_Vpunpckhwd_xmm_k1z_xmm_xmmm128, EVEX_Vpunpckhwd_ymm_k1z_ymm_ymmm256, EVEX_Vpunpckhwd_zmm_k1z_zmm_zmmm512, Punpckhdq_mm_mmm64, Punpckhdq_xmm_xmmm128, VEX_Vpunpckhdq_xmm_xmm_xmmm128, VEX_Vpunpckhdq_ymm_ymm_ymmm256, EVEX_Vpunpckhdq_xmm_k1z_xmm_xmmm128b32, EVEX_Vpunpckhdq_ymm_k1z_ymm_ymmm256b32, EVEX_Vpunpckhdq_zmm_k1z_zmm_zmmm512b32, Packssdw_mm_mmm64, Packssdw_xmm_xmmm128, VEX_Vpackssdw_xmm_xmm_xmmm128, VEX_Vpackssdw_ymm_ymm_ymmm256, EVEX_Vpackssdw_xmm_k1z_xmm_xmmm128b32, EVEX_Vpackssdw_ymm_k1z_ymm_ymmm256b32, EVEX_Vpackssdw_zmm_k1z_zmm_zmmm512b32, Punpcklqdq_xmm_xmmm128, VEX_Vpunpcklqdq_xmm_xmm_xmmm128, VEX_Vpunpcklqdq_ymm_ymm_ymmm256, EVEX_Vpunpcklqdq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpunpcklqdq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpunpcklqdq_zmm_k1z_zmm_zmmm512b64, Punpckhqdq_xmm_xmmm128, VEX_Vpunpckhqdq_xmm_xmm_xmmm128, VEX_Vpunpckhqdq_ymm_ymm_ymmm256, EVEX_Vpunpckhqdq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpunpckhqdq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpunpckhqdq_zmm_k1z_zmm_zmmm512b64, Movd_mm_rm32, Movq_mm_rm64, Movd_xmm_rm32, Movq_xmm_rm64, VEX_Vmovd_xmm_rm32, VEX_Vmovq_xmm_rm64, EVEX_Vmovd_xmm_rm32, EVEX_Vmovq_xmm_rm64, Movq_mm_mmm64, Movdqa_xmm_xmmm128, VEX_Vmovdqa_xmm_xmmm128, VEX_Vmovdqa_ymm_ymmm256, EVEX_Vmovdqa32_xmm_k1z_xmmm128, EVEX_Vmovdqa32_ymm_k1z_ymmm256, EVEX_Vmovdqa32_zmm_k1z_zmmm512, EVEX_Vmovdqa64_xmm_k1z_xmmm128, EVEX_Vmovdqa64_ymm_k1z_ymmm256, EVEX_Vmovdqa64_zmm_k1z_zmmm512, Movdqu_xmm_xmmm128, VEX_Vmovdqu_xmm_xmmm128, VEX_Vmovdqu_ymm_ymmm256, EVEX_Vmovdqu32_xmm_k1z_xmmm128, EVEX_Vmovdqu32_ymm_k1z_ymmm256, EVEX_Vmovdqu32_zmm_k1z_zmmm512, 
EVEX_Vmovdqu64_xmm_k1z_xmmm128, EVEX_Vmovdqu64_ymm_k1z_ymmm256, EVEX_Vmovdqu64_zmm_k1z_zmmm512, EVEX_Vmovdqu8_xmm_k1z_xmmm128, EVEX_Vmovdqu8_ymm_k1z_ymmm256, EVEX_Vmovdqu8_zmm_k1z_zmmm512, EVEX_Vmovdqu16_xmm_k1z_xmmm128, EVEX_Vmovdqu16_ymm_k1z_ymmm256, EVEX_Vmovdqu16_zmm_k1z_zmmm512, Pshufw_mm_mmm64_imm8, Pshufd_xmm_xmmm128_imm8, VEX_Vpshufd_xmm_xmmm128_imm8, VEX_Vpshufd_ymm_ymmm256_imm8, EVEX_Vpshufd_xmm_k1z_xmmm128b32_imm8, EVEX_Vpshufd_ymm_k1z_ymmm256b32_imm8, EVEX_Vpshufd_zmm_k1z_zmmm512b32_imm8, Pshufhw_xmm_xmmm128_imm8, VEX_Vpshufhw_xmm_xmmm128_imm8, VEX_Vpshufhw_ymm_ymmm256_imm8, EVEX_Vpshufhw_xmm_k1z_xmmm128_imm8, EVEX_Vpshufhw_ymm_k1z_ymmm256_imm8, EVEX_Vpshufhw_zmm_k1z_zmmm512_imm8, Pshuflw_xmm_xmmm128_imm8, VEX_Vpshuflw_xmm_xmmm128_imm8, VEX_Vpshuflw_ymm_ymmm256_imm8, EVEX_Vpshuflw_xmm_k1z_xmmm128_imm8, EVEX_Vpshuflw_ymm_k1z_ymmm256_imm8, EVEX_Vpshuflw_zmm_k1z_zmmm512_imm8, Psrlw_mm_imm8, Psrlw_xmm_imm8, VEX_Vpsrlw_xmm_xmm_imm8, VEX_Vpsrlw_ymm_ymm_imm8, EVEX_Vpsrlw_xmm_k1z_xmmm128_imm8, EVEX_Vpsrlw_ymm_k1z_ymmm256_imm8, EVEX_Vpsrlw_zmm_k1z_zmmm512_imm8, Psraw_mm_imm8, Psraw_xmm_imm8, VEX_Vpsraw_xmm_xmm_imm8, VEX_Vpsraw_ymm_ymm_imm8, EVEX_Vpsraw_xmm_k1z_xmmm128_imm8, EVEX_Vpsraw_ymm_k1z_ymmm256_imm8, EVEX_Vpsraw_zmm_k1z_zmmm512_imm8, Psllw_mm_imm8, Psllw_xmm_imm8, VEX_Vpsllw_xmm_xmm_imm8, VEX_Vpsllw_ymm_ymm_imm8, EVEX_Vpsllw_xmm_k1z_xmmm128_imm8, EVEX_Vpsllw_ymm_k1z_ymmm256_imm8, EVEX_Vpsllw_zmm_k1z_zmmm512_imm8, EVEX_Vprord_xmm_k1z_xmmm128b32_imm8, EVEX_Vprord_ymm_k1z_ymmm256b32_imm8, EVEX_Vprord_zmm_k1z_zmmm512b32_imm8, EVEX_Vprorq_xmm_k1z_xmmm128b64_imm8, EVEX_Vprorq_ymm_k1z_ymmm256b64_imm8, EVEX_Vprorq_zmm_k1z_zmmm512b64_imm8, EVEX_Vprold_xmm_k1z_xmmm128b32_imm8, EVEX_Vprold_ymm_k1z_ymmm256b32_imm8, EVEX_Vprold_zmm_k1z_zmmm512b32_imm8, EVEX_Vprolq_xmm_k1z_xmmm128b64_imm8, EVEX_Vprolq_ymm_k1z_ymmm256b64_imm8, EVEX_Vprolq_zmm_k1z_zmmm512b64_imm8, Psrld_mm_imm8, Psrld_xmm_imm8, VEX_Vpsrld_xmm_xmm_imm8, VEX_Vpsrld_ymm_ymm_imm8, 
EVEX_Vpsrld_xmm_k1z_xmmm128b32_imm8, EVEX_Vpsrld_ymm_k1z_ymmm256b32_imm8, EVEX_Vpsrld_zmm_k1z_zmmm512b32_imm8, Psrad_mm_imm8, Psrad_xmm_imm8, VEX_Vpsrad_xmm_xmm_imm8, VEX_Vpsrad_ymm_ymm_imm8, EVEX_Vpsrad_xmm_k1z_xmmm128b32_imm8, EVEX_Vpsrad_ymm_k1z_ymmm256b32_imm8, EVEX_Vpsrad_zmm_k1z_zmmm512b32_imm8, EVEX_Vpsraq_xmm_k1z_xmmm128b64_imm8, EVEX_Vpsraq_ymm_k1z_ymmm256b64_imm8, EVEX_Vpsraq_zmm_k1z_zmmm512b64_imm8, Pslld_mm_imm8, Pslld_xmm_imm8, VEX_Vpslld_xmm_xmm_imm8, VEX_Vpslld_ymm_ymm_imm8, EVEX_Vpslld_xmm_k1z_xmmm128b32_imm8, EVEX_Vpslld_ymm_k1z_ymmm256b32_imm8, EVEX_Vpslld_zmm_k1z_zmmm512b32_imm8, Psrlq_mm_imm8, Psrlq_xmm_imm8, VEX_Vpsrlq_xmm_xmm_imm8, VEX_Vpsrlq_ymm_ymm_imm8, EVEX_Vpsrlq_xmm_k1z_xmmm128b64_imm8, EVEX_Vpsrlq_ymm_k1z_ymmm256b64_imm8, EVEX_Vpsrlq_zmm_k1z_zmmm512b64_imm8, Psrldq_xmm_imm8, VEX_Vpsrldq_xmm_xmm_imm8, VEX_Vpsrldq_ymm_ymm_imm8, EVEX_Vpsrldq_xmm_xmmm128_imm8, EVEX_Vpsrldq_ymm_ymmm256_imm8, EVEX_Vpsrldq_zmm_zmmm512_imm8, Psllq_mm_imm8, Psllq_xmm_imm8, VEX_Vpsllq_xmm_xmm_imm8, VEX_Vpsllq_ymm_ymm_imm8, EVEX_Vpsllq_xmm_k1z_xmmm128b64_imm8, EVEX_Vpsllq_ymm_k1z_ymmm256b64_imm8, EVEX_Vpsllq_zmm_k1z_zmmm512b64_imm8, Pslldq_xmm_imm8, VEX_Vpslldq_xmm_xmm_imm8, VEX_Vpslldq_ymm_ymm_imm8, EVEX_Vpslldq_xmm_xmmm128_imm8, EVEX_Vpslldq_ymm_ymmm256_imm8, EVEX_Vpslldq_zmm_zmmm512_imm8, Pcmpeqb_mm_mmm64, Pcmpeqb_xmm_xmmm128, VEX_Vpcmpeqb_xmm_xmm_xmmm128, VEX_Vpcmpeqb_ymm_ymm_ymmm256, EVEX_Vpcmpeqb_kr_k1_xmm_xmmm128, EVEX_Vpcmpeqb_kr_k1_ymm_ymmm256, EVEX_Vpcmpeqb_kr_k1_zmm_zmmm512, Pcmpeqw_mm_mmm64, Pcmpeqw_xmm_xmmm128, VEX_Vpcmpeqw_xmm_xmm_xmmm128, VEX_Vpcmpeqw_ymm_ymm_ymmm256, EVEX_Vpcmpeqw_kr_k1_xmm_xmmm128, EVEX_Vpcmpeqw_kr_k1_ymm_ymmm256, EVEX_Vpcmpeqw_kr_k1_zmm_zmmm512, Pcmpeqd_mm_mmm64, Pcmpeqd_xmm_xmmm128, VEX_Vpcmpeqd_xmm_xmm_xmmm128, VEX_Vpcmpeqd_ymm_ymm_ymmm256, EVEX_Vpcmpeqd_kr_k1_xmm_xmmm128b32, EVEX_Vpcmpeqd_kr_k1_ymm_ymmm256b32, EVEX_Vpcmpeqd_kr_k1_zmm_zmmm512b32, Emms, VEX_Vzeroupper, VEX_Vzeroall, Vmread_rm32_r32, Vmread_rm64_r64, 
EVEX_Vcvttps2udq_xmm_k1z_xmmm128b32, EVEX_Vcvttps2udq_ymm_k1z_ymmm256b32, EVEX_Vcvttps2udq_zmm_k1z_zmmm512b32_sae, EVEX_Vcvttpd2udq_xmm_k1z_xmmm128b64, EVEX_Vcvttpd2udq_xmm_k1z_ymmm256b64, EVEX_Vcvttpd2udq_ymm_k1z_zmmm512b64_sae, Extrq_xmm_imm8_imm8, EVEX_Vcvttps2uqq_xmm_k1z_xmmm64b32, EVEX_Vcvttps2uqq_ymm_k1z_xmmm128b32, EVEX_Vcvttps2uqq_zmm_k1z_ymmm256b32_sae, EVEX_Vcvttpd2uqq_xmm_k1z_xmmm128b64, EVEX_Vcvttpd2uqq_ymm_k1z_ymmm256b64, EVEX_Vcvttpd2uqq_zmm_k1z_zmmm512b64_sae, EVEX_Vcvttss2usi_r32_xmmm32_sae, EVEX_Vcvttss2usi_r64_xmmm32_sae, Insertq_xmm_xmm_imm8_imm8, EVEX_Vcvttsd2usi_r32_xmmm64_sae, EVEX_Vcvttsd2usi_r64_xmmm64_sae, Vmwrite_r32_rm32, Vmwrite_r64_rm64, EVEX_Vcvtps2udq_xmm_k1z_xmmm128b32, EVEX_Vcvtps2udq_ymm_k1z_ymmm256b32, EVEX_Vcvtps2udq_zmm_k1z_zmmm512b32_er, EVEX_Vcvtpd2udq_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2udq_xmm_k1z_ymmm256b64, EVEX_Vcvtpd2udq_ymm_k1z_zmmm512b64_er, Extrq_xmm_xmm, EVEX_Vcvtps2uqq_xmm_k1z_xmmm64b32, EVEX_Vcvtps2uqq_ymm_k1z_xmmm128b32, EVEX_Vcvtps2uqq_zmm_k1z_ymmm256b32_er, EVEX_Vcvtpd2uqq_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2uqq_ymm_k1z_ymmm256b64, EVEX_Vcvtpd2uqq_zmm_k1z_zmmm512b64_er, EVEX_Vcvtss2usi_r32_xmmm32_er, EVEX_Vcvtss2usi_r64_xmmm32_er, Insertq_xmm_xmm, EVEX_Vcvtsd2usi_r32_xmmm64_er, EVEX_Vcvtsd2usi_r64_xmmm64_er, EVEX_Vcvttps2qq_xmm_k1z_xmmm64b32, EVEX_Vcvttps2qq_ymm_k1z_xmmm128b32, EVEX_Vcvttps2qq_zmm_k1z_ymmm256b32_sae, EVEX_Vcvttpd2qq_xmm_k1z_xmmm128b64, EVEX_Vcvttpd2qq_ymm_k1z_ymmm256b64, EVEX_Vcvttpd2qq_zmm_k1z_zmmm512b64_sae, EVEX_Vcvtudq2pd_xmm_k1z_xmmm64b32, EVEX_Vcvtudq2pd_ymm_k1z_xmmm128b32, EVEX_Vcvtudq2pd_zmm_k1z_ymmm256b32_er, EVEX_Vcvtuqq2pd_xmm_k1z_xmmm128b64, EVEX_Vcvtuqq2pd_ymm_k1z_ymmm256b64, EVEX_Vcvtuqq2pd_zmm_k1z_zmmm512b64_er, EVEX_Vcvtudq2ps_xmm_k1z_xmmm128b32, EVEX_Vcvtudq2ps_ymm_k1z_ymmm256b32, EVEX_Vcvtudq2ps_zmm_k1z_zmmm512b32_er, EVEX_Vcvtuqq2ps_xmm_k1z_xmmm128b64, EVEX_Vcvtuqq2ps_xmm_k1z_ymmm256b64, EVEX_Vcvtuqq2ps_ymm_k1z_zmmm512b64_er, EVEX_Vcvtps2qq_xmm_k1z_xmmm64b32, 
EVEX_Vcvtps2qq_ymm_k1z_xmmm128b32, EVEX_Vcvtps2qq_zmm_k1z_ymmm256b32_er, EVEX_Vcvtpd2qq_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2qq_ymm_k1z_ymmm256b64, EVEX_Vcvtpd2qq_zmm_k1z_zmmm512b64_er, EVEX_Vcvtusi2ss_xmm_xmm_rm32_er, EVEX_Vcvtusi2ss_xmm_xmm_rm64_er, EVEX_Vcvtusi2sd_xmm_xmm_rm32_er, EVEX_Vcvtusi2sd_xmm_xmm_rm64_er, Haddpd_xmm_xmmm128, VEX_Vhaddpd_xmm_xmm_xmmm128, VEX_Vhaddpd_ymm_ymm_ymmm256, Haddps_xmm_xmmm128, VEX_Vhaddps_xmm_xmm_xmmm128, VEX_Vhaddps_ymm_ymm_ymmm256, Hsubpd_xmm_xmmm128, VEX_Vhsubpd_xmm_xmm_xmmm128, VEX_Vhsubpd_ymm_ymm_ymmm256, Hsubps_xmm_xmmm128, VEX_Vhsubps_xmm_xmm_xmmm128, VEX_Vhsubps_ymm_ymm_ymmm256, Movd_rm32_mm, Movq_rm64_mm, Movd_rm32_xmm, Movq_rm64_xmm, VEX_Vmovd_rm32_xmm, VEX_Vmovq_rm64_xmm, EVEX_Vmovd_rm32_xmm, EVEX_Vmovq_rm64_xmm, Movq_xmm_xmmm64, VEX_Vmovq_xmm_xmmm64, EVEX_Vmovq_xmm_xmmm64, Movq_mmm64_mm, Movdqa_xmmm128_xmm, VEX_Vmovdqa_xmmm128_xmm, VEX_Vmovdqa_ymmm256_ymm, EVEX_Vmovdqa32_xmmm128_k1z_xmm, EVEX_Vmovdqa32_ymmm256_k1z_ymm, EVEX_Vmovdqa32_zmmm512_k1z_zmm, EVEX_Vmovdqa64_xmmm128_k1z_xmm, EVEX_Vmovdqa64_ymmm256_k1z_ymm, EVEX_Vmovdqa64_zmmm512_k1z_zmm, Movdqu_xmmm128_xmm, VEX_Vmovdqu_xmmm128_xmm, VEX_Vmovdqu_ymmm256_ymm, EVEX_Vmovdqu32_xmmm128_k1z_xmm, EVEX_Vmovdqu32_ymmm256_k1z_ymm, EVEX_Vmovdqu32_zmmm512_k1z_zmm, EVEX_Vmovdqu64_xmmm128_k1z_xmm, EVEX_Vmovdqu64_ymmm256_k1z_ymm, EVEX_Vmovdqu64_zmmm512_k1z_zmm, EVEX_Vmovdqu8_xmmm128_k1z_xmm, EVEX_Vmovdqu8_ymmm256_k1z_ymm, EVEX_Vmovdqu8_zmmm512_k1z_zmm, EVEX_Vmovdqu16_xmmm128_k1z_xmm, EVEX_Vmovdqu16_ymmm256_k1z_ymm, EVEX_Vmovdqu16_zmmm512_k1z_zmm, Jo_rel16, Jo_rel32_32, Jo_rel32_64, Jno_rel16, Jno_rel32_32, Jno_rel32_64, Jb_rel16, Jb_rel32_32, Jb_rel32_64, Jae_rel16, Jae_rel32_32, Jae_rel32_64, Je_rel16, Je_rel32_32, Je_rel32_64, Jne_rel16, Jne_rel32_32, Jne_rel32_64, Jbe_rel16, Jbe_rel32_32, Jbe_rel32_64, Ja_rel16, Ja_rel32_32, Ja_rel32_64, Js_rel16, Js_rel32_32, Js_rel32_64, Jns_rel16, Jns_rel32_32, Jns_rel32_64, Jp_rel16, Jp_rel32_32, Jp_rel32_64, Jnp_rel16, Jnp_rel32_32, 
Jnp_rel32_64, Jl_rel16, Jl_rel32_32, Jl_rel32_64, Jge_rel16, Jge_rel32_32, Jge_rel32_64, Jle_rel16, Jle_rel32_32, Jle_rel32_64, Jg_rel16, Jg_rel32_32, Jg_rel32_64, Seto_rm8, Setno_rm8, Setb_rm8, Setae_rm8, Sete_rm8, Setne_rm8, Setbe_rm8, Seta_rm8, Sets_rm8, Setns_rm8, Setp_rm8, Setnp_rm8, Setl_rm8, Setge_rm8, Setle_rm8, Setg_rm8, VEX_Kmovw_kr_km16, VEX_Kmovq_kr_km64, VEX_Kmovb_kr_km8, VEX_Kmovd_kr_km32, VEX_Kmovw_m16_kr, VEX_Kmovq_m64_kr, VEX_Kmovb_m8_kr, VEX_Kmovd_m32_kr, VEX_Kmovw_kr_r32, VEX_Kmovb_kr_r32, VEX_Kmovd_kr_r32, VEX_Kmovq_kr_r64, VEX_Kmovw_r32_kr, VEX_Kmovb_r32_kr, VEX_Kmovd_r32_kr, VEX_Kmovq_r64_kr, VEX_Kortestw_kr_kr, VEX_Kortestq_kr_kr, VEX_Kortestb_kr_kr, VEX_Kortestd_kr_kr, VEX_Ktestw_kr_kr, VEX_Ktestq_kr_kr, VEX_Ktestb_kr_kr, VEX_Ktestd_kr_kr, Pushw_FS, Pushd_FS, Pushq_FS, Popw_FS, Popd_FS, Popq_FS, Cpuid, Bt_rm16_r16, Bt_rm32_r32, Bt_rm64_r64, Shld_rm16_r16_imm8, Shld_rm32_r32_imm8, Shld_rm64_r64_imm8, Shld_rm16_r16_CL, Shld_rm32_r32_CL, Shld_rm64_r64_CL, Montmul_16, Montmul_32, Montmul_64, Xsha1_16, Xsha1_32, Xsha1_64, Xsha256_16, Xsha256_32, Xsha256_64, Xbts_r16_rm16, Xbts_r32_rm32, Xstore_16, Xstore_32, Xstore_64, Xcryptecb_16, Xcryptecb_32, Xcryptecb_64, Xcryptcbc_16, Xcryptcbc_32, Xcryptcbc_64, Xcryptctr_16, Xcryptctr_32, Xcryptctr_64, Xcryptcfb_16, Xcryptcfb_32, Xcryptcfb_64, Xcryptofb_16, Xcryptofb_32, Xcryptofb_64, Ibts_rm16_r16, Ibts_rm32_r32, Cmpxchg486_rm8_r8, Cmpxchg486_rm16_r16, Cmpxchg486_rm32_r32, Pushw_GS, Pushd_GS, Pushq_GS, Popw_GS, Popd_GS, Popq_GS, Rsm, Bts_rm16_r16, Bts_rm32_r32, Bts_rm64_r64, Shrd_rm16_r16_imm8, Shrd_rm32_r32_imm8, Shrd_rm64_r64_imm8, Shrd_rm16_r16_CL, Shrd_rm32_r32_CL, Shrd_rm64_r64_CL, Fxsave_m512byte, Fxsave64_m512byte, Rdfsbase_r32, Rdfsbase_r64, Fxrstor_m512byte, Fxrstor64_m512byte, Rdgsbase_r32, Rdgsbase_r64, Ldmxcsr_m32, Wrfsbase_r32, Wrfsbase_r64, VEX_Vldmxcsr_m32, Stmxcsr_m32, Wrgsbase_r32, Wrgsbase_r64, VEX_Vstmxcsr_m32, Xsave_mem, Xsave64_mem, Ptwrite_rm32, Ptwrite_rm64, Xrstor_mem, 
Xrstor64_mem, Incsspd_r32, Incsspq_r64, Xsaveopt_mem, Xsaveopt64_mem, Clwb_m8, Tpause_r32, Tpause_r64, Clrssbsy_m64, Umonitor_r16, Umonitor_r32, Umonitor_r64, Umwait_r32, Umwait_r64, Clflush_m8, Clflushopt_m8, Lfence, Lfence_E9, Lfence_EA, Lfence_EB, Lfence_EC, Lfence_ED, Lfence_EE, Lfence_EF, Mfence, Mfence_F1, Mfence_F2, Mfence_F3, Mfence_F4, Mfence_F5, Mfence_F6, Mfence_F7, Sfence, Sfence_F9, Sfence_FA, Sfence_FB, Sfence_FC, Sfence_FD, Sfence_FE, Sfence_FF, Pcommit, Imul_r16_rm16, Imul_r32_rm32, Imul_r64_rm64, Cmpxchg_rm8_r8, Cmpxchg_rm16_r16, Cmpxchg_rm32_r32, Cmpxchg_rm64_r64, Lss_r16_m1616, Lss_r32_m1632, Lss_r64_m1664, Btr_rm16_r16, Btr_rm32_r32, Btr_rm64_r64, Lfs_r16_m1616, Lfs_r32_m1632, Lfs_r64_m1664, Lgs_r16_m1616, Lgs_r32_m1632, Lgs_r64_m1664, Movzx_r16_rm8, Movzx_r32_rm8, Movzx_r64_rm8, Movzx_r16_rm16, Movzx_r32_rm16, Movzx_r64_rm16, Jmpe_disp16, Jmpe_disp32, Popcnt_r16_rm16, Popcnt_r32_rm32, Popcnt_r64_rm64, Ud1_r16_rm16, Ud1_r32_rm32, Ud1_r64_rm64, Bt_rm16_imm8, Bt_rm32_imm8, Bt_rm64_imm8, Bts_rm16_imm8, Bts_rm32_imm8, Bts_rm64_imm8, Btr_rm16_imm8, Btr_rm32_imm8, Btr_rm64_imm8, Btc_rm16_imm8, Btc_rm32_imm8, Btc_rm64_imm8, Btc_rm16_r16, Btc_rm32_r32, Btc_rm64_r64, Bsf_r16_rm16, Bsf_r32_rm32, Bsf_r64_rm64, Tzcnt_r16_rm16, Tzcnt_r32_rm32, Tzcnt_r64_rm64, Bsr_r16_rm16, Bsr_r32_rm32, Bsr_r64_rm64, Lzcnt_r16_rm16, Lzcnt_r32_rm32, Lzcnt_r64_rm64, Movsx_r16_rm8, Movsx_r32_rm8, Movsx_r64_rm8, Movsx_r16_rm16, Movsx_r32_rm16, Movsx_r64_rm16, Xadd_rm8_r8, Xadd_rm16_r16, Xadd_rm32_r32, Xadd_rm64_r64, Cmpps_xmm_xmmm128_imm8, VEX_Vcmpps_xmm_xmm_xmmm128_imm8, VEX_Vcmpps_ymm_ymm_ymmm256_imm8, EVEX_Vcmpps_kr_k1_xmm_xmmm128b32_imm8, EVEX_Vcmpps_kr_k1_ymm_ymmm256b32_imm8, EVEX_Vcmpps_kr_k1_zmm_zmmm512b32_imm8_sae, Cmppd_xmm_xmmm128_imm8, VEX_Vcmppd_xmm_xmm_xmmm128_imm8, VEX_Vcmppd_ymm_ymm_ymmm256_imm8, EVEX_Vcmppd_kr_k1_xmm_xmmm128b64_imm8, EVEX_Vcmppd_kr_k1_ymm_ymmm256b64_imm8, EVEX_Vcmppd_kr_k1_zmm_zmmm512b64_imm8_sae, Cmpss_xmm_xmmm32_imm8, 
VEX_Vcmpss_xmm_xmm_xmmm32_imm8, EVEX_Vcmpss_kr_k1_xmm_xmmm32_imm8_sae, Cmpsd_xmm_xmmm64_imm8, VEX_Vcmpsd_xmm_xmm_xmmm64_imm8, EVEX_Vcmpsd_kr_k1_xmm_xmmm64_imm8_sae, Movnti_m32_r32, Movnti_m64_r64, Pinsrw_mm_r32m16_imm8, Pinsrw_mm_r64m16_imm8, Pinsrw_xmm_r32m16_imm8, Pinsrw_xmm_r64m16_imm8, VEX_Vpinsrw_xmm_xmm_r32m16_imm8, VEX_Vpinsrw_xmm_xmm_r64m16_imm8, EVEX_Vpinsrw_xmm_xmm_r32m16_imm8, EVEX_Vpinsrw_xmm_xmm_r64m16_imm8, Pextrw_r32_mm_imm8, Pextrw_r64_mm_imm8, Pextrw_r32_xmm_imm8, Pextrw_r64_xmm_imm8, VEX_Vpextrw_r32_xmm_imm8, VEX_Vpextrw_r64_xmm_imm8, EVEX_Vpextrw_r32_xmm_imm8, EVEX_Vpextrw_r64_xmm_imm8, Shufps_xmm_xmmm128_imm8, VEX_Vshufps_xmm_xmm_xmmm128_imm8, VEX_Vshufps_ymm_ymm_ymmm256_imm8, EVEX_Vshufps_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vshufps_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vshufps_zmm_k1z_zmm_zmmm512b32_imm8, Shufpd_xmm_xmmm128_imm8, VEX_Vshufpd_xmm_xmm_xmmm128_imm8, VEX_Vshufpd_ymm_ymm_ymmm256_imm8, EVEX_Vshufpd_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vshufpd_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vshufpd_zmm_k1z_zmm_zmmm512b64_imm8, Cmpxchg8b_m64, Cmpxchg16b_m128, Xrstors_mem, Xrstors64_mem, Xsavec_mem, Xsavec64_mem, Xsaves_mem, Xsaves64_mem, Vmptrld_m64, Vmclear_m64, Vmxon_m64, Rdrand_r16, Rdrand_r32, Rdrand_r64, Vmptrst_m64, Rdseed_r16, Rdseed_r32, Rdseed_r64, Rdpid_r32, Rdpid_r64, Bswap_r16, Bswap_r32, Bswap_r64, Addsubpd_xmm_xmmm128, VEX_Vaddsubpd_xmm_xmm_xmmm128, VEX_Vaddsubpd_ymm_ymm_ymmm256, Addsubps_xmm_xmmm128, VEX_Vaddsubps_xmm_xmm_xmmm128, VEX_Vaddsubps_ymm_ymm_ymmm256, Psrlw_mm_mmm64, Psrlw_xmm_xmmm128, VEX_Vpsrlw_xmm_xmm_xmmm128, VEX_Vpsrlw_ymm_ymm_xmmm128, EVEX_Vpsrlw_xmm_k1z_xmm_xmmm128, EVEX_Vpsrlw_ymm_k1z_ymm_xmmm128, EVEX_Vpsrlw_zmm_k1z_zmm_xmmm128, Psrld_mm_mmm64, Psrld_xmm_xmmm128, VEX_Vpsrld_xmm_xmm_xmmm128, VEX_Vpsrld_ymm_ymm_xmmm128, EVEX_Vpsrld_xmm_k1z_xmm_xmmm128, EVEX_Vpsrld_ymm_k1z_ymm_xmmm128, EVEX_Vpsrld_zmm_k1z_zmm_xmmm128, Psrlq_mm_mmm64, Psrlq_xmm_xmmm128, VEX_Vpsrlq_xmm_xmm_xmmm128, VEX_Vpsrlq_ymm_ymm_xmmm128, 
EVEX_Vpsrlq_xmm_k1z_xmm_xmmm128, EVEX_Vpsrlq_ymm_k1z_ymm_xmmm128, EVEX_Vpsrlq_zmm_k1z_zmm_xmmm128, Paddq_mm_mmm64, Paddq_xmm_xmmm128, VEX_Vpaddq_xmm_xmm_xmmm128, VEX_Vpaddq_ymm_ymm_ymmm256, EVEX_Vpaddq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpaddq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpaddq_zmm_k1z_zmm_zmmm512b64, Pmullw_mm_mmm64, Pmullw_xmm_xmmm128, VEX_Vpmullw_xmm_xmm_xmmm128, VEX_Vpmullw_ymm_ymm_ymmm256, EVEX_Vpmullw_xmm_k1z_xmm_xmmm128, EVEX_Vpmullw_ymm_k1z_ymm_ymmm256, EVEX_Vpmullw_zmm_k1z_zmm_zmmm512, Movq_xmmm64_xmm, VEX_Vmovq_xmmm64_xmm, EVEX_Vmovq_xmmm64_xmm, Movq2dq_xmm_mm, Movdq2q_mm_xmm, Pmovmskb_r32_mm, Pmovmskb_r64_mm, Pmovmskb_r32_xmm, Pmovmskb_r64_xmm, VEX_Vpmovmskb_r32_xmm, VEX_Vpmovmskb_r64_xmm, VEX_Vpmovmskb_r32_ymm, VEX_Vpmovmskb_r64_ymm, Psubusb_mm_mmm64, Psubusb_xmm_xmmm128, VEX_Vpsubusb_xmm_xmm_xmmm128, VEX_Vpsubusb_ymm_ymm_ymmm256, EVEX_Vpsubusb_xmm_k1z_xmm_xmmm128, EVEX_Vpsubusb_ymm_k1z_ymm_ymmm256, EVEX_Vpsubusb_zmm_k1z_zmm_zmmm512, Psubusw_mm_mmm64, Psubusw_xmm_xmmm128, VEX_Vpsubusw_xmm_xmm_xmmm128, VEX_Vpsubusw_ymm_ymm_ymmm256, EVEX_Vpsubusw_xmm_k1z_xmm_xmmm128, EVEX_Vpsubusw_ymm_k1z_ymm_ymmm256, EVEX_Vpsubusw_zmm_k1z_zmm_zmmm512, Pminub_mm_mmm64, Pminub_xmm_xmmm128, VEX_Vpminub_xmm_xmm_xmmm128, VEX_Vpminub_ymm_ymm_ymmm256, EVEX_Vpminub_xmm_k1z_xmm_xmmm128, EVEX_Vpminub_ymm_k1z_ymm_ymmm256, EVEX_Vpminub_zmm_k1z_zmm_zmmm512, Pand_mm_mmm64, Pand_xmm_xmmm128, VEX_Vpand_xmm_xmm_xmmm128, VEX_Vpand_ymm_ymm_ymmm256, EVEX_Vpandd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpandd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpandd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpandq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpandq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpandq_zmm_k1z_zmm_zmmm512b64, Paddusb_mm_mmm64, Paddusb_xmm_xmmm128, VEX_Vpaddusb_xmm_xmm_xmmm128, VEX_Vpaddusb_ymm_ymm_ymmm256, EVEX_Vpaddusb_xmm_k1z_xmm_xmmm128, EVEX_Vpaddusb_ymm_k1z_ymm_ymmm256, EVEX_Vpaddusb_zmm_k1z_zmm_zmmm512, Paddusw_mm_mmm64, Paddusw_xmm_xmmm128, VEX_Vpaddusw_xmm_xmm_xmmm128, VEX_Vpaddusw_ymm_ymm_ymmm256, EVEX_Vpaddusw_xmm_k1z_xmm_xmmm128, 
EVEX_Vpaddusw_ymm_k1z_ymm_ymmm256, EVEX_Vpaddusw_zmm_k1z_zmm_zmmm512, Pmaxub_mm_mmm64, Pmaxub_xmm_xmmm128, VEX_Vpmaxub_xmm_xmm_xmmm128, VEX_Vpmaxub_ymm_ymm_ymmm256, EVEX_Vpmaxub_xmm_k1z_xmm_xmmm128, EVEX_Vpmaxub_ymm_k1z_ymm_ymmm256, EVEX_Vpmaxub_zmm_k1z_zmm_zmmm512, Pandn_mm_mmm64, Pandn_xmm_xmmm128, VEX_Vpandn_xmm_xmm_xmmm128, VEX_Vpandn_ymm_ymm_ymmm256, EVEX_Vpandnd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpandnd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpandnd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpandnq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpandnq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpandnq_zmm_k1z_zmm_zmmm512b64, Pavgb_mm_mmm64, Pavgb_xmm_xmmm128, VEX_Vpavgb_xmm_xmm_xmmm128, VEX_Vpavgb_ymm_ymm_ymmm256, EVEX_Vpavgb_xmm_k1z_xmm_xmmm128, EVEX_Vpavgb_ymm_k1z_ymm_ymmm256, EVEX_Vpavgb_zmm_k1z_zmm_zmmm512, Psraw_mm_mmm64, Psraw_xmm_xmmm128, VEX_Vpsraw_xmm_xmm_xmmm128, VEX_Vpsraw_ymm_ymm_xmmm128, EVEX_Vpsraw_xmm_k1z_xmm_xmmm128, EVEX_Vpsraw_ymm_k1z_ymm_xmmm128, EVEX_Vpsraw_zmm_k1z_zmm_xmmm128, Psrad_mm_mmm64, Psrad_xmm_xmmm128, VEX_Vpsrad_xmm_xmm_xmmm128, VEX_Vpsrad_ymm_ymm_xmmm128, EVEX_Vpsrad_xmm_k1z_xmm_xmmm128, EVEX_Vpsrad_ymm_k1z_ymm_xmmm128, EVEX_Vpsrad_zmm_k1z_zmm_xmmm128, EVEX_Vpsraq_xmm_k1z_xmm_xmmm128, EVEX_Vpsraq_ymm_k1z_ymm_xmmm128, EVEX_Vpsraq_zmm_k1z_zmm_xmmm128, Pavgw_mm_mmm64, Pavgw_xmm_xmmm128, VEX_Vpavgw_xmm_xmm_xmmm128, VEX_Vpavgw_ymm_ymm_ymmm256, EVEX_Vpavgw_xmm_k1z_xmm_xmmm128, EVEX_Vpavgw_ymm_k1z_ymm_ymmm256, EVEX_Vpavgw_zmm_k1z_zmm_zmmm512, Pmulhuw_mm_mmm64, Pmulhuw_xmm_xmmm128, VEX_Vpmulhuw_xmm_xmm_xmmm128, VEX_Vpmulhuw_ymm_ymm_ymmm256, EVEX_Vpmulhuw_xmm_k1z_xmm_xmmm128, EVEX_Vpmulhuw_ymm_k1z_ymm_ymmm256, EVEX_Vpmulhuw_zmm_k1z_zmm_zmmm512, Pmulhw_mm_mmm64, Pmulhw_xmm_xmmm128, VEX_Vpmulhw_xmm_xmm_xmmm128, VEX_Vpmulhw_ymm_ymm_ymmm256, EVEX_Vpmulhw_xmm_k1z_xmm_xmmm128, EVEX_Vpmulhw_ymm_k1z_ymm_ymmm256, EVEX_Vpmulhw_zmm_k1z_zmm_zmmm512, Cvttpd2dq_xmm_xmmm128, VEX_Vcvttpd2dq_xmm_xmmm128, VEX_Vcvttpd2dq_xmm_ymmm256, EVEX_Vcvttpd2dq_xmm_k1z_xmmm128b64, EVEX_Vcvttpd2dq_xmm_k1z_ymmm256b64, 
EVEX_Vcvttpd2dq_ymm_k1z_zmmm512b64_sae, Cvtdq2pd_xmm_xmmm64, VEX_Vcvtdq2pd_xmm_xmmm64, VEX_Vcvtdq2pd_ymm_xmmm128, EVEX_Vcvtdq2pd_xmm_k1z_xmmm64b32, EVEX_Vcvtdq2pd_ymm_k1z_xmmm128b32, EVEX_Vcvtdq2pd_zmm_k1z_ymmm256b32_er, EVEX_Vcvtqq2pd_xmm_k1z_xmmm128b64, EVEX_Vcvtqq2pd_ymm_k1z_ymmm256b64, EVEX_Vcvtqq2pd_zmm_k1z_zmmm512b64_er, Cvtpd2dq_xmm_xmmm128, VEX_Vcvtpd2dq_xmm_xmmm128, VEX_Vcvtpd2dq_xmm_ymmm256, EVEX_Vcvtpd2dq_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2dq_xmm_k1z_ymmm256b64, EVEX_Vcvtpd2dq_ymm_k1z_zmmm512b64_er, Movntq_m64_mm, Movntdq_m128_xmm, VEX_Vmovntdq_m128_xmm, VEX_Vmovntdq_m256_ymm, EVEX_Vmovntdq_m128_xmm, EVEX_Vmovntdq_m256_ymm, EVEX_Vmovntdq_m512_zmm, Psubsb_mm_mmm64, Psubsb_xmm_xmmm128, VEX_Vpsubsb_xmm_xmm_xmmm128, VEX_Vpsubsb_ymm_ymm_ymmm256, EVEX_Vpsubsb_xmm_k1z_xmm_xmmm128, EVEX_Vpsubsb_ymm_k1z_ymm_ymmm256, EVEX_Vpsubsb_zmm_k1z_zmm_zmmm512, Psubsw_mm_mmm64, Psubsw_xmm_xmmm128, VEX_Vpsubsw_xmm_xmm_xmmm128, VEX_Vpsubsw_ymm_ymm_ymmm256, EVEX_Vpsubsw_xmm_k1z_xmm_xmmm128, EVEX_Vpsubsw_ymm_k1z_ymm_ymmm256, EVEX_Vpsubsw_zmm_k1z_zmm_zmmm512, Pminsw_mm_mmm64, Pminsw_xmm_xmmm128, VEX_Vpminsw_xmm_xmm_xmmm128, VEX_Vpminsw_ymm_ymm_ymmm256, EVEX_Vpminsw_xmm_k1z_xmm_xmmm128, EVEX_Vpminsw_ymm_k1z_ymm_ymmm256, EVEX_Vpminsw_zmm_k1z_zmm_zmmm512, Por_mm_mmm64, Por_xmm_xmmm128, VEX_Vpor_xmm_xmm_xmmm128, VEX_Vpor_ymm_ymm_ymmm256, EVEX_Vpord_xmm_k1z_xmm_xmmm128b32, EVEX_Vpord_ymm_k1z_ymm_ymmm256b32, EVEX_Vpord_zmm_k1z_zmm_zmmm512b32, EVEX_Vporq_xmm_k1z_xmm_xmmm128b64, EVEX_Vporq_ymm_k1z_ymm_ymmm256b64, EVEX_Vporq_zmm_k1z_zmm_zmmm512b64, Paddsb_mm_mmm64, Paddsb_xmm_xmmm128, VEX_Vpaddsb_xmm_xmm_xmmm128, VEX_Vpaddsb_ymm_ymm_ymmm256, EVEX_Vpaddsb_xmm_k1z_xmm_xmmm128, EVEX_Vpaddsb_ymm_k1z_ymm_ymmm256, EVEX_Vpaddsb_zmm_k1z_zmm_zmmm512, Paddsw_mm_mmm64, Paddsw_xmm_xmmm128, VEX_Vpaddsw_xmm_xmm_xmmm128, VEX_Vpaddsw_ymm_ymm_ymmm256, EVEX_Vpaddsw_xmm_k1z_xmm_xmmm128, EVEX_Vpaddsw_ymm_k1z_ymm_ymmm256, EVEX_Vpaddsw_zmm_k1z_zmm_zmmm512, Pmaxsw_mm_mmm64, Pmaxsw_xmm_xmmm128, 
VEX_Vpmaxsw_xmm_xmm_xmmm128, VEX_Vpmaxsw_ymm_ymm_ymmm256, EVEX_Vpmaxsw_xmm_k1z_xmm_xmmm128, EVEX_Vpmaxsw_ymm_k1z_ymm_ymmm256, EVEX_Vpmaxsw_zmm_k1z_zmm_zmmm512, Pxor_mm_mmm64, Pxor_xmm_xmmm128, VEX_Vpxor_xmm_xmm_xmmm128, VEX_Vpxor_ymm_ymm_ymmm256, EVEX_Vpxord_xmm_k1z_xmm_xmmm128b32, EVEX_Vpxord_ymm_k1z_ymm_ymmm256b32, EVEX_Vpxord_zmm_k1z_zmm_zmmm512b32, EVEX_Vpxorq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpxorq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpxorq_zmm_k1z_zmm_zmmm512b64, Lddqu_xmm_m128, VEX_Vlddqu_xmm_m128, VEX_Vlddqu_ymm_m256, Psllw_mm_mmm64, Psllw_xmm_xmmm128, VEX_Vpsllw_xmm_xmm_xmmm128, VEX_Vpsllw_ymm_ymm_xmmm128, EVEX_Vpsllw_xmm_k1z_xmm_xmmm128, EVEX_Vpsllw_ymm_k1z_ymm_xmmm128, EVEX_Vpsllw_zmm_k1z_zmm_xmmm128, Pslld_mm_mmm64, Pslld_xmm_xmmm128, VEX_Vpslld_xmm_xmm_xmmm128, VEX_Vpslld_ymm_ymm_xmmm128, EVEX_Vpslld_xmm_k1z_xmm_xmmm128, EVEX_Vpslld_ymm_k1z_ymm_xmmm128, EVEX_Vpslld_zmm_k1z_zmm_xmmm128, Psllq_mm_mmm64, Psllq_xmm_xmmm128, VEX_Vpsllq_xmm_xmm_xmmm128, VEX_Vpsllq_ymm_ymm_xmmm128, EVEX_Vpsllq_xmm_k1z_xmm_xmmm128, EVEX_Vpsllq_ymm_k1z_ymm_xmmm128, EVEX_Vpsllq_zmm_k1z_zmm_xmmm128, Pmuludq_mm_mmm64, Pmuludq_xmm_xmmm128, VEX_Vpmuludq_xmm_xmm_xmmm128, VEX_Vpmuludq_ymm_ymm_ymmm256, EVEX_Vpmuludq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmuludq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmuludq_zmm_k1z_zmm_zmmm512b64, Pmaddwd_mm_mmm64, Pmaddwd_xmm_xmmm128, VEX_Vpmaddwd_xmm_xmm_xmmm128, VEX_Vpmaddwd_ymm_ymm_ymmm256, EVEX_Vpmaddwd_xmm_k1z_xmm_xmmm128, EVEX_Vpmaddwd_ymm_k1z_ymm_ymmm256, EVEX_Vpmaddwd_zmm_k1z_zmm_zmmm512, Psadbw_mm_mmm64, Psadbw_xmm_xmmm128, VEX_Vpsadbw_xmm_xmm_xmmm128, VEX_Vpsadbw_ymm_ymm_ymmm256, EVEX_Vpsadbw_xmm_xmm_xmmm128, EVEX_Vpsadbw_ymm_ymm_ymmm256, EVEX_Vpsadbw_zmm_zmm_zmmm512, Maskmovq_rDI_mm_mm, Maskmovdqu_rDI_xmm_xmm, VEX_Vmaskmovdqu_rDI_xmm_xmm, Psubb_mm_mmm64, Psubb_xmm_xmmm128, VEX_Vpsubb_xmm_xmm_xmmm128, VEX_Vpsubb_ymm_ymm_ymmm256, EVEX_Vpsubb_xmm_k1z_xmm_xmmm128, EVEX_Vpsubb_ymm_k1z_ymm_ymmm256, EVEX_Vpsubb_zmm_k1z_zmm_zmmm512, Psubw_mm_mmm64, Psubw_xmm_xmmm128, 
VEX_Vpsubw_xmm_xmm_xmmm128, VEX_Vpsubw_ymm_ymm_ymmm256, EVEX_Vpsubw_xmm_k1z_xmm_xmmm128, EVEX_Vpsubw_ymm_k1z_ymm_ymmm256, EVEX_Vpsubw_zmm_k1z_zmm_zmmm512, Psubd_mm_mmm64, Psubd_xmm_xmmm128, VEX_Vpsubd_xmm_xmm_xmmm128, VEX_Vpsubd_ymm_ymm_ymmm256, EVEX_Vpsubd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpsubd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpsubd_zmm_k1z_zmm_zmmm512b32, Psubq_mm_mmm64, Psubq_xmm_xmmm128, VEX_Vpsubq_xmm_xmm_xmmm128, VEX_Vpsubq_ymm_ymm_ymmm256, EVEX_Vpsubq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpsubq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpsubq_zmm_k1z_zmm_zmmm512b64, Paddb_mm_mmm64, Paddb_xmm_xmmm128, VEX_Vpaddb_xmm_xmm_xmmm128, VEX_Vpaddb_ymm_ymm_ymmm256, EVEX_Vpaddb_xmm_k1z_xmm_xmmm128, EVEX_Vpaddb_ymm_k1z_ymm_ymmm256, EVEX_Vpaddb_zmm_k1z_zmm_zmmm512, Paddw_mm_mmm64, Paddw_xmm_xmmm128, VEX_Vpaddw_xmm_xmm_xmmm128, VEX_Vpaddw_ymm_ymm_ymmm256, EVEX_Vpaddw_xmm_k1z_xmm_xmmm128, EVEX_Vpaddw_ymm_k1z_ymm_ymmm256, EVEX_Vpaddw_zmm_k1z_zmm_zmmm512, Paddd_mm_mmm64, Paddd_xmm_xmmm128, VEX_Vpaddd_xmm_xmm_xmmm128, VEX_Vpaddd_ymm_ymm_ymmm256, EVEX_Vpaddd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpaddd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpaddd_zmm_k1z_zmm_zmmm512b32, Ud0_r16_rm16, Ud0_r32_rm32, Ud0_r64_rm64, Pshufb_mm_mmm64, Pshufb_xmm_xmmm128, VEX_Vpshufb_xmm_xmm_xmmm128, VEX_Vpshufb_ymm_ymm_ymmm256, EVEX_Vpshufb_xmm_k1z_xmm_xmmm128, EVEX_Vpshufb_ymm_k1z_ymm_ymmm256, EVEX_Vpshufb_zmm_k1z_zmm_zmmm512, Phaddw_mm_mmm64, Phaddw_xmm_xmmm128, VEX_Vphaddw_xmm_xmm_xmmm128, VEX_Vphaddw_ymm_ymm_ymmm256, Phaddd_mm_mmm64, Phaddd_xmm_xmmm128, VEX_Vphaddd_xmm_xmm_xmmm128, VEX_Vphaddd_ymm_ymm_ymmm256, Phaddsw_mm_mmm64, Phaddsw_xmm_xmmm128, VEX_Vphaddsw_xmm_xmm_xmmm128, VEX_Vphaddsw_ymm_ymm_ymmm256, Pmaddubsw_mm_mmm64, Pmaddubsw_xmm_xmmm128, VEX_Vpmaddubsw_xmm_xmm_xmmm128, VEX_Vpmaddubsw_ymm_ymm_ymmm256, EVEX_Vpmaddubsw_xmm_k1z_xmm_xmmm128, EVEX_Vpmaddubsw_ymm_k1z_ymm_ymmm256, EVEX_Vpmaddubsw_zmm_k1z_zmm_zmmm512, Phsubw_mm_mmm64, Phsubw_xmm_xmmm128, VEX_Vphsubw_xmm_xmm_xmmm128, VEX_Vphsubw_ymm_ymm_ymmm256, Phsubd_mm_mmm64, 
Phsubd_xmm_xmmm128, VEX_Vphsubd_xmm_xmm_xmmm128, VEX_Vphsubd_ymm_ymm_ymmm256, Phsubsw_mm_mmm64, Phsubsw_xmm_xmmm128, VEX_Vphsubsw_xmm_xmm_xmmm128, VEX_Vphsubsw_ymm_ymm_ymmm256, Psignb_mm_mmm64, Psignb_xmm_xmmm128, VEX_Vpsignb_xmm_xmm_xmmm128, VEX_Vpsignb_ymm_ymm_ymmm256, Psignw_mm_mmm64, Psignw_xmm_xmmm128, VEX_Vpsignw_xmm_xmm_xmmm128, VEX_Vpsignw_ymm_ymm_ymmm256, Psignd_mm_mmm64, Psignd_xmm_xmmm128, VEX_Vpsignd_xmm_xmm_xmmm128, VEX_Vpsignd_ymm_ymm_ymmm256, Pmulhrsw_mm_mmm64, Pmulhrsw_xmm_xmmm128, VEX_Vpmulhrsw_xmm_xmm_xmmm128, VEX_Vpmulhrsw_ymm_ymm_ymmm256, EVEX_Vpmulhrsw_xmm_k1z_xmm_xmmm128, EVEX_Vpmulhrsw_ymm_k1z_ymm_ymmm256, EVEX_Vpmulhrsw_zmm_k1z_zmm_zmmm512, VEX_Vpermilps_xmm_xmm_xmmm128, VEX_Vpermilps_ymm_ymm_ymmm256, EVEX_Vpermilps_xmm_k1z_xmm_xmmm128b32, EVEX_Vpermilps_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermilps_zmm_k1z_zmm_zmmm512b32, VEX_Vpermilpd_xmm_xmm_xmmm128, VEX_Vpermilpd_ymm_ymm_ymmm256, EVEX_Vpermilpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vpermilpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermilpd_zmm_k1z_zmm_zmmm512b64, VEX_Vtestps_xmm_xmmm128, VEX_Vtestps_ymm_ymmm256, VEX_Vtestpd_xmm_xmmm128, VEX_Vtestpd_ymm_ymmm256, Pblendvb_xmm_xmmm128, EVEX_Vpsrlvw_xmm_k1z_xmm_xmmm128, EVEX_Vpsrlvw_ymm_k1z_ymm_ymmm256, EVEX_Vpsrlvw_zmm_k1z_zmm_zmmm512, EVEX_Vpmovuswb_xmmm64_k1z_xmm, EVEX_Vpmovuswb_xmmm128_k1z_ymm, EVEX_Vpmovuswb_ymmm256_k1z_zmm, EVEX_Vpsravw_xmm_k1z_xmm_xmmm128, EVEX_Vpsravw_ymm_k1z_ymm_ymmm256, EVEX_Vpsravw_zmm_k1z_zmm_zmmm512, EVEX_Vpmovusdb_xmmm32_k1z_xmm, EVEX_Vpmovusdb_xmmm64_k1z_ymm, EVEX_Vpmovusdb_xmmm128_k1z_zmm, EVEX_Vpsllvw_xmm_k1z_xmm_xmmm128, EVEX_Vpsllvw_ymm_k1z_ymm_ymmm256, EVEX_Vpsllvw_zmm_k1z_zmm_zmmm512, EVEX_Vpmovusqb_xmmm16_k1z_xmm, EVEX_Vpmovusqb_xmmm32_k1z_ymm, EVEX_Vpmovusqb_xmmm64_k1z_zmm, VEX_Vcvtph2ps_xmm_xmmm64, VEX_Vcvtph2ps_ymm_xmmm128, EVEX_Vcvtph2ps_xmm_k1z_xmmm64, EVEX_Vcvtph2ps_ymm_k1z_xmmm128, EVEX_Vcvtph2ps_zmm_k1z_ymmm256_sae, EVEX_Vpmovusdw_xmmm64_k1z_xmm, EVEX_Vpmovusdw_xmmm128_k1z_ymm, EVEX_Vpmovusdw_ymmm256_k1z_zmm, 
Blendvps_xmm_xmmm128, EVEX_Vprorvd_xmm_k1z_xmm_xmmm128b32, EVEX_Vprorvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vprorvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vprorvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vprorvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vprorvq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpmovusqw_xmmm32_k1z_xmm, EVEX_Vpmovusqw_xmmm64_k1z_ymm, EVEX_Vpmovusqw_xmmm128_k1z_zmm, Blendvpd_xmm_xmmm128, EVEX_Vprolvd_xmm_k1z_xmm_xmmm128b32, EVEX_Vprolvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vprolvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vprolvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vprolvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vprolvq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpmovusqd_xmmm64_k1z_xmm, EVEX_Vpmovusqd_xmmm128_k1z_ymm, EVEX_Vpmovusqd_ymmm256_k1z_zmm, VEX_Vpermps_ymm_ymm_ymmm256, EVEX_Vpermps_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermps_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermpd_zmm_k1z_zmm_zmmm512b64, Ptest_xmm_xmmm128, VEX_Vptest_xmm_xmmm128, VEX_Vptest_ymm_ymmm256, VEX_Vbroadcastss_xmm_m32, VEX_Vbroadcastss_ymm_m32, EVEX_Vbroadcastss_xmm_k1z_xmmm32, EVEX_Vbroadcastss_ymm_k1z_xmmm32, EVEX_Vbroadcastss_zmm_k1z_xmmm32, VEX_Vbroadcastsd_ymm_m64, EVEX_Vbroadcastf32x2_ymm_k1z_xmmm64, EVEX_Vbroadcastf32x2_zmm_k1z_xmmm64, EVEX_Vbroadcastsd_ymm_k1z_xmmm64, EVEX_Vbroadcastsd_zmm_k1z_xmmm64, VEX_Vbroadcastf128_ymm_m128, EVEX_Vbroadcastf32x4_ymm_k1z_m128, EVEX_Vbroadcastf32x4_zmm_k1z_m128, EVEX_Vbroadcastf64x2_ymm_k1z_m128, EVEX_Vbroadcastf64x2_zmm_k1z_m128, EVEX_Vbroadcastf32x8_zmm_k1z_m256, EVEX_Vbroadcastf64x4_zmm_k1z_m256, Pabsb_mm_mmm64, Pabsb_xmm_xmmm128, VEX_Vpabsb_xmm_xmmm128, VEX_Vpabsb_ymm_ymmm256, EVEX_Vpabsb_xmm_k1z_xmmm128, EVEX_Vpabsb_ymm_k1z_ymmm256, EVEX_Vpabsb_zmm_k1z_zmmm512, Pabsw_mm_mmm64, Pabsw_xmm_xmmm128, VEX_Vpabsw_xmm_xmmm128, VEX_Vpabsw_ymm_ymmm256, EVEX_Vpabsw_xmm_k1z_xmmm128, EVEX_Vpabsw_ymm_k1z_ymmm256, EVEX_Vpabsw_zmm_k1z_zmmm512, Pabsd_mm_mmm64, Pabsd_xmm_xmmm128, VEX_Vpabsd_xmm_xmmm128, VEX_Vpabsd_ymm_ymmm256, EVEX_Vpabsd_xmm_k1z_xmmm128b32, EVEX_Vpabsd_ymm_k1z_ymmm256b32, 
EVEX_Vpabsd_zmm_k1z_zmmm512b32, EVEX_Vpabsq_xmm_k1z_xmmm128b64, EVEX_Vpabsq_ymm_k1z_ymmm256b64, EVEX_Vpabsq_zmm_k1z_zmmm512b64, Pmovsxbw_xmm_xmmm64, VEX_Vpmovsxbw_xmm_xmmm64, VEX_Vpmovsxbw_ymm_xmmm128, EVEX_Vpmovsxbw_xmm_k1z_xmmm64, EVEX_Vpmovsxbw_ymm_k1z_xmmm128, EVEX_Vpmovsxbw_zmm_k1z_ymmm256, EVEX_Vpmovswb_xmmm64_k1z_xmm, EVEX_Vpmovswb_xmmm128_k1z_ymm, EVEX_Vpmovswb_ymmm256_k1z_zmm, Pmovsxbd_xmm_xmmm32, VEX_Vpmovsxbd_xmm_xmmm32, VEX_Vpmovsxbd_ymm_xmmm64, EVEX_Vpmovsxbd_xmm_k1z_xmmm32, EVEX_Vpmovsxbd_ymm_k1z_xmmm64, EVEX_Vpmovsxbd_zmm_k1z_xmmm128, EVEX_Vpmovsdb_xmmm32_k1z_xmm, EVEX_Vpmovsdb_xmmm64_k1z_ymm, EVEX_Vpmovsdb_xmmm128_k1z_zmm, Pmovsxbq_xmm_xmmm16, VEX_Vpmovsxbq_xmm_xmmm16, VEX_Vpmovsxbq_ymm_xmmm32, EVEX_Vpmovsxbq_xmm_k1z_xmmm16, EVEX_Vpmovsxbq_ymm_k1z_xmmm32, EVEX_Vpmovsxbq_zmm_k1z_xmmm64, EVEX_Vpmovsqb_xmmm16_k1z_xmm, EVEX_Vpmovsqb_xmmm32_k1z_ymm, EVEX_Vpmovsqb_xmmm64_k1z_zmm, Pmovsxwd_xmm_xmmm64, VEX_Vpmovsxwd_xmm_xmmm64, VEX_Vpmovsxwd_ymm_xmmm128, EVEX_Vpmovsxwd_xmm_k1z_xmmm64, EVEX_Vpmovsxwd_ymm_k1z_xmmm128, EVEX_Vpmovsxwd_zmm_k1z_ymmm256, EVEX_Vpmovsdw_xmmm64_k1z_xmm, EVEX_Vpmovsdw_xmmm128_k1z_ymm, EVEX_Vpmovsdw_ymmm256_k1z_zmm, Pmovsxwq_xmm_xmmm32, VEX_Vpmovsxwq_xmm_xmmm32, VEX_Vpmovsxwq_ymm_xmmm64, EVEX_Vpmovsxwq_xmm_k1z_xmmm32, EVEX_Vpmovsxwq_ymm_k1z_xmmm64, EVEX_Vpmovsxwq_zmm_k1z_xmmm128, EVEX_Vpmovsqw_xmmm32_k1z_xmm, EVEX_Vpmovsqw_xmmm64_k1z_ymm, EVEX_Vpmovsqw_xmmm128_k1z_zmm, Pmovsxdq_xmm_xmmm64, VEX_Vpmovsxdq_xmm_xmmm64, VEX_Vpmovsxdq_ymm_xmmm128, EVEX_Vpmovsxdq_xmm_k1z_xmmm64, EVEX_Vpmovsxdq_ymm_k1z_xmmm128, EVEX_Vpmovsxdq_zmm_k1z_ymmm256, EVEX_Vpmovsqd_xmmm64_k1z_xmm, EVEX_Vpmovsqd_xmmm128_k1z_ymm, EVEX_Vpmovsqd_ymmm256_k1z_zmm, EVEX_Vptestmb_kr_k1_xmm_xmmm128, EVEX_Vptestmb_kr_k1_ymm_ymmm256, EVEX_Vptestmb_kr_k1_zmm_zmmm512, EVEX_Vptestmw_kr_k1_xmm_xmmm128, EVEX_Vptestmw_kr_k1_ymm_ymmm256, EVEX_Vptestmw_kr_k1_zmm_zmmm512, EVEX_Vptestnmb_kr_k1_xmm_xmmm128, EVEX_Vptestnmb_kr_k1_ymm_ymmm256, EVEX_Vptestnmb_kr_k1_zmm_zmmm512, 
EVEX_Vptestnmw_kr_k1_xmm_xmmm128, EVEX_Vptestnmw_kr_k1_ymm_ymmm256, EVEX_Vptestnmw_kr_k1_zmm_zmmm512, EVEX_Vptestmd_kr_k1_xmm_xmmm128b32, EVEX_Vptestmd_kr_k1_ymm_ymmm256b32, EVEX_Vptestmd_kr_k1_zmm_zmmm512b32, EVEX_Vptestmq_kr_k1_xmm_xmmm128b64, EVEX_Vptestmq_kr_k1_ymm_ymmm256b64, EVEX_Vptestmq_kr_k1_zmm_zmmm512b64, EVEX_Vptestnmd_kr_k1_xmm_xmmm128b32, EVEX_Vptestnmd_kr_k1_ymm_ymmm256b32, EVEX_Vptestnmd_kr_k1_zmm_zmmm512b32, EVEX_Vptestnmq_kr_k1_xmm_xmmm128b64, EVEX_Vptestnmq_kr_k1_ymm_ymmm256b64, EVEX_Vptestnmq_kr_k1_zmm_zmmm512b64, Pmuldq_xmm_xmmm128, VEX_Vpmuldq_xmm_xmm_xmmm128, VEX_Vpmuldq_ymm_ymm_ymmm256, EVEX_Vpmuldq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmuldq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmuldq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpmovm2b_xmm_kr, EVEX_Vpmovm2b_ymm_kr, EVEX_Vpmovm2b_zmm_kr, EVEX_Vpmovm2w_xmm_kr, EVEX_Vpmovm2w_ymm_kr, EVEX_Vpmovm2w_zmm_kr, Pcmpeqq_xmm_xmmm128, VEX_Vpcmpeqq_xmm_xmm_xmmm128, VEX_Vpcmpeqq_ymm_ymm_ymmm256, EVEX_Vpcmpeqq_kr_k1_xmm_xmmm128b64, EVEX_Vpcmpeqq_kr_k1_ymm_ymmm256b64, EVEX_Vpcmpeqq_kr_k1_zmm_zmmm512b64, EVEX_Vpmovb2m_kr_xmm, EVEX_Vpmovb2m_kr_ymm, EVEX_Vpmovb2m_kr_zmm, EVEX_Vpmovw2m_kr_xmm, EVEX_Vpmovw2m_kr_ymm, EVEX_Vpmovw2m_kr_zmm, Movntdqa_xmm_m128, VEX_Vmovntdqa_xmm_m128, VEX_Vmovntdqa_ymm_m256, EVEX_Vmovntdqa_xmm_m128, EVEX_Vmovntdqa_ymm_m256, EVEX_Vmovntdqa_zmm_m512, EVEX_Vpbroadcastmb2q_xmm_kr, EVEX_Vpbroadcastmb2q_ymm_kr, EVEX_Vpbroadcastmb2q_zmm_kr, Packusdw_xmm_xmmm128, VEX_Vpackusdw_xmm_xmm_xmmm128, VEX_Vpackusdw_ymm_ymm_ymmm256, EVEX_Vpackusdw_xmm_k1z_xmm_xmmm128b32, EVEX_Vpackusdw_ymm_k1z_ymm_ymmm256b32, EVEX_Vpackusdw_zmm_k1z_zmm_zmmm512b32, VEX_Vmaskmovps_xmm_xmm_m128, VEX_Vmaskmovps_ymm_ymm_m256, EVEX_Vscalefps_xmm_k1z_xmm_xmmm128b32, EVEX_Vscalefps_ymm_k1z_ymm_ymmm256b32, EVEX_Vscalefps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vscalefpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vscalefpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vscalefpd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vmaskmovpd_xmm_xmm_m128, VEX_Vmaskmovpd_ymm_ymm_m256, 
EVEX_Vscalefss_xmm_k1z_xmm_xmmm32_er, EVEX_Vscalefsd_xmm_k1z_xmm_xmmm64_er, VEX_Vmaskmovps_m128_xmm_xmm, VEX_Vmaskmovps_m256_ymm_ymm, VEX_Vmaskmovpd_m128_xmm_xmm, VEX_Vmaskmovpd_m256_ymm_ymm, Pmovzxbw_xmm_xmmm64, VEX_Vpmovzxbw_xmm_xmmm64, VEX_Vpmovzxbw_ymm_xmmm128, EVEX_Vpmovzxbw_xmm_k1z_xmmm64, EVEX_Vpmovzxbw_ymm_k1z_xmmm128, EVEX_Vpmovzxbw_zmm_k1z_ymmm256, EVEX_Vpmovwb_xmmm64_k1z_xmm, EVEX_Vpmovwb_xmmm128_k1z_ymm, EVEX_Vpmovwb_ymmm256_k1z_zmm, Pmovzxbd_xmm_xmmm32, VEX_Vpmovzxbd_xmm_xmmm32, VEX_Vpmovzxbd_ymm_xmmm64, EVEX_Vpmovzxbd_xmm_k1z_xmmm32, EVEX_Vpmovzxbd_ymm_k1z_xmmm64, EVEX_Vpmovzxbd_zmm_k1z_xmmm128, EVEX_Vpmovdb_xmmm32_k1z_xmm, EVEX_Vpmovdb_xmmm64_k1z_ymm, EVEX_Vpmovdb_xmmm128_k1z_zmm, Pmovzxbq_xmm_xmmm16, VEX_Vpmovzxbq_xmm_xmmm16, VEX_Vpmovzxbq_ymm_xmmm32, EVEX_Vpmovzxbq_xmm_k1z_xmmm16, EVEX_Vpmovzxbq_ymm_k1z_xmmm32, EVEX_Vpmovzxbq_zmm_k1z_xmmm64, EVEX_Vpmovqb_xmmm16_k1z_xmm, EVEX_Vpmovqb_xmmm32_k1z_ymm, EVEX_Vpmovqb_xmmm64_k1z_zmm, Pmovzxwd_xmm_xmmm64, VEX_Vpmovzxwd_xmm_xmmm64, VEX_Vpmovzxwd_ymm_xmmm128, EVEX_Vpmovzxwd_xmm_k1z_xmmm64, EVEX_Vpmovzxwd_ymm_k1z_xmmm128, EVEX_Vpmovzxwd_zmm_k1z_ymmm256, EVEX_Vpmovdw_xmmm64_k1z_xmm, EVEX_Vpmovdw_xmmm128_k1z_ymm, EVEX_Vpmovdw_ymmm256_k1z_zmm, Pmovzxwq_xmm_xmmm32, VEX_Vpmovzxwq_xmm_xmmm32, VEX_Vpmovzxwq_ymm_xmmm64, EVEX_Vpmovzxwq_xmm_k1z_xmmm32, EVEX_Vpmovzxwq_ymm_k1z_xmmm64, EVEX_Vpmovzxwq_zmm_k1z_xmmm128, EVEX_Vpmovqw_xmmm32_k1z_xmm, EVEX_Vpmovqw_xmmm64_k1z_ymm, EVEX_Vpmovqw_xmmm128_k1z_zmm, Pmovzxdq_xmm_xmmm64, VEX_Vpmovzxdq_xmm_xmmm64, VEX_Vpmovzxdq_ymm_xmmm128, EVEX_Vpmovzxdq_xmm_k1z_xmmm64, EVEX_Vpmovzxdq_ymm_k1z_xmmm128, EVEX_Vpmovzxdq_zmm_k1z_ymmm256, EVEX_Vpmovqd_xmmm64_k1z_xmm, EVEX_Vpmovqd_xmmm128_k1z_ymm, EVEX_Vpmovqd_ymmm256_k1z_zmm, VEX_Vpermd_ymm_ymm_ymmm256, EVEX_Vpermd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermq_zmm_k1z_zmm_zmmm512b64, Pcmpgtq_xmm_xmmm128, VEX_Vpcmpgtq_xmm_xmm_xmmm128, VEX_Vpcmpgtq_ymm_ymm_ymmm256, 
EVEX_Vpcmpgtq_kr_k1_xmm_xmmm128b64, EVEX_Vpcmpgtq_kr_k1_ymm_ymmm256b64, EVEX_Vpcmpgtq_kr_k1_zmm_zmmm512b64, Pminsb_xmm_xmmm128, VEX_Vpminsb_xmm_xmm_xmmm128, VEX_Vpminsb_ymm_ymm_ymmm256, EVEX_Vpminsb_xmm_k1z_xmm_xmmm128, EVEX_Vpminsb_ymm_k1z_ymm_ymmm256, EVEX_Vpminsb_zmm_k1z_zmm_zmmm512, EVEX_Vpmovm2d_xmm_kr, EVEX_Vpmovm2d_ymm_kr, EVEX_Vpmovm2d_zmm_kr, EVEX_Vpmovm2q_xmm_kr, EVEX_Vpmovm2q_ymm_kr, EVEX_Vpmovm2q_zmm_kr, Pminsd_xmm_xmmm128, VEX_Vpminsd_xmm_xmm_xmmm128, VEX_Vpminsd_ymm_ymm_ymmm256, EVEX_Vpminsd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpminsd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpminsd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpminsq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpminsq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpminsq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpmovd2m_kr_xmm, EVEX_Vpmovd2m_kr_ymm, EVEX_Vpmovd2m_kr_zmm, EVEX_Vpmovq2m_kr_xmm, EVEX_Vpmovq2m_kr_ymm, EVEX_Vpmovq2m_kr_zmm, Pminuw_xmm_xmmm128, VEX_Vpminuw_xmm_xmm_xmmm128, VEX_Vpminuw_ymm_ymm_ymmm256, EVEX_Vpminuw_xmm_k1z_xmm_xmmm128, EVEX_Vpminuw_ymm_k1z_ymm_ymmm256, EVEX_Vpminuw_zmm_k1z_zmm_zmmm512, EVEX_Vpbroadcastmw2d_xmm_kr, EVEX_Vpbroadcastmw2d_ymm_kr, EVEX_Vpbroadcastmw2d_zmm_kr, Pminud_xmm_xmmm128, VEX_Vpminud_xmm_xmm_xmmm128, VEX_Vpminud_ymm_ymm_ymmm256, EVEX_Vpminud_xmm_k1z_xmm_xmmm128b32, EVEX_Vpminud_ymm_k1z_ymm_ymmm256b32, EVEX_Vpminud_zmm_k1z_zmm_zmmm512b32, EVEX_Vpminuq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpminuq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpminuq_zmm_k1z_zmm_zmmm512b64, Pmaxsb_xmm_xmmm128, VEX_Vpmaxsb_xmm_xmm_xmmm128, VEX_Vpmaxsb_ymm_ymm_ymmm256, EVEX_Vpmaxsb_xmm_k1z_xmm_xmmm128, EVEX_Vpmaxsb_ymm_k1z_ymm_ymmm256, EVEX_Vpmaxsb_zmm_k1z_zmm_zmmm512, Pmaxsd_xmm_xmmm128, VEX_Vpmaxsd_xmm_xmm_xmmm128, VEX_Vpmaxsd_ymm_ymm_ymmm256, EVEX_Vpmaxsd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpmaxsd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpmaxsd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpmaxsq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmaxsq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmaxsq_zmm_k1z_zmm_zmmm512b64, Pmaxuw_xmm_xmmm128, VEX_Vpmaxuw_xmm_xmm_xmmm128, VEX_Vpmaxuw_ymm_ymm_ymmm256, 
EVEX_Vpmaxuw_xmm_k1z_xmm_xmmm128, EVEX_Vpmaxuw_ymm_k1z_ymm_ymmm256, EVEX_Vpmaxuw_zmm_k1z_zmm_zmmm512, Pmaxud_xmm_xmmm128, VEX_Vpmaxud_xmm_xmm_xmmm128, VEX_Vpmaxud_ymm_ymm_ymmm256, EVEX_Vpmaxud_xmm_k1z_xmm_xmmm128b32, EVEX_Vpmaxud_ymm_k1z_ymm_ymmm256b32, EVEX_Vpmaxud_zmm_k1z_zmm_zmmm512b32, EVEX_Vpmaxuq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmaxuq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmaxuq_zmm_k1z_zmm_zmmm512b64, Pmulld_xmm_xmmm128, VEX_Vpmulld_xmm_xmm_xmmm128, VEX_Vpmulld_ymm_ymm_ymmm256, EVEX_Vpmulld_xmm_k1z_xmm_xmmm128b32, EVEX_Vpmulld_ymm_k1z_ymm_ymmm256b32, EVEX_Vpmulld_zmm_k1z_zmm_zmmm512b32, EVEX_Vpmullq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmullq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmullq_zmm_k1z_zmm_zmmm512b64, Phminposuw_xmm_xmmm128, VEX_Vphminposuw_xmm_xmmm128, EVEX_Vgetexpps_xmm_k1z_xmmm128b32, EVEX_Vgetexpps_ymm_k1z_ymmm256b32, EVEX_Vgetexpps_zmm_k1z_zmmm512b32_sae, EVEX_Vgetexppd_xmm_k1z_xmmm128b64, EVEX_Vgetexppd_ymm_k1z_ymmm256b64, EVEX_Vgetexppd_zmm_k1z_zmmm512b64_sae, EVEX_Vgetexpss_xmm_k1z_xmm_xmmm32_sae, EVEX_Vgetexpsd_xmm_k1z_xmm_xmmm64_sae, EVEX_Vplzcntd_xmm_k1z_xmmm128b32, EVEX_Vplzcntd_ymm_k1z_ymmm256b32, EVEX_Vplzcntd_zmm_k1z_zmmm512b32, EVEX_Vplzcntq_xmm_k1z_xmmm128b64, EVEX_Vplzcntq_ymm_k1z_ymmm256b64, EVEX_Vplzcntq_zmm_k1z_zmmm512b64, VEX_Vpsrlvd_xmm_xmm_xmmm128, VEX_Vpsrlvd_ymm_ymm_ymmm256, VEX_Vpsrlvq_xmm_xmm_xmmm128, VEX_Vpsrlvq_ymm_ymm_ymmm256, EVEX_Vpsrlvd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpsrlvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpsrlvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpsrlvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpsrlvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpsrlvq_zmm_k1z_zmm_zmmm512b64, VEX_Vpsravd_xmm_xmm_xmmm128, VEX_Vpsravd_ymm_ymm_ymmm256, EVEX_Vpsravd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpsravd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpsravd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpsravq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpsravq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpsravq_zmm_k1z_zmm_zmmm512b64, VEX_Vpsllvd_xmm_xmm_xmmm128, VEX_Vpsllvd_ymm_ymm_ymmm256, VEX_Vpsllvq_xmm_xmm_xmmm128, VEX_Vpsllvq_ymm_ymm_ymmm256, 
EVEX_Vpsllvd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpsllvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpsllvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpsllvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpsllvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpsllvq_zmm_k1z_zmm_zmmm512b64, EVEX_Vrcp14ps_xmm_k1z_xmmm128b32, EVEX_Vrcp14ps_ymm_k1z_ymmm256b32, EVEX_Vrcp14ps_zmm_k1z_zmmm512b32, EVEX_Vrcp14pd_xmm_k1z_xmmm128b64, EVEX_Vrcp14pd_ymm_k1z_ymmm256b64, EVEX_Vrcp14pd_zmm_k1z_zmmm512b64, EVEX_Vrcp14ss_xmm_k1z_xmm_xmmm32, EVEX_Vrcp14sd_xmm_k1z_xmm_xmmm64, EVEX_Vrsqrt14ps_xmm_k1z_xmmm128b32, EVEX_Vrsqrt14ps_ymm_k1z_ymmm256b32, EVEX_Vrsqrt14ps_zmm_k1z_zmmm512b32, EVEX_Vrsqrt14pd_xmm_k1z_xmmm128b64, EVEX_Vrsqrt14pd_ymm_k1z_ymmm256b64, EVEX_Vrsqrt14pd_zmm_k1z_zmmm512b64, EVEX_Vrsqrt14ss_xmm_k1z_xmm_xmmm32, EVEX_Vrsqrt14sd_xmm_k1z_xmm_xmmm64, EVEX_Vpdpbusd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpdpbusd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpdpbusd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpdpbusds_xmm_k1z_xmm_xmmm128b32, EVEX_Vpdpbusds_ymm_k1z_ymm_ymmm256b32, EVEX_Vpdpbusds_zmm_k1z_zmm_zmmm512b32, EVEX_Vpdpwssd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpdpwssd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpdpwssd_zmm_k1z_zmm_zmmm512b32, EVEX_Vdpbf16ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vdpbf16ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vdpbf16ps_zmm_k1z_zmm_zmmm512b32, EVEX_Vp4dpwssd_zmm_k1z_zmmp3_m128, EVEX_Vpdpwssds_xmm_k1z_xmm_xmmm128b32, EVEX_Vpdpwssds_ymm_k1z_ymm_ymmm256b32, EVEX_Vpdpwssds_zmm_k1z_zmm_zmmm512b32, EVEX_Vp4dpwssds_zmm_k1z_zmmp3_m128, EVEX_Vpopcntb_xmm_k1z_xmmm128, EVEX_Vpopcntb_ymm_k1z_ymmm256, EVEX_Vpopcntb_zmm_k1z_zmmm512, EVEX_Vpopcntw_xmm_k1z_xmmm128, EVEX_Vpopcntw_ymm_k1z_ymmm256, EVEX_Vpopcntw_zmm_k1z_zmmm512, EVEX_Vpopcntd_xmm_k1z_xmmm128b32, EVEX_Vpopcntd_ymm_k1z_ymmm256b32, EVEX_Vpopcntd_zmm_k1z_zmmm512b32, EVEX_Vpopcntq_xmm_k1z_xmmm128b64, EVEX_Vpopcntq_ymm_k1z_ymmm256b64, EVEX_Vpopcntq_zmm_k1z_zmmm512b64, VEX_Vpbroadcastd_xmm_xmmm32, VEX_Vpbroadcastd_ymm_xmmm32, EVEX_Vpbroadcastd_xmm_k1z_xmmm32, EVEX_Vpbroadcastd_ymm_k1z_xmmm32, EVEX_Vpbroadcastd_zmm_k1z_xmmm32, 
VEX_Vpbroadcastq_xmm_xmmm64, VEX_Vpbroadcastq_ymm_xmmm64, EVEX_Vbroadcasti32x2_xmm_k1z_xmmm64, EVEX_Vbroadcasti32x2_ymm_k1z_xmmm64, EVEX_Vbroadcasti32x2_zmm_k1z_xmmm64, EVEX_Vpbroadcastq_xmm_k1z_xmmm64, EVEX_Vpbroadcastq_ymm_k1z_xmmm64, EVEX_Vpbroadcastq_zmm_k1z_xmmm64, VEX_Vbroadcasti128_ymm_m128, EVEX_Vbroadcasti32x4_ymm_k1z_m128, EVEX_Vbroadcasti32x4_zmm_k1z_m128, EVEX_Vbroadcasti64x2_ymm_k1z_m128, EVEX_Vbroadcasti64x2_zmm_k1z_m128, EVEX_Vbroadcasti32x8_zmm_k1z_m256, EVEX_Vbroadcasti64x4_zmm_k1z_m256, EVEX_Vpexpandb_xmm_k1z_xmmm128, EVEX_Vpexpandb_ymm_k1z_ymmm256, EVEX_Vpexpandb_zmm_k1z_zmmm512, EVEX_Vpexpandw_xmm_k1z_xmmm128, EVEX_Vpexpandw_ymm_k1z_ymmm256, EVEX_Vpexpandw_zmm_k1z_zmmm512, EVEX_Vpcompressb_xmmm128_k1z_xmm, EVEX_Vpcompressb_ymmm256_k1z_ymm, EVEX_Vpcompressb_zmmm512_k1z_zmm, EVEX_Vpcompressw_xmmm128_k1z_xmm, EVEX_Vpcompressw_ymmm256_k1z_ymm, EVEX_Vpcompressw_zmmm512_k1z_zmm, EVEX_Vpblendmd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpblendmd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpblendmd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpblendmq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpblendmq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpblendmq_zmm_k1z_zmm_zmmm512b64, EVEX_Vblendmps_xmm_k1z_xmm_xmmm128b32, EVEX_Vblendmps_ymm_k1z_ymm_ymmm256b32, EVEX_Vblendmps_zmm_k1z_zmm_zmmm512b32, EVEX_Vblendmpd_xmm_k1z_xmm_xmmm128b64, EVEX_Vblendmpd_ymm_k1z_ymm_ymmm256b64, EVEX_Vblendmpd_zmm_k1z_zmm_zmmm512b64, EVEX_Vpblendmb_xmm_k1z_xmm_xmmm128, EVEX_Vpblendmb_ymm_k1z_ymm_ymmm256, EVEX_Vpblendmb_zmm_k1z_zmm_zmmm512, EVEX_Vpblendmw_xmm_k1z_xmm_xmmm128, EVEX_Vpblendmw_ymm_k1z_ymm_ymmm256, EVEX_Vpblendmw_zmm_k1z_zmm_zmmm512, EVEX_Vp2intersectd_kp1_xmm_xmmm128b32, EVEX_Vp2intersectd_kp1_ymm_ymmm256b32, EVEX_Vp2intersectd_kp1_zmm_zmmm512b32, EVEX_Vp2intersectq_kp1_xmm_xmmm128b64, EVEX_Vp2intersectq_kp1_ymm_ymmm256b64, EVEX_Vp2intersectq_kp1_zmm_zmmm512b64, EVEX_Vpshldvw_xmm_k1z_xmm_xmmm128, EVEX_Vpshldvw_ymm_k1z_ymm_ymmm256, EVEX_Vpshldvw_zmm_k1z_zmm_zmmm512, EVEX_Vpshldvd_xmm_k1z_xmm_xmmm128b32, 
EVEX_Vpshldvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpshldvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpshldvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpshldvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpshldvq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpshrdvw_xmm_k1z_xmm_xmmm128, EVEX_Vpshrdvw_ymm_k1z_ymm_ymmm256, EVEX_Vpshrdvw_zmm_k1z_zmm_zmmm512, EVEX_Vcvtneps2bf16_xmm_k1z_xmmm128b32, EVEX_Vcvtneps2bf16_xmm_k1z_ymmm256b32, EVEX_Vcvtneps2bf16_ymm_k1z_zmmm512b32, EVEX_Vcvtne2ps2bf16_xmm_k1z_xmm_xmmm128b32, EVEX_Vcvtne2ps2bf16_ymm_k1z_ymm_ymmm256b32, EVEX_Vcvtne2ps2bf16_zmm_k1z_zmm_zmmm512b32, EVEX_Vpshrdvd_xmm_k1z_xmm_xmmm128b32, EVEX_Vpshrdvd_ymm_k1z_ymm_ymmm256b32, EVEX_Vpshrdvd_zmm_k1z_zmm_zmmm512b32, EVEX_Vpshrdvq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpshrdvq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpshrdvq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpermi2b_xmm_k1z_xmm_xmmm128, EVEX_Vpermi2b_ymm_k1z_ymm_ymmm256, EVEX_Vpermi2b_zmm_k1z_zmm_zmmm512, EVEX_Vpermi2w_xmm_k1z_xmm_xmmm128, EVEX_Vpermi2w_ymm_k1z_ymm_ymmm256, EVEX_Vpermi2w_zmm_k1z_zmm_zmmm512, EVEX_Vpermi2d_xmm_k1z_xmm_xmmm128b32, EVEX_Vpermi2d_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermi2d_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermi2q_xmm_k1z_xmm_xmmm128b64, EVEX_Vpermi2q_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermi2q_zmm_k1z_zmm_zmmm512b64, EVEX_Vpermi2ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vpermi2ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermi2ps_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermi2pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vpermi2pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermi2pd_zmm_k1z_zmm_zmmm512b64, VEX_Vpbroadcastb_xmm_xmmm8, VEX_Vpbroadcastb_ymm_xmmm8, EVEX_Vpbroadcastb_xmm_k1z_xmmm8, EVEX_Vpbroadcastb_ymm_k1z_xmmm8, EVEX_Vpbroadcastb_zmm_k1z_xmmm8, VEX_Vpbroadcastw_xmm_xmmm16, VEX_Vpbroadcastw_ymm_xmmm16, EVEX_Vpbroadcastw_xmm_k1z_xmmm16, EVEX_Vpbroadcastw_ymm_k1z_xmmm16, EVEX_Vpbroadcastw_zmm_k1z_xmmm16, EVEX_Vpbroadcastb_xmm_k1z_r32, EVEX_Vpbroadcastb_ymm_k1z_r32, EVEX_Vpbroadcastb_zmm_k1z_r32, EVEX_Vpbroadcastw_xmm_k1z_r32, EVEX_Vpbroadcastw_ymm_k1z_r32, EVEX_Vpbroadcastw_zmm_k1z_r32, EVEX_Vpbroadcastd_xmm_k1z_r32, 
EVEX_Vpbroadcastd_ymm_k1z_r32, EVEX_Vpbroadcastd_zmm_k1z_r32, EVEX_Vpbroadcastq_xmm_k1z_r64, EVEX_Vpbroadcastq_ymm_k1z_r64, EVEX_Vpbroadcastq_zmm_k1z_r64, EVEX_Vpermt2b_xmm_k1z_xmm_xmmm128, EVEX_Vpermt2b_ymm_k1z_ymm_ymmm256, EVEX_Vpermt2b_zmm_k1z_zmm_zmmm512, EVEX_Vpermt2w_xmm_k1z_xmm_xmmm128, EVEX_Vpermt2w_ymm_k1z_ymm_ymmm256, EVEX_Vpermt2w_zmm_k1z_zmm_zmmm512, EVEX_Vpermt2d_xmm_k1z_xmm_xmmm128b32, EVEX_Vpermt2d_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermt2d_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermt2q_xmm_k1z_xmm_xmmm128b64, EVEX_Vpermt2q_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermt2q_zmm_k1z_zmm_zmmm512b64, EVEX_Vpermt2ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vpermt2ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vpermt2ps_zmm_k1z_zmm_zmmm512b32, EVEX_Vpermt2pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vpermt2pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vpermt2pd_zmm_k1z_zmm_zmmm512b64, Invept_r32_m128, Invept_r64_m128, Invvpid_r32_m128, Invvpid_r64_m128, Invpcid_r32_m128, Invpcid_r64_m128, EVEX_Vpmultishiftqb_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmultishiftqb_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmultishiftqb_zmm_k1z_zmm_zmmm512b64, EVEX_Vexpandps_xmm_k1z_xmmm128, EVEX_Vexpandps_ymm_k1z_ymmm256, EVEX_Vexpandps_zmm_k1z_zmmm512, EVEX_Vexpandpd_xmm_k1z_xmmm128, EVEX_Vexpandpd_ymm_k1z_ymmm256, EVEX_Vexpandpd_zmm_k1z_zmmm512, EVEX_Vpexpandd_xmm_k1z_xmmm128, EVEX_Vpexpandd_ymm_k1z_ymmm256, EVEX_Vpexpandd_zmm_k1z_zmmm512, EVEX_Vpexpandq_xmm_k1z_xmmm128, EVEX_Vpexpandq_ymm_k1z_ymmm256, EVEX_Vpexpandq_zmm_k1z_zmmm512, EVEX_Vcompressps_xmmm128_k1z_xmm, EVEX_Vcompressps_ymmm256_k1z_ymm, EVEX_Vcompressps_zmmm512_k1z_zmm, EVEX_Vcompresspd_xmmm128_k1z_xmm, EVEX_Vcompresspd_ymmm256_k1z_ymm, EVEX_Vcompresspd_zmmm512_k1z_zmm, EVEX_Vpcompressd_xmmm128_k1z_xmm, EVEX_Vpcompressd_ymmm256_k1z_ymm, EVEX_Vpcompressd_zmmm512_k1z_zmm, EVEX_Vpcompressq_xmmm128_k1z_xmm, EVEX_Vpcompressq_ymmm256_k1z_ymm, EVEX_Vpcompressq_zmmm512_k1z_zmm, VEX_Vpmaskmovd_xmm_xmm_m128, VEX_Vpmaskmovd_ymm_ymm_m256, VEX_Vpmaskmovq_xmm_xmm_m128, VEX_Vpmaskmovq_ymm_ymm_m256, 
EVEX_Vpermb_xmm_k1z_xmm_xmmm128, EVEX_Vpermb_ymm_k1z_ymm_ymmm256, EVEX_Vpermb_zmm_k1z_zmm_zmmm512, EVEX_Vpermw_xmm_k1z_xmm_xmmm128, EVEX_Vpermw_ymm_k1z_ymm_ymmm256, EVEX_Vpermw_zmm_k1z_zmm_zmmm512, VEX_Vpmaskmovd_m128_xmm_xmm, VEX_Vpmaskmovd_m256_ymm_ymm, VEX_Vpmaskmovq_m128_xmm_xmm, VEX_Vpmaskmovq_m256_ymm_ymm, EVEX_Vpshufbitqmb_kr_k1_xmm_xmmm128, EVEX_Vpshufbitqmb_kr_k1_ymm_ymmm256, EVEX_Vpshufbitqmb_kr_k1_zmm_zmmm512, VEX_Vpgatherdd_xmm_vm32x_xmm, VEX_Vpgatherdd_ymm_vm32y_ymm, VEX_Vpgatherdq_xmm_vm32x_xmm, VEX_Vpgatherdq_ymm_vm32x_ymm, EVEX_Vpgatherdd_xmm_k1_vm32x, EVEX_Vpgatherdd_ymm_k1_vm32y, EVEX_Vpgatherdd_zmm_k1_vm32z, EVEX_Vpgatherdq_xmm_k1_vm32x, EVEX_Vpgatherdq_ymm_k1_vm32x, EVEX_Vpgatherdq_zmm_k1_vm32y, VEX_Vpgatherqd_xmm_vm64x_xmm, VEX_Vpgatherqd_xmm_vm64y_xmm, VEX_Vpgatherqq_xmm_vm64x_xmm, VEX_Vpgatherqq_ymm_vm64y_ymm, EVEX_Vpgatherqd_xmm_k1_vm64x, EVEX_Vpgatherqd_xmm_k1_vm64y, EVEX_Vpgatherqd_ymm_k1_vm64z, EVEX_Vpgatherqq_xmm_k1_vm64x, EVEX_Vpgatherqq_ymm_k1_vm64y, EVEX_Vpgatherqq_zmm_k1_vm64z, VEX_Vgatherdps_xmm_vm32x_xmm, VEX_Vgatherdps_ymm_vm32y_ymm, VEX_Vgatherdpd_xmm_vm32x_xmm, VEX_Vgatherdpd_ymm_vm32x_ymm, EVEX_Vgatherdps_xmm_k1_vm32x, EVEX_Vgatherdps_ymm_k1_vm32y, EVEX_Vgatherdps_zmm_k1_vm32z, EVEX_Vgatherdpd_xmm_k1_vm32x, EVEX_Vgatherdpd_ymm_k1_vm32x, EVEX_Vgatherdpd_zmm_k1_vm32y, VEX_Vgatherqps_xmm_vm64x_xmm, VEX_Vgatherqps_xmm_vm64y_xmm, VEX_Vgatherqpd_xmm_vm64x_xmm, VEX_Vgatherqpd_ymm_vm64y_ymm, EVEX_Vgatherqps_xmm_k1_vm64x, EVEX_Vgatherqps_xmm_k1_vm64y, EVEX_Vgatherqps_ymm_k1_vm64z, EVEX_Vgatherqpd_xmm_k1_vm64x, EVEX_Vgatherqpd_ymm_k1_vm64y, EVEX_Vgatherqpd_zmm_k1_vm64z, VEX_Vfmaddsub132ps_xmm_xmm_xmmm128, VEX_Vfmaddsub132ps_ymm_ymm_ymmm256, VEX_Vfmaddsub132pd_xmm_xmm_xmmm128, VEX_Vfmaddsub132pd_ymm_ymm_ymmm256, EVEX_Vfmaddsub132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmaddsub132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmaddsub132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmaddsub132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmaddsub132pd_ymm_k1z_ymm_ymmm256b64, 
EVEX_Vfmaddsub132pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmsubadd132ps_xmm_xmm_xmmm128, VEX_Vfmsubadd132ps_ymm_ymm_ymmm256, VEX_Vfmsubadd132pd_xmm_xmm_xmmm128, VEX_Vfmsubadd132pd_ymm_ymm_ymmm256, EVEX_Vfmsubadd132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsubadd132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsubadd132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsubadd132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsubadd132pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsubadd132pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd132ps_xmm_xmm_xmmm128, VEX_Vfmadd132ps_ymm_ymm_ymmm256, VEX_Vfmadd132pd_xmm_xmm_xmmm128, VEX_Vfmadd132pd_ymm_ymm_ymmm256, EVEX_Vfmadd132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmadd132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmadd132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmadd132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmadd132pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmadd132pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd132ss_xmm_xmm_xmmm32, VEX_Vfmadd132sd_xmm_xmm_xmmm64, EVEX_Vfmadd132ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmadd132sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfmsub132ps_xmm_xmm_xmmm128, VEX_Vfmsub132ps_ymm_ymm_ymmm256, VEX_Vfmsub132pd_xmm_xmm_xmmm128, VEX_Vfmsub132pd_ymm_ymm_ymmm256, EVEX_Vfmsub132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsub132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsub132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsub132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsub132pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsub132pd_zmm_k1z_zmm_zmmm512b64_er, EVEX_V4fmaddps_zmm_k1z_zmmp3_m128, VEX_Vfmsub132ss_xmm_xmm_xmmm32, VEX_Vfmsub132sd_xmm_xmm_xmmm64, EVEX_Vfmsub132ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmsub132sd_xmm_k1z_xmm_xmmm64_er, EVEX_V4fmaddss_xmm_k1z_xmmp3_m128, VEX_Vfnmadd132ps_xmm_xmm_xmmm128, VEX_Vfnmadd132ps_ymm_ymm_ymmm256, VEX_Vfnmadd132pd_xmm_xmm_xmmm128, VEX_Vfnmadd132pd_ymm_ymm_ymmm256, EVEX_Vfnmadd132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmadd132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmadd132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmadd132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmadd132pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmadd132pd_zmm_k1z_zmm_zmmm512b64_er, 
VEX_Vfnmadd132ss_xmm_xmm_xmmm32, VEX_Vfnmadd132sd_xmm_xmm_xmmm64, EVEX_Vfnmadd132ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmadd132sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfnmsub132ps_xmm_xmm_xmmm128, VEX_Vfnmsub132ps_ymm_ymm_ymmm256, VEX_Vfnmsub132pd_xmm_xmm_xmmm128, VEX_Vfnmsub132pd_ymm_ymm_ymmm256, EVEX_Vfnmsub132ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmsub132ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmsub132ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmsub132pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmsub132pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmsub132pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfnmsub132ss_xmm_xmm_xmmm32, VEX_Vfnmsub132sd_xmm_xmm_xmmm64, EVEX_Vfnmsub132ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmsub132sd_xmm_k1z_xmm_xmmm64_er, EVEX_Vpscatterdd_vm32x_k1_xmm, EVEX_Vpscatterdd_vm32y_k1_ymm, EVEX_Vpscatterdd_vm32z_k1_zmm, EVEX_Vpscatterdq_vm32x_k1_xmm, EVEX_Vpscatterdq_vm32x_k1_ymm, EVEX_Vpscatterdq_vm32y_k1_zmm, EVEX_Vpscatterqd_vm64x_k1_xmm, EVEX_Vpscatterqd_vm64y_k1_xmm, EVEX_Vpscatterqd_vm64z_k1_ymm, EVEX_Vpscatterqq_vm64x_k1_xmm, EVEX_Vpscatterqq_vm64y_k1_ymm, EVEX_Vpscatterqq_vm64z_k1_zmm, EVEX_Vscatterdps_vm32x_k1_xmm, EVEX_Vscatterdps_vm32y_k1_ymm, EVEX_Vscatterdps_vm32z_k1_zmm, EVEX_Vscatterdpd_vm32x_k1_xmm, EVEX_Vscatterdpd_vm32x_k1_ymm, EVEX_Vscatterdpd_vm32y_k1_zmm, EVEX_Vscatterqps_vm64x_k1_xmm, EVEX_Vscatterqps_vm64y_k1_xmm, EVEX_Vscatterqps_vm64z_k1_ymm, EVEX_Vscatterqpd_vm64x_k1_xmm, EVEX_Vscatterqpd_vm64y_k1_ymm, EVEX_Vscatterqpd_vm64z_k1_zmm, VEX_Vfmaddsub213ps_xmm_xmm_xmmm128, VEX_Vfmaddsub213ps_ymm_ymm_ymmm256, VEX_Vfmaddsub213pd_xmm_xmm_xmmm128, VEX_Vfmaddsub213pd_ymm_ymm_ymmm256, EVEX_Vfmaddsub213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmaddsub213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmaddsub213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmaddsub213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmaddsub213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmaddsub213pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmsubadd213ps_xmm_xmm_xmmm128, VEX_Vfmsubadd213ps_ymm_ymm_ymmm256, VEX_Vfmsubadd213pd_xmm_xmm_xmmm128, VEX_Vfmsubadd213pd_ymm_ymm_ymmm256, 
EVEX_Vfmsubadd213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsubadd213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsubadd213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsubadd213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsubadd213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsubadd213pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd213ps_xmm_xmm_xmmm128, VEX_Vfmadd213ps_ymm_ymm_ymmm256, VEX_Vfmadd213pd_xmm_xmm_xmmm128, VEX_Vfmadd213pd_ymm_ymm_ymmm256, EVEX_Vfmadd213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmadd213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmadd213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmadd213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmadd213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmadd213pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd213ss_xmm_xmm_xmmm32, VEX_Vfmadd213sd_xmm_xmm_xmmm64, EVEX_Vfmadd213ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmadd213sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfmsub213ps_xmm_xmm_xmmm128, VEX_Vfmsub213ps_ymm_ymm_ymmm256, VEX_Vfmsub213pd_xmm_xmm_xmmm128, VEX_Vfmsub213pd_ymm_ymm_ymmm256, EVEX_Vfmsub213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsub213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsub213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsub213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsub213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsub213pd_zmm_k1z_zmm_zmmm512b64_er, EVEX_V4fnmaddps_zmm_k1z_zmmp3_m128, VEX_Vfmsub213ss_xmm_xmm_xmmm32, VEX_Vfmsub213sd_xmm_xmm_xmmm64, EVEX_Vfmsub213ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmsub213sd_xmm_k1z_xmm_xmmm64_er, EVEX_V4fnmaddss_xmm_k1z_xmmp3_m128, VEX_Vfnmadd213ps_xmm_xmm_xmmm128, VEX_Vfnmadd213ps_ymm_ymm_ymmm256, VEX_Vfnmadd213pd_xmm_xmm_xmmm128, VEX_Vfnmadd213pd_ymm_ymm_ymmm256, EVEX_Vfnmadd213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmadd213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmadd213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmadd213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmadd213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmadd213pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfnmadd213ss_xmm_xmm_xmmm32, VEX_Vfnmadd213sd_xmm_xmm_xmmm64, EVEX_Vfnmadd213ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmadd213sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfnmsub213ps_xmm_xmm_xmmm128, 
VEX_Vfnmsub213ps_ymm_ymm_ymmm256, VEX_Vfnmsub213pd_xmm_xmm_xmmm128, VEX_Vfnmsub213pd_ymm_ymm_ymmm256, EVEX_Vfnmsub213ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmsub213ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmsub213ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmsub213pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmsub213pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmsub213pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfnmsub213ss_xmm_xmm_xmmm32, VEX_Vfnmsub213sd_xmm_xmm_xmmm64, EVEX_Vfnmsub213ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmsub213sd_xmm_k1z_xmm_xmmm64_er, EVEX_Vpmadd52luq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmadd52luq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmadd52luq_zmm_k1z_zmm_zmmm512b64, EVEX_Vpmadd52huq_xmm_k1z_xmm_xmmm128b64, EVEX_Vpmadd52huq_ymm_k1z_ymm_ymmm256b64, EVEX_Vpmadd52huq_zmm_k1z_zmm_zmmm512b64, VEX_Vfmaddsub231ps_xmm_xmm_xmmm128, VEX_Vfmaddsub231ps_ymm_ymm_ymmm256, VEX_Vfmaddsub231pd_xmm_xmm_xmmm128, VEX_Vfmaddsub231pd_ymm_ymm_ymmm256, EVEX_Vfmaddsub231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmaddsub231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmaddsub231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmaddsub231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmaddsub231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmaddsub231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmsubadd231ps_xmm_xmm_xmmm128, VEX_Vfmsubadd231ps_ymm_ymm_ymmm256, VEX_Vfmsubadd231pd_xmm_xmm_xmmm128, VEX_Vfmsubadd231pd_ymm_ymm_ymmm256, EVEX_Vfmsubadd231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsubadd231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsubadd231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsubadd231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsubadd231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsubadd231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd231ps_xmm_xmm_xmmm128, VEX_Vfmadd231ps_ymm_ymm_ymmm256, VEX_Vfmadd231pd_xmm_xmm_xmmm128, VEX_Vfmadd231pd_ymm_ymm_ymmm256, EVEX_Vfmadd231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmadd231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmadd231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmadd231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmadd231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmadd231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmadd231ss_xmm_xmm_xmmm32, 
VEX_Vfmadd231sd_xmm_xmm_xmmm64, EVEX_Vfmadd231ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmadd231sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfmsub231ps_xmm_xmm_xmmm128, VEX_Vfmsub231ps_ymm_ymm_ymmm256, VEX_Vfmsub231pd_xmm_xmm_xmmm128, VEX_Vfmsub231pd_ymm_ymm_ymmm256, EVEX_Vfmsub231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmsub231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmsub231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmsub231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfmsub231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfmsub231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfmsub231ss_xmm_xmm_xmmm32, VEX_Vfmsub231sd_xmm_xmm_xmmm64, EVEX_Vfmsub231ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmsub231sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfnmadd231ps_xmm_xmm_xmmm128, VEX_Vfnmadd231ps_ymm_ymm_ymmm256, VEX_Vfnmadd231pd_xmm_xmm_xmmm128, VEX_Vfnmadd231pd_ymm_ymm_ymmm256, EVEX_Vfnmadd231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmadd231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmadd231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmadd231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmadd231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmadd231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfnmadd231ss_xmm_xmm_xmmm32, VEX_Vfnmadd231sd_xmm_xmm_xmmm64, EVEX_Vfnmadd231ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmadd231sd_xmm_k1z_xmm_xmmm64_er, VEX_Vfnmsub231ps_xmm_xmm_xmmm128, VEX_Vfnmsub231ps_ymm_ymm_ymmm256, VEX_Vfnmsub231pd_xmm_xmm_xmmm128, VEX_Vfnmsub231pd_ymm_ymm_ymmm256, EVEX_Vfnmsub231ps_xmm_k1z_xmm_xmmm128b32, EVEX_Vfnmsub231ps_ymm_k1z_ymm_ymmm256b32, EVEX_Vfnmsub231ps_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfnmsub231pd_xmm_k1z_xmm_xmmm128b64, EVEX_Vfnmsub231pd_ymm_k1z_ymm_ymmm256b64, EVEX_Vfnmsub231pd_zmm_k1z_zmm_zmmm512b64_er, VEX_Vfnmsub231ss_xmm_xmm_xmmm32, VEX_Vfnmsub231sd_xmm_xmm_xmmm64, EVEX_Vfnmsub231ss_xmm_k1z_xmm_xmmm32_er, EVEX_Vfnmsub231sd_xmm_k1z_xmm_xmmm64_er, EVEX_Vpconflictd_xmm_k1z_xmmm128b32, EVEX_Vpconflictd_ymm_k1z_ymmm256b32, EVEX_Vpconflictd_zmm_k1z_zmmm512b32, EVEX_Vpconflictq_xmm_k1z_xmmm128b64, EVEX_Vpconflictq_ymm_k1z_ymmm256b64, EVEX_Vpconflictq_zmm_k1z_zmmm512b64, EVEX_Vgatherpf0dps_vm32z_k1, 
EVEX_Vgatherpf0dpd_vm32y_k1, EVEX_Vgatherpf1dps_vm32z_k1, EVEX_Vgatherpf1dpd_vm32y_k1, EVEX_Vscatterpf0dps_vm32z_k1, EVEX_Vscatterpf0dpd_vm32y_k1, EVEX_Vscatterpf1dps_vm32z_k1, EVEX_Vscatterpf1dpd_vm32y_k1, EVEX_Vgatherpf0qps_vm64z_k1, EVEX_Vgatherpf0qpd_vm64z_k1, EVEX_Vgatherpf1qps_vm64z_k1, EVEX_Vgatherpf1qpd_vm64z_k1, EVEX_Vscatterpf0qps_vm64z_k1, EVEX_Vscatterpf0qpd_vm64z_k1, EVEX_Vscatterpf1qps_vm64z_k1, EVEX_Vscatterpf1qpd_vm64z_k1, Sha1nexte_xmm_xmmm128, EVEX_Vexp2ps_zmm_k1z_zmmm512b32_sae, EVEX_Vexp2pd_zmm_k1z_zmmm512b64_sae, Sha1msg1_xmm_xmmm128, Sha1msg2_xmm_xmmm128, EVEX_Vrcp28ps_zmm_k1z_zmmm512b32_sae, EVEX_Vrcp28pd_zmm_k1z_zmmm512b64_sae, Sha256rnds2_xmm_xmmm128, EVEX_Vrcp28ss_xmm_k1z_xmm_xmmm32_sae, EVEX_Vrcp28sd_xmm_k1z_xmm_xmmm64_sae, Sha256msg1_xmm_xmmm128, EVEX_Vrsqrt28ps_zmm_k1z_zmmm512b32_sae, EVEX_Vrsqrt28pd_zmm_k1z_zmmm512b64_sae, Sha256msg2_xmm_xmmm128, EVEX_Vrsqrt28ss_xmm_k1z_xmm_xmmm32_sae, EVEX_Vrsqrt28sd_xmm_k1z_xmm_xmmm64_sae, Gf2p8mulb_xmm_xmmm128, VEX_Vgf2p8mulb_xmm_xmm_xmmm128, VEX_Vgf2p8mulb_ymm_ymm_ymmm256, EVEX_Vgf2p8mulb_xmm_k1z_xmm_xmmm128, EVEX_Vgf2p8mulb_ymm_k1z_ymm_ymmm256, EVEX_Vgf2p8mulb_zmm_k1z_zmm_zmmm512, Aesimc_xmm_xmmm128, VEX_Vaesimc_xmm_xmmm128, Aesenc_xmm_xmmm128, VEX_Vaesenc_xmm_xmm_xmmm128, VEX_Vaesenc_ymm_ymm_ymmm256, EVEX_Vaesenc_xmm_xmm_xmmm128, EVEX_Vaesenc_ymm_ymm_ymmm256, EVEX_Vaesenc_zmm_zmm_zmmm512, Aesenclast_xmm_xmmm128, VEX_Vaesenclast_xmm_xmm_xmmm128, VEX_Vaesenclast_ymm_ymm_ymmm256, EVEX_Vaesenclast_xmm_xmm_xmmm128, EVEX_Vaesenclast_ymm_ymm_ymmm256, EVEX_Vaesenclast_zmm_zmm_zmmm512, Aesdec_xmm_xmmm128, VEX_Vaesdec_xmm_xmm_xmmm128, VEX_Vaesdec_ymm_ymm_ymmm256, EVEX_Vaesdec_xmm_xmm_xmmm128, EVEX_Vaesdec_ymm_ymm_ymmm256, EVEX_Vaesdec_zmm_zmm_zmmm512, Aesdeclast_xmm_xmmm128, VEX_Vaesdeclast_xmm_xmm_xmmm128, VEX_Vaesdeclast_ymm_ymm_ymmm256, EVEX_Vaesdeclast_xmm_xmm_xmmm128, EVEX_Vaesdeclast_ymm_ymm_ymmm256, EVEX_Vaesdeclast_zmm_zmm_zmmm512, Movbe_r16_m16, Movbe_r32_m32, Movbe_r64_m64, Crc32_r32_rm8, 
Crc32_r64_rm8, Movbe_m16_r16, Movbe_m32_r32, Movbe_m64_r64, Crc32_r32_rm16, Crc32_r32_rm32, Crc32_r64_rm64, VEX_Andn_r32_r32_rm32, VEX_Andn_r64_r64_rm64, VEX_Blsr_r32_rm32, VEX_Blsr_r64_rm64, VEX_Blsmsk_r32_rm32, VEX_Blsmsk_r64_rm64, VEX_Blsi_r32_rm32, VEX_Blsi_r64_rm64, VEX_Bzhi_r32_rm32_r32, VEX_Bzhi_r64_rm64_r64, Wrussd_m32_r32, Wrussq_m64_r64, VEX_Pext_r32_r32_rm32, VEX_Pext_r64_r64_rm64, VEX_Pdep_r32_r32_rm32, VEX_Pdep_r64_r64_rm64, Wrssd_m32_r32, Wrssq_m64_r64, Adcx_r32_rm32, Adcx_r64_rm64, Adox_r32_rm32, Adox_r64_rm64, VEX_Mulx_r32_r32_rm32, VEX_Mulx_r64_r64_rm64, VEX_Bextr_r32_rm32_r32, VEX_Bextr_r64_rm64_r64, VEX_Shlx_r32_rm32_r32, VEX_Shlx_r64_rm64_r64, VEX_Sarx_r32_rm32_r32, VEX_Sarx_r64_rm64_r64, VEX_Shrx_r32_rm32_r32, VEX_Shrx_r64_rm64_r64, Movdir64b_r16_m512, Movdir64b_r32_m512, Movdir64b_r64_m512, Enqcmds_r16_m512, Enqcmds_r32_m512, Enqcmds_r64_m512, Enqcmd_r16_m512, Enqcmd_r32_m512, Enqcmd_r64_m512, Movdiri_m32_r32, Movdiri_m64_r64, VEX_Vpermq_ymm_ymmm256_imm8, EVEX_Vpermq_ymm_k1z_ymmm256b64_imm8, EVEX_Vpermq_zmm_k1z_zmmm512b64_imm8, VEX_Vpermpd_ymm_ymmm256_imm8, EVEX_Vpermpd_ymm_k1z_ymmm256b64_imm8, EVEX_Vpermpd_zmm_k1z_zmmm512b64_imm8, VEX_Vpblendd_xmm_xmm_xmmm128_imm8, VEX_Vpblendd_ymm_ymm_ymmm256_imm8, EVEX_Valignd_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Valignd_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Valignd_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Valignq_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Valignq_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Valignq_zmm_k1z_zmm_zmmm512b64_imm8, VEX_Vpermilps_xmm_xmmm128_imm8, VEX_Vpermilps_ymm_ymmm256_imm8, EVEX_Vpermilps_xmm_k1z_xmmm128b32_imm8, EVEX_Vpermilps_ymm_k1z_ymmm256b32_imm8, EVEX_Vpermilps_zmm_k1z_zmmm512b32_imm8, VEX_Vpermilpd_xmm_xmmm128_imm8, VEX_Vpermilpd_ymm_ymmm256_imm8, EVEX_Vpermilpd_xmm_k1z_xmmm128b64_imm8, EVEX_Vpermilpd_ymm_k1z_ymmm256b64_imm8, EVEX_Vpermilpd_zmm_k1z_zmmm512b64_imm8, VEX_Vperm2f128_ymm_ymm_ymmm256_imm8, Roundps_xmm_xmmm128_imm8, VEX_Vroundps_xmm_xmmm128_imm8, VEX_Vroundps_ymm_ymmm256_imm8, 
EVEX_Vrndscaleps_xmm_k1z_xmmm128b32_imm8, EVEX_Vrndscaleps_ymm_k1z_ymmm256b32_imm8, EVEX_Vrndscaleps_zmm_k1z_zmmm512b32_imm8_sae, Roundpd_xmm_xmmm128_imm8, VEX_Vroundpd_xmm_xmmm128_imm8, VEX_Vroundpd_ymm_ymmm256_imm8, EVEX_Vrndscalepd_xmm_k1z_xmmm128b64_imm8, EVEX_Vrndscalepd_ymm_k1z_ymmm256b64_imm8, EVEX_Vrndscalepd_zmm_k1z_zmmm512b64_imm8_sae, Roundss_xmm_xmmm32_imm8, VEX_Vroundss_xmm_xmm_xmmm32_imm8, EVEX_Vrndscaless_xmm_k1z_xmm_xmmm32_imm8_sae, Roundsd_xmm_xmmm64_imm8, VEX_Vroundsd_xmm_xmm_xmmm64_imm8, EVEX_Vrndscalesd_xmm_k1z_xmm_xmmm64_imm8_sae, Blendps_xmm_xmmm128_imm8, VEX_Vblendps_xmm_xmm_xmmm128_imm8, VEX_Vblendps_ymm_ymm_ymmm256_imm8, Blendpd_xmm_xmmm128_imm8, VEX_Vblendpd_xmm_xmm_xmmm128_imm8, VEX_Vblendpd_ymm_ymm_ymmm256_imm8, Pblendw_xmm_xmmm128_imm8, VEX_Vpblendw_xmm_xmm_xmmm128_imm8, VEX_Vpblendw_ymm_ymm_ymmm256_imm8, Palignr_mm_mmm64_imm8, Palignr_xmm_xmmm128_imm8, VEX_Vpalignr_xmm_xmm_xmmm128_imm8, VEX_Vpalignr_ymm_ymm_ymmm256_imm8, EVEX_Vpalignr_xmm_k1z_xmm_xmmm128_imm8, EVEX_Vpalignr_ymm_k1z_ymm_ymmm256_imm8, EVEX_Vpalignr_zmm_k1z_zmm_zmmm512_imm8, Pextrb_r32m8_xmm_imm8, Pextrb_r64m8_xmm_imm8, VEX_Vpextrb_r32m8_xmm_imm8, VEX_Vpextrb_r64m8_xmm_imm8, EVEX_Vpextrb_r32m8_xmm_imm8, EVEX_Vpextrb_r64m8_xmm_imm8, Pextrw_r32m16_xmm_imm8, Pextrw_r64m16_xmm_imm8, VEX_Vpextrw_r32m16_xmm_imm8, VEX_Vpextrw_r64m16_xmm_imm8, EVEX_Vpextrw_r32m16_xmm_imm8, EVEX_Vpextrw_r64m16_xmm_imm8, Pextrd_rm32_xmm_imm8, Pextrq_rm64_xmm_imm8, VEX_Vpextrd_rm32_xmm_imm8, VEX_Vpextrq_rm64_xmm_imm8, EVEX_Vpextrd_rm32_xmm_imm8, EVEX_Vpextrq_rm64_xmm_imm8, Extractps_rm32_xmm_imm8, Extractps_r64m32_xmm_imm8, VEX_Vextractps_rm32_xmm_imm8, VEX_Vextractps_r64m32_xmm_imm8, EVEX_Vextractps_rm32_xmm_imm8, EVEX_Vextractps_r64m32_xmm_imm8, VEX_Vinsertf128_ymm_ymm_xmmm128_imm8, EVEX_Vinsertf32x4_ymm_k1z_ymm_xmmm128_imm8, EVEX_Vinsertf32x4_zmm_k1z_zmm_xmmm128_imm8, EVEX_Vinsertf64x2_ymm_k1z_ymm_xmmm128_imm8, EVEX_Vinsertf64x2_zmm_k1z_zmm_xmmm128_imm8, VEX_Vextractf128_xmmm128_ymm_imm8, 
EVEX_Vextractf32x4_xmmm128_k1z_ymm_imm8, EVEX_Vextractf32x4_xmmm128_k1z_zmm_imm8, EVEX_Vextractf64x2_xmmm128_k1z_ymm_imm8, EVEX_Vextractf64x2_xmmm128_k1z_zmm_imm8, EVEX_Vinsertf32x8_zmm_k1z_zmm_ymmm256_imm8, EVEX_Vinsertf64x4_zmm_k1z_zmm_ymmm256_imm8, EVEX_Vextractf32x8_ymmm256_k1z_zmm_imm8, EVEX_Vextractf64x4_ymmm256_k1z_zmm_imm8, VEX_Vcvtps2ph_xmmm64_xmm_imm8, VEX_Vcvtps2ph_xmmm128_ymm_imm8, EVEX_Vcvtps2ph_xmmm64_k1z_xmm_imm8, EVEX_Vcvtps2ph_xmmm128_k1z_ymm_imm8, EVEX_Vcvtps2ph_ymmm256_k1z_zmm_imm8_sae, EVEX_Vpcmpud_kr_k1_xmm_xmmm128b32_imm8, EVEX_Vpcmpud_kr_k1_ymm_ymmm256b32_imm8, EVEX_Vpcmpud_kr_k1_zmm_zmmm512b32_imm8, EVEX_Vpcmpuq_kr_k1_xmm_xmmm128b64_imm8, EVEX_Vpcmpuq_kr_k1_ymm_ymmm256b64_imm8, EVEX_Vpcmpuq_kr_k1_zmm_zmmm512b64_imm8, EVEX_Vpcmpd_kr_k1_xmm_xmmm128b32_imm8, EVEX_Vpcmpd_kr_k1_ymm_ymmm256b32_imm8, EVEX_Vpcmpd_kr_k1_zmm_zmmm512b32_imm8, EVEX_Vpcmpq_kr_k1_xmm_xmmm128b64_imm8, EVEX_Vpcmpq_kr_k1_ymm_ymmm256b64_imm8, EVEX_Vpcmpq_kr_k1_zmm_zmmm512b64_imm8, Pinsrb_xmm_r32m8_imm8, Pinsrb_xmm_r64m8_imm8, VEX_Vpinsrb_xmm_xmm_r32m8_imm8, VEX_Vpinsrb_xmm_xmm_r64m8_imm8, EVEX_Vpinsrb_xmm_xmm_r32m8_imm8, EVEX_Vpinsrb_xmm_xmm_r64m8_imm8, Insertps_xmm_xmmm32_imm8, VEX_Vinsertps_xmm_xmm_xmmm32_imm8, EVEX_Vinsertps_xmm_xmm_xmmm32_imm8, Pinsrd_xmm_rm32_imm8, Pinsrq_xmm_rm64_imm8, VEX_Vpinsrd_xmm_xmm_rm32_imm8, VEX_Vpinsrq_xmm_xmm_rm64_imm8, EVEX_Vpinsrd_xmm_xmm_rm32_imm8, EVEX_Vpinsrq_xmm_xmm_rm64_imm8, EVEX_Vshuff32x4_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vshuff32x4_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Vshuff64x2_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vshuff64x2_zmm_k1z_zmm_zmmm512b64_imm8, EVEX_Vpternlogd_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vpternlogd_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vpternlogd_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Vpternlogq_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vpternlogq_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vpternlogq_zmm_k1z_zmm_zmmm512b64_imm8, EVEX_Vgetmantps_xmm_k1z_xmmm128b32_imm8, EVEX_Vgetmantps_ymm_k1z_ymmm256b32_imm8, 
EVEX_Vgetmantps_zmm_k1z_zmmm512b32_imm8_sae, EVEX_Vgetmantpd_xmm_k1z_xmmm128b64_imm8, EVEX_Vgetmantpd_ymm_k1z_ymmm256b64_imm8, EVEX_Vgetmantpd_zmm_k1z_zmmm512b64_imm8_sae, EVEX_Vgetmantss_xmm_k1z_xmm_xmmm32_imm8_sae, EVEX_Vgetmantsd_xmm_k1z_xmm_xmmm64_imm8_sae, VEX_Kshiftrb_kr_kr_imm8, VEX_Kshiftrw_kr_kr_imm8, VEX_Kshiftrd_kr_kr_imm8, VEX_Kshiftrq_kr_kr_imm8, VEX_Kshiftlb_kr_kr_imm8, VEX_Kshiftlw_kr_kr_imm8, VEX_Kshiftld_kr_kr_imm8, VEX_Kshiftlq_kr_kr_imm8, VEX_Vinserti128_ymm_ymm_xmmm128_imm8, EVEX_Vinserti32x4_ymm_k1z_ymm_xmmm128_imm8, EVEX_Vinserti32x4_zmm_k1z_zmm_xmmm128_imm8, EVEX_Vinserti64x2_ymm_k1z_ymm_xmmm128_imm8, EVEX_Vinserti64x2_zmm_k1z_zmm_xmmm128_imm8, VEX_Vextracti128_xmmm128_ymm_imm8, EVEX_Vextracti32x4_xmmm128_k1z_ymm_imm8, EVEX_Vextracti32x4_xmmm128_k1z_zmm_imm8, EVEX_Vextracti64x2_xmmm128_k1z_ymm_imm8, EVEX_Vextracti64x2_xmmm128_k1z_zmm_imm8, EVEX_Vinserti32x8_zmm_k1z_zmm_ymmm256_imm8, EVEX_Vinserti64x4_zmm_k1z_zmm_ymmm256_imm8, EVEX_Vextracti32x8_ymmm256_k1z_zmm_imm8, EVEX_Vextracti64x4_ymmm256_k1z_zmm_imm8, EVEX_Vpcmpub_kr_k1_xmm_xmmm128_imm8, EVEX_Vpcmpub_kr_k1_ymm_ymmm256_imm8, EVEX_Vpcmpub_kr_k1_zmm_zmmm512_imm8, EVEX_Vpcmpuw_kr_k1_xmm_xmmm128_imm8, EVEX_Vpcmpuw_kr_k1_ymm_ymmm256_imm8, EVEX_Vpcmpuw_kr_k1_zmm_zmmm512_imm8, EVEX_Vpcmpb_kr_k1_xmm_xmmm128_imm8, EVEX_Vpcmpb_kr_k1_ymm_ymmm256_imm8, EVEX_Vpcmpb_kr_k1_zmm_zmmm512_imm8, EVEX_Vpcmpw_kr_k1_xmm_xmmm128_imm8, EVEX_Vpcmpw_kr_k1_ymm_ymmm256_imm8, EVEX_Vpcmpw_kr_k1_zmm_zmmm512_imm8, Dpps_xmm_xmmm128_imm8, VEX_Vdpps_xmm_xmm_xmmm128_imm8, VEX_Vdpps_ymm_ymm_ymmm256_imm8, Dppd_xmm_xmmm128_imm8, VEX_Vdppd_xmm_xmm_xmmm128_imm8, Mpsadbw_xmm_xmmm128_imm8, VEX_Vmpsadbw_xmm_xmm_xmmm128_imm8, VEX_Vmpsadbw_ymm_ymm_ymmm256_imm8, EVEX_Vdbpsadbw_xmm_k1z_xmm_xmmm128_imm8, EVEX_Vdbpsadbw_ymm_k1z_ymm_ymmm256_imm8, EVEX_Vdbpsadbw_zmm_k1z_zmm_zmmm512_imm8, EVEX_Vshufi32x4_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vshufi32x4_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Vshufi64x2_ymm_k1z_ymm_ymmm256b64_imm8, 
EVEX_Vshufi64x2_zmm_k1z_zmm_zmmm512b64_imm8, Pclmulqdq_xmm_xmmm128_imm8, VEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8, VEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8, EVEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8, EVEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8, EVEX_Vpclmulqdq_zmm_zmm_zmmm512_imm8, VEX_Vperm2i128_ymm_ymm_ymmm256_imm8, VEX_Vpermil2ps_xmm_xmm_xmmm128_xmm_imm4, VEX_Vpermil2ps_ymm_ymm_ymmm256_ymm_imm4, VEX_Vpermil2ps_xmm_xmm_xmm_xmmm128_imm4, VEX_Vpermil2ps_ymm_ymm_ymm_ymmm256_imm4, VEX_Vpermil2pd_xmm_xmm_xmmm128_xmm_imm4, VEX_Vpermil2pd_ymm_ymm_ymmm256_ymm_imm4, VEX_Vpermil2pd_xmm_xmm_xmm_xmmm128_imm4, VEX_Vpermil2pd_ymm_ymm_ymm_ymmm256_imm4, VEX_Vblendvps_xmm_xmm_xmmm128_xmm, VEX_Vblendvps_ymm_ymm_ymmm256_ymm, VEX_Vblendvpd_xmm_xmm_xmmm128_xmm, VEX_Vblendvpd_ymm_ymm_ymmm256_ymm, VEX_Vpblendvb_xmm_xmm_xmmm128_xmm, VEX_Vpblendvb_ymm_ymm_ymmm256_ymm, EVEX_Vrangeps_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vrangeps_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vrangeps_zmm_k1z_zmm_zmmm512b32_imm8_sae, EVEX_Vrangepd_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vrangepd_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vrangepd_zmm_k1z_zmm_zmmm512b64_imm8_sae, EVEX_Vrangess_xmm_k1z_xmm_xmmm32_imm8_sae, EVEX_Vrangesd_xmm_k1z_xmm_xmmm64_imm8_sae, EVEX_Vfixupimmps_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vfixupimmps_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vfixupimmps_zmm_k1z_zmm_zmmm512b32_imm8_sae, EVEX_Vfixupimmpd_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vfixupimmpd_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vfixupimmpd_zmm_k1z_zmm_zmmm512b64_imm8_sae, EVEX_Vfixupimmss_xmm_k1z_xmm_xmmm32_imm8_sae, EVEX_Vfixupimmsd_xmm_k1z_xmm_xmmm64_imm8_sae, EVEX_Vreduceps_xmm_k1z_xmmm128b32_imm8, EVEX_Vreduceps_ymm_k1z_ymmm256b32_imm8, EVEX_Vreduceps_zmm_k1z_zmmm512b32_imm8_sae, EVEX_Vreducepd_xmm_k1z_xmmm128b64_imm8, EVEX_Vreducepd_ymm_k1z_ymmm256b64_imm8, EVEX_Vreducepd_zmm_k1z_zmmm512b64_imm8_sae, EVEX_Vreducess_xmm_k1z_xmm_xmmm32_imm8_sae, EVEX_Vreducesd_xmm_k1z_xmm_xmmm64_imm8_sae, VEX_Vfmaddsubps_xmm_xmm_xmmm128_xmm, VEX_Vfmaddsubps_ymm_ymm_ymmm256_ymm, 
VEX_Vfmaddsubps_xmm_xmm_xmm_xmmm128, VEX_Vfmaddsubps_ymm_ymm_ymm_ymmm256, VEX_Vfmaddsubpd_xmm_xmm_xmmm128_xmm, VEX_Vfmaddsubpd_ymm_ymm_ymmm256_ymm, VEX_Vfmaddsubpd_xmm_xmm_xmm_xmmm128, VEX_Vfmaddsubpd_ymm_ymm_ymm_ymmm256, VEX_Vfmsubaddps_xmm_xmm_xmmm128_xmm, VEX_Vfmsubaddps_ymm_ymm_ymmm256_ymm, VEX_Vfmsubaddps_xmm_xmm_xmm_xmmm128, VEX_Vfmsubaddps_ymm_ymm_ymm_ymmm256, VEX_Vfmsubaddpd_xmm_xmm_xmmm128_xmm, VEX_Vfmsubaddpd_ymm_ymm_ymmm256_ymm, VEX_Vfmsubaddpd_xmm_xmm_xmm_xmmm128, VEX_Vfmsubaddpd_ymm_ymm_ymm_ymmm256, Pcmpestrm_xmm_xmmm128_imm8, Pcmpestrm64_xmm_xmmm128_imm8, VEX_Vpcmpestrm_xmm_xmmm128_imm8, VEX_Vpcmpestrm64_xmm_xmmm128_imm8, Pcmpestri_xmm_xmmm128_imm8, Pcmpestri64_xmm_xmmm128_imm8, VEX_Vpcmpestri_xmm_xmmm128_imm8, VEX_Vpcmpestri64_xmm_xmmm128_imm8, Pcmpistrm_xmm_xmmm128_imm8, VEX_Vpcmpistrm_xmm_xmmm128_imm8, Pcmpistri_xmm_xmmm128_imm8, VEX_Vpcmpistri_xmm_xmmm128_imm8, EVEX_Vfpclassps_kr_k1_xmmm128b32_imm8, EVEX_Vfpclassps_kr_k1_ymmm256b32_imm8, EVEX_Vfpclassps_kr_k1_zmmm512b32_imm8, EVEX_Vfpclasspd_kr_k1_xmmm128b64_imm8, EVEX_Vfpclasspd_kr_k1_ymmm256b64_imm8, EVEX_Vfpclasspd_kr_k1_zmmm512b64_imm8, EVEX_Vfpclassss_kr_k1_xmmm32_imm8, EVEX_Vfpclasssd_kr_k1_xmmm64_imm8, VEX_Vfmaddps_xmm_xmm_xmmm128_xmm, VEX_Vfmaddps_ymm_ymm_ymmm256_ymm, VEX_Vfmaddps_xmm_xmm_xmm_xmmm128, VEX_Vfmaddps_ymm_ymm_ymm_ymmm256, VEX_Vfmaddpd_xmm_xmm_xmmm128_xmm, VEX_Vfmaddpd_ymm_ymm_ymmm256_ymm, VEX_Vfmaddpd_xmm_xmm_xmm_xmmm128, VEX_Vfmaddpd_ymm_ymm_ymm_ymmm256, VEX_Vfmaddss_xmm_xmm_xmmm32_xmm, VEX_Vfmaddss_xmm_xmm_xmm_xmmm32, VEX_Vfmaddsd_xmm_xmm_xmmm64_xmm, VEX_Vfmaddsd_xmm_xmm_xmm_xmmm64, VEX_Vfmsubps_xmm_xmm_xmmm128_xmm, VEX_Vfmsubps_ymm_ymm_ymmm256_ymm, VEX_Vfmsubps_xmm_xmm_xmm_xmmm128, VEX_Vfmsubps_ymm_ymm_ymm_ymmm256, VEX_Vfmsubpd_xmm_xmm_xmmm128_xmm, VEX_Vfmsubpd_ymm_ymm_ymmm256_ymm, VEX_Vfmsubpd_xmm_xmm_xmm_xmmm128, VEX_Vfmsubpd_ymm_ymm_ymm_ymmm256, VEX_Vfmsubss_xmm_xmm_xmmm32_xmm, VEX_Vfmsubss_xmm_xmm_xmm_xmmm32, VEX_Vfmsubsd_xmm_xmm_xmmm64_xmm, 
VEX_Vfmsubsd_xmm_xmm_xmm_xmmm64, EVEX_Vpshldw_xmm_k1z_xmm_xmmm128_imm8, EVEX_Vpshldw_ymm_k1z_ymm_ymmm256_imm8, EVEX_Vpshldw_zmm_k1z_zmm_zmmm512_imm8, EVEX_Vpshldd_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vpshldd_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vpshldd_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Vpshldq_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vpshldq_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vpshldq_zmm_k1z_zmm_zmmm512b64_imm8, EVEX_Vpshrdw_xmm_k1z_xmm_xmmm128_imm8, EVEX_Vpshrdw_ymm_k1z_ymm_ymmm256_imm8, EVEX_Vpshrdw_zmm_k1z_zmm_zmmm512_imm8, EVEX_Vpshrdd_xmm_k1z_xmm_xmmm128b32_imm8, EVEX_Vpshrdd_ymm_k1z_ymm_ymmm256b32_imm8, EVEX_Vpshrdd_zmm_k1z_zmm_zmmm512b32_imm8, EVEX_Vpshrdq_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vpshrdq_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vpshrdq_zmm_k1z_zmm_zmmm512b64_imm8, VEX_Vfnmaddps_xmm_xmm_xmmm128_xmm, VEX_Vfnmaddps_ymm_ymm_ymmm256_ymm, VEX_Vfnmaddps_xmm_xmm_xmm_xmmm128, VEX_Vfnmaddps_ymm_ymm_ymm_ymmm256, VEX_Vfnmaddpd_xmm_xmm_xmmm128_xmm, VEX_Vfnmaddpd_ymm_ymm_ymmm256_ymm, VEX_Vfnmaddpd_xmm_xmm_xmm_xmmm128, VEX_Vfnmaddpd_ymm_ymm_ymm_ymmm256, VEX_Vfnmaddss_xmm_xmm_xmmm32_xmm, VEX_Vfnmaddss_xmm_xmm_xmm_xmmm32, VEX_Vfnmaddsd_xmm_xmm_xmmm64_xmm, VEX_Vfnmaddsd_xmm_xmm_xmm_xmmm64, VEX_Vfnmsubps_xmm_xmm_xmmm128_xmm, VEX_Vfnmsubps_ymm_ymm_ymmm256_ymm, VEX_Vfnmsubps_xmm_xmm_xmm_xmmm128, VEX_Vfnmsubps_ymm_ymm_ymm_ymmm256, VEX_Vfnmsubpd_xmm_xmm_xmmm128_xmm, VEX_Vfnmsubpd_ymm_ymm_ymmm256_ymm, VEX_Vfnmsubpd_xmm_xmm_xmm_xmmm128, VEX_Vfnmsubpd_ymm_ymm_ymm_ymmm256, VEX_Vfnmsubss_xmm_xmm_xmmm32_xmm, VEX_Vfnmsubss_xmm_xmm_xmm_xmmm32, VEX_Vfnmsubsd_xmm_xmm_xmmm64_xmm, VEX_Vfnmsubsd_xmm_xmm_xmm_xmmm64, Sha1rnds4_xmm_xmmm128_imm8, Gf2p8affineqb_xmm_xmmm128_imm8, VEX_Vgf2p8affineqb_xmm_xmm_xmmm128_imm8, VEX_Vgf2p8affineqb_ymm_ymm_ymmm256_imm8, EVEX_Vgf2p8affineqb_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vgf2p8affineqb_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vgf2p8affineqb_zmm_k1z_zmm_zmmm512b64_imm8, Gf2p8affineinvqb_xmm_xmmm128_imm8, VEX_Vgf2p8affineinvqb_xmm_xmm_xmmm128_imm8, 
VEX_Vgf2p8affineinvqb_ymm_ymm_ymmm256_imm8, EVEX_Vgf2p8affineinvqb_xmm_k1z_xmm_xmmm128b64_imm8, EVEX_Vgf2p8affineinvqb_ymm_k1z_ymm_ymmm256b64_imm8, EVEX_Vgf2p8affineinvqb_zmm_k1z_zmm_zmmm512b64_imm8, Aeskeygenassist_xmm_xmmm128_imm8, VEX_Vaeskeygenassist_xmm_xmmm128_imm8, VEX_Rorx_r32_rm32_imm8, VEX_Rorx_r64_rm64_imm8, XOP_Vpmacssww_xmm_xmm_xmmm128_xmm, XOP_Vpmacsswd_xmm_xmm_xmmm128_xmm, XOP_Vpmacssdql_xmm_xmm_xmmm128_xmm, XOP_Vpmacssdd_xmm_xmm_xmmm128_xmm, XOP_Vpmacssdqh_xmm_xmm_xmmm128_xmm, XOP_Vpmacsww_xmm_xmm_xmmm128_xmm, XOP_Vpmacswd_xmm_xmm_xmmm128_xmm, XOP_Vpmacsdql_xmm_xmm_xmmm128_xmm, XOP_Vpmacsdd_xmm_xmm_xmmm128_xmm, XOP_Vpmacsdqh_xmm_xmm_xmmm128_xmm, XOP_Vpcmov_xmm_xmm_xmmm128_xmm, XOP_Vpcmov_ymm_ymm_ymmm256_ymm, XOP_Vpcmov_xmm_xmm_xmm_xmmm128, XOP_Vpcmov_ymm_ymm_ymm_ymmm256, XOP_Vpperm_xmm_xmm_xmmm128_xmm, XOP_Vpperm_xmm_xmm_xmm_xmmm128, XOP_Vpmadcsswd_xmm_xmm_xmmm128_xmm, XOP_Vpmadcswd_xmm_xmm_xmmm128_xmm, XOP_Vprotb_xmm_xmmm128_imm8, XOP_Vprotw_xmm_xmmm128_imm8, XOP_Vprotd_xmm_xmmm128_imm8, XOP_Vprotq_xmm_xmmm128_imm8, XOP_Vpcomb_xmm_xmm_xmmm128_imm8, XOP_Vpcomw_xmm_xmm_xmmm128_imm8, XOP_Vpcomd_xmm_xmm_xmmm128_imm8, XOP_Vpcomq_xmm_xmm_xmmm128_imm8, XOP_Vpcomub_xmm_xmm_xmmm128_imm8, XOP_Vpcomuw_xmm_xmm_xmmm128_imm8, XOP_Vpcomud_xmm_xmm_xmmm128_imm8, XOP_Vpcomuq_xmm_xmm_xmmm128_imm8, XOP_Blcfill_r32_rm32, XOP_Blcfill_r64_rm64, XOP_Blsfill_r32_rm32, XOP_Blsfill_r64_rm64, XOP_Blcs_r32_rm32, XOP_Blcs_r64_rm64, XOP_Tzmsk_r32_rm32, XOP_Tzmsk_r64_rm64, XOP_Blcic_r32_rm32, XOP_Blcic_r64_rm64, XOP_Blsic_r32_rm32, XOP_Blsic_r64_rm64, XOP_T1mskc_r32_rm32, XOP_T1mskc_r64_rm64, XOP_Blcmsk_r32_rm32, XOP_Blcmsk_r64_rm64, XOP_Blci_r32_rm32, XOP_Blci_r64_rm64, XOP_Llwpcb_r32, XOP_Llwpcb_r64, XOP_Slwpcb_r32, XOP_Slwpcb_r64, XOP_Vfrczps_xmm_xmmm128, XOP_Vfrczps_ymm_ymmm256, XOP_Vfrczpd_xmm_xmmm128, XOP_Vfrczpd_ymm_ymmm256, XOP_Vfrczss_xmm_xmmm32, XOP_Vfrczsd_xmm_xmmm64, XOP_Vprotb_xmm_xmmm128_xmm, XOP_Vprotb_xmm_xmm_xmmm128, XOP_Vprotw_xmm_xmmm128_xmm, 
XOP_Vprotw_xmm_xmm_xmmm128, XOP_Vprotd_xmm_xmmm128_xmm, XOP_Vprotd_xmm_xmm_xmmm128, XOP_Vprotq_xmm_xmmm128_xmm, XOP_Vprotq_xmm_xmm_xmmm128, XOP_Vpshlb_xmm_xmmm128_xmm, XOP_Vpshlb_xmm_xmm_xmmm128, XOP_Vpshlw_xmm_xmmm128_xmm, XOP_Vpshlw_xmm_xmm_xmmm128, XOP_Vpshld_xmm_xmmm128_xmm, XOP_Vpshld_xmm_xmm_xmmm128, XOP_Vpshlq_xmm_xmmm128_xmm, XOP_Vpshlq_xmm_xmm_xmmm128, XOP_Vpshab_xmm_xmmm128_xmm, XOP_Vpshab_xmm_xmm_xmmm128, XOP_Vpshaw_xmm_xmmm128_xmm, XOP_Vpshaw_xmm_xmm_xmmm128, XOP_Vpshad_xmm_xmmm128_xmm, XOP_Vpshad_xmm_xmm_xmmm128, XOP_Vpshaq_xmm_xmmm128_xmm, XOP_Vpshaq_xmm_xmm_xmmm128, XOP_Vphaddbw_xmm_xmmm128, XOP_Vphaddbd_xmm_xmmm128, XOP_Vphaddbq_xmm_xmmm128, XOP_Vphaddwd_xmm_xmmm128, XOP_Vphaddwq_xmm_xmmm128, XOP_Vphadddq_xmm_xmmm128, XOP_Vphaddubw_xmm_xmmm128, XOP_Vphaddubd_xmm_xmmm128, XOP_Vphaddubq_xmm_xmmm128, XOP_Vphadduwd_xmm_xmmm128, XOP_Vphadduwq_xmm_xmmm128, XOP_Vphaddudq_xmm_xmmm128, XOP_Vphsubbw_xmm_xmmm128, XOP_Vphsubwd_xmm_xmmm128, XOP_Vphsubdq_xmm_xmmm128, XOP_Bextr_r32_rm32_imm32, XOP_Bextr_r64_rm64_imm32, XOP_Lwpins_r32_rm32_imm32, XOP_Lwpins_r64_rm32_imm32, XOP_Lwpval_r32_rm32_imm32, XOP_Lwpval_r64_rm32_imm32, D3NOW_Pi2fw_mm_mmm64, D3NOW_Pi2fd_mm_mmm64, D3NOW_Pf2iw_mm_mmm64, D3NOW_Pf2id_mm_mmm64, D3NOW_Pfrcpv_mm_mmm64, D3NOW_Pfrsqrtv_mm_mmm64, D3NOW_Pfnacc_mm_mmm64, D3NOW_Pfpnacc_mm_mmm64, D3NOW_Pfcmpge_mm_mmm64, D3NOW_Pfmin_mm_mmm64, D3NOW_Pfrcp_mm_mmm64, D3NOW_Pfrsqrt_mm_mmm64, D3NOW_Pfsub_mm_mmm64, D3NOW_Pfadd_mm_mmm64, D3NOW_Pfcmpgt_mm_mmm64, D3NOW_Pfmax_mm_mmm64, D3NOW_Pfrcpit1_mm_mmm64, D3NOW_Pfrsqit1_mm_mmm64, D3NOW_Pfsubr_mm_mmm64, D3NOW_Pfacc_mm_mmm64, D3NOW_Pfcmpeq_mm_mmm64, D3NOW_Pfmul_mm_mmm64, D3NOW_Pfrcpit2_mm_mmm64, D3NOW_Pmulhrw_mm_mmm64, D3NOW_Pswapd_mm_mmm64, D3NOW_Pavgusb_mm_mmm64, Rmpadjust, Rmpupdate, Psmash, Pvalidatew, Pvalidated, Pvalidateq, Serialize, Xsusldtrk, Xresldtrk, Invlpgbw, Invlpgbd, Invlpgbq, Tlbsync, Prefetchreserved3_m8, Prefetchreserved4_m8, Prefetchreserved5_m8, Prefetchreserved6_m8, Prefetchreserved7_m8, Ud0, 
Vmgexit, Getsecq, VEX_Ldtilecfg_m512, VEX_Tilerelease, VEX_Sttilecfg_m512, VEX_Tilezero_tmm, VEX_Tileloaddt1_tmm_sibmem, VEX_Tilestored_sibmem_tmm, VEX_Tileloadd_tmm_sibmem, VEX_Tdpbf16ps_tmm_tmm_tmm, VEX_Tdpbuud_tmm_tmm_tmm, VEX_Tdpbusd_tmm_tmm_tmm, VEX_Tdpbsud_tmm_tmm_tmm, VEX_Tdpbssd_tmm_tmm_tmm, Fnstdw_AX, Fnstsg_AX, Rdshr_rm32, Wrshr_rm32, Smint, Dmint, Rdm, Svdc_m80_Sreg, Rsdc_Sreg_m80, Svldt_m80, Rsldt_m80, Svts_m80, Rsts_m80, Smint_0F7E, Bb0_reset, Bb1_reset, Cpu_write, Cpu_read, Altinst, Paveb_mm_mmm64, Paddsiw_mm_mmm64, Pmagw_mm_mmm64, Pdistib_mm_m64, Psubsiw_mm_mmm64, Pmvzb_mm_m64, Pmulhrw_mm_mmm64, Pmvnzb_mm_m64, Pmvlzb_mm_m64, Pmvgezb_mm_m64, Pmulhriw_mm_mmm64, Pmachriw_mm_m64, Cyrix_D9D7, Cyrix_D9E2, Ftstp, Cyrix_D9E7, Frint2, Frichop, Cyrix_DED8, Cyrix_DEDA, Cyrix_DEDC, Cyrix_DEDD, Cyrix_DEDE, Frinear, Tdcall, Seamret, Seamops, Seamcall, Aesencwide128kl_m384, Aesdecwide128kl_m384, Aesencwide256kl_m512, Aesdecwide256kl_m512, Loadiwkey_xmm_xmm, Aesenc128kl_xmm_m384, Aesdec128kl_xmm_m384, Aesenc256kl_xmm_m512, Aesdec256kl_xmm_m512, Encodekey128_r32_r32, Encodekey256_r32_r32, VEX_Vbroadcastss_xmm_xmm, VEX_Vbroadcastss_ymm_xmm, VEX_Vbroadcastsd_ymm_xmm, Vmgexit_F2, Uiret, Testui, Clui, Stui, Senduipi_r64, Hreset_imm8, VEX_Vpdpbusd_xmm_xmm_xmmm128, VEX_Vpdpbusd_ymm_ymm_ymmm256, VEX_Vpdpbusds_xmm_xmm_xmmm128, VEX_Vpdpbusds_ymm_ymm_ymmm256, VEX_Vpdpwssd_xmm_xmm_xmmm128, VEX_Vpdpwssd_ymm_ymm_ymmm256, VEX_Vpdpwssds_xmm_xmm_xmmm128, VEX_Vpdpwssds_ymm_ymm_ymmm256, Ccs_hash_16, Ccs_hash_32, Ccs_hash_64, Ccs_encrypt_16, Ccs_encrypt_32, Ccs_encrypt_64, Lkgs_rm16, Lkgs_r32m16, Lkgs_r64m16, Eretu, Erets, EVEX_Vaddph_xmm_k1z_xmm_xmmm128b16, EVEX_Vaddph_ymm_k1z_ymm_ymmm256b16, EVEX_Vaddph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vaddsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vcmpph_kr_k1_xmm_xmmm128b16_imm8, EVEX_Vcmpph_kr_k1_ymm_ymmm256b16_imm8, EVEX_Vcmpph_kr_k1_zmm_zmmm512b16_imm8_sae, EVEX_Vcmpsh_kr_k1_xmm_xmmm16_imm8_sae, EVEX_Vcomish_xmm_xmmm16_sae, EVEX_Vcvtdq2ph_xmm_k1z_xmmm128b32, 
EVEX_Vcvtdq2ph_xmm_k1z_ymmm256b32, EVEX_Vcvtdq2ph_ymm_k1z_zmmm512b32_er, EVEX_Vcvtpd2ph_xmm_k1z_xmmm128b64, EVEX_Vcvtpd2ph_xmm_k1z_ymmm256b64, EVEX_Vcvtpd2ph_xmm_k1z_zmmm512b64_er, EVEX_Vcvtph2dq_xmm_k1z_xmmm64b16, EVEX_Vcvtph2dq_ymm_k1z_xmmm128b16, EVEX_Vcvtph2dq_zmm_k1z_ymmm256b16_er, EVEX_Vcvtph2pd_xmm_k1z_xmmm32b16, EVEX_Vcvtph2pd_ymm_k1z_xmmm64b16, EVEX_Vcvtph2pd_zmm_k1z_xmmm128b16_sae, EVEX_Vcvtph2psx_xmm_k1z_xmmm64b16, EVEX_Vcvtph2psx_ymm_k1z_xmmm128b16, EVEX_Vcvtph2psx_zmm_k1z_ymmm256b16_sae, EVEX_Vcvtph2qq_xmm_k1z_xmmm32b16, EVEX_Vcvtph2qq_ymm_k1z_xmmm64b16, EVEX_Vcvtph2qq_zmm_k1z_xmmm128b16_er, EVEX_Vcvtph2udq_xmm_k1z_xmmm64b16, EVEX_Vcvtph2udq_ymm_k1z_xmmm128b16, EVEX_Vcvtph2udq_zmm_k1z_ymmm256b16_er, EVEX_Vcvtph2uqq_xmm_k1z_xmmm32b16, EVEX_Vcvtph2uqq_ymm_k1z_xmmm64b16, EVEX_Vcvtph2uqq_zmm_k1z_xmmm128b16_er, EVEX_Vcvtph2uw_xmm_k1z_xmmm128b16, EVEX_Vcvtph2uw_ymm_k1z_ymmm256b16, EVEX_Vcvtph2uw_zmm_k1z_zmmm512b16_er, EVEX_Vcvtph2w_xmm_k1z_xmmm128b16, EVEX_Vcvtph2w_ymm_k1z_ymmm256b16, EVEX_Vcvtph2w_zmm_k1z_zmmm512b16_er, EVEX_Vcvtps2phx_xmm_k1z_xmmm128b32, EVEX_Vcvtps2phx_xmm_k1z_ymmm256b32, EVEX_Vcvtps2phx_ymm_k1z_zmmm512b32_er, EVEX_Vcvtqq2ph_xmm_k1z_xmmm128b64, EVEX_Vcvtqq2ph_xmm_k1z_ymmm256b64, EVEX_Vcvtqq2ph_xmm_k1z_zmmm512b64_er, EVEX_Vcvtsd2sh_xmm_k1z_xmm_xmmm64_er, EVEX_Vcvtsh2sd_xmm_k1z_xmm_xmmm16_sae, EVEX_Vcvtsh2si_r32_xmmm16_er, EVEX_Vcvtsh2si_r64_xmmm16_er, EVEX_Vcvtsh2ss_xmm_k1z_xmm_xmmm16_sae, EVEX_Vcvtsh2usi_r32_xmmm16_er, EVEX_Vcvtsh2usi_r64_xmmm16_er, EVEX_Vcvtsi2sh_xmm_xmm_rm32_er, EVEX_Vcvtsi2sh_xmm_xmm_rm64_er, EVEX_Vcvtss2sh_xmm_k1z_xmm_xmmm32_er, EVEX_Vcvttph2dq_xmm_k1z_xmmm64b16, EVEX_Vcvttph2dq_ymm_k1z_xmmm128b16, EVEX_Vcvttph2dq_zmm_k1z_ymmm256b16_sae, EVEX_Vcvttph2qq_xmm_k1z_xmmm32b16, EVEX_Vcvttph2qq_ymm_k1z_xmmm64b16, EVEX_Vcvttph2qq_zmm_k1z_xmmm128b16_sae, EVEX_Vcvttph2udq_xmm_k1z_xmmm64b16, EVEX_Vcvttph2udq_ymm_k1z_xmmm128b16, EVEX_Vcvttph2udq_zmm_k1z_ymmm256b16_sae, EVEX_Vcvttph2uqq_xmm_k1z_xmmm32b16, 
EVEX_Vcvttph2uqq_ymm_k1z_xmmm64b16, EVEX_Vcvttph2uqq_zmm_k1z_xmmm128b16_sae, EVEX_Vcvttph2uw_xmm_k1z_xmmm128b16, EVEX_Vcvttph2uw_ymm_k1z_ymmm256b16, EVEX_Vcvttph2uw_zmm_k1z_zmmm512b16_sae, EVEX_Vcvttph2w_xmm_k1z_xmmm128b16, EVEX_Vcvttph2w_ymm_k1z_ymmm256b16, EVEX_Vcvttph2w_zmm_k1z_zmmm512b16_sae, EVEX_Vcvttsh2si_r32_xmmm16_sae, EVEX_Vcvttsh2si_r64_xmmm16_sae, EVEX_Vcvttsh2usi_r32_xmmm16_sae, EVEX_Vcvttsh2usi_r64_xmmm16_sae, EVEX_Vcvtudq2ph_xmm_k1z_xmmm128b32, EVEX_Vcvtudq2ph_xmm_k1z_ymmm256b32, EVEX_Vcvtudq2ph_ymm_k1z_zmmm512b32_er, EVEX_Vcvtuqq2ph_xmm_k1z_xmmm128b64, EVEX_Vcvtuqq2ph_xmm_k1z_ymmm256b64, EVEX_Vcvtuqq2ph_xmm_k1z_zmmm512b64_er, EVEX_Vcvtusi2sh_xmm_xmm_rm32_er, EVEX_Vcvtusi2sh_xmm_xmm_rm64_er, EVEX_Vcvtuw2ph_xmm_k1z_xmmm128b16, EVEX_Vcvtuw2ph_ymm_k1z_ymmm256b16, EVEX_Vcvtuw2ph_zmm_k1z_zmmm512b16_er, EVEX_Vcvtw2ph_xmm_k1z_xmmm128b16, EVEX_Vcvtw2ph_ymm_k1z_ymmm256b16, EVEX_Vcvtw2ph_zmm_k1z_zmmm512b16_er, EVEX_Vdivph_xmm_k1z_xmm_xmmm128b16, EVEX_Vdivph_ymm_k1z_ymm_ymmm256b16, EVEX_Vdivph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vdivsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfcmaddcph_xmm_k1z_xmm_xmmm128b32, EVEX_Vfcmaddcph_ymm_k1z_ymm_ymmm256b32, EVEX_Vfcmaddcph_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmaddcph_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmaddcph_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmaddcph_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfcmaddcsh_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmaddcsh_xmm_k1z_xmm_xmmm32_er, EVEX_Vfcmulcph_xmm_k1z_xmm_xmmm128b32, EVEX_Vfcmulcph_ymm_k1z_ymm_ymmm256b32, EVEX_Vfcmulcph_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfmulcph_xmm_k1z_xmm_xmmm128b32, EVEX_Vfmulcph_ymm_k1z_ymm_ymmm256b32, EVEX_Vfmulcph_zmm_k1z_zmm_zmmm512b32_er, EVEX_Vfcmulcsh_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmulcsh_xmm_k1z_xmm_xmmm32_er, EVEX_Vfmaddsub132ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmaddsub132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmaddsub132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmaddsub213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmaddsub213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmaddsub213ph_zmm_k1z_zmm_zmmm512b16_er, 
EVEX_Vfmaddsub231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmaddsub231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmaddsub231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsubadd132ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsubadd132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsubadd132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsubadd213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsubadd213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsubadd213ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsubadd231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsubadd231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsubadd231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmadd132ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmadd132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmadd132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmadd213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmadd213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmadd213ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmadd231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmadd231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmadd231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmadd132ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfnmadd132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmadd132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmadd213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfnmadd213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmadd213ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmadd231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfnmadd231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmadd231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmadd132sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfmadd213sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfmadd231sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmadd132sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmadd213sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmadd231sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfmsub132ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsub132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsub132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsub213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsub213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsub213ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsub231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfmsub231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfmsub231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmsub132ph_xmm_k1z_xmm_xmmm128b16, 
EVEX_Vfnmsub132ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmsub132ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmsub213ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfnmsub213ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmsub213ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfnmsub231ph_xmm_k1z_xmm_xmmm128b16, EVEX_Vfnmsub231ph_ymm_k1z_ymm_ymmm256b16, EVEX_Vfnmsub231ph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vfmsub132sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfmsub213sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfmsub231sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmsub132sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmsub213sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfnmsub231sh_xmm_k1z_xmm_xmmm16_er, EVEX_Vfpclassph_kr_k1_xmmm128b16_imm8, EVEX_Vfpclassph_kr_k1_ymmm256b16_imm8, EVEX_Vfpclassph_kr_k1_zmmm512b16_imm8, EVEX_Vfpclasssh_kr_k1_xmmm16_imm8, EVEX_Vgetexpph_xmm_k1z_xmmm128b16, EVEX_Vgetexpph_ymm_k1z_ymmm256b16, EVEX_Vgetexpph_zmm_k1z_zmmm512b16_sae, EVEX_Vgetexpsh_xmm_k1z_xmm_xmmm16_sae, EVEX_Vgetmantph_xmm_k1z_xmmm128b16_imm8, EVEX_Vgetmantph_ymm_k1z_ymmm256b16_imm8, EVEX_Vgetmantph_zmm_k1z_zmmm512b16_imm8_sae, EVEX_Vgetmantsh_xmm_k1z_xmm_xmmm16_imm8_sae, EVEX_Vmaxph_xmm_k1z_xmm_xmmm128b16, EVEX_Vmaxph_ymm_k1z_ymm_ymmm256b16, EVEX_Vmaxph_zmm_k1z_zmm_zmmm512b16_sae, EVEX_Vmaxsh_xmm_k1z_xmm_xmmm16_sae, EVEX_Vminph_xmm_k1z_xmm_xmmm128b16, EVEX_Vminph_ymm_k1z_ymm_ymmm256b16, EVEX_Vminph_zmm_k1z_zmm_zmmm512b16_sae, EVEX_Vminsh_xmm_k1z_xmm_xmmm16_sae, EVEX_Vmovsh_xmm_k1z_m16, EVEX_Vmovsh_m16_k1_xmm, EVEX_Vmovsh_xmm_k1z_xmm_xmm, EVEX_Vmovsh_xmm_k1z_xmm_xmm_MAP5_11, EVEX_Vmovw_xmm_r32m16, EVEX_Vmovw_xmm_r64m16, EVEX_Vmovw_r32m16_xmm, EVEX_Vmovw_r64m16_xmm, EVEX_Vmulph_xmm_k1z_xmm_xmmm128b16, EVEX_Vmulph_ymm_k1z_ymm_ymmm256b16, EVEX_Vmulph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vmulsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vrcpph_xmm_k1z_xmmm128b16, EVEX_Vrcpph_ymm_k1z_ymmm256b16, EVEX_Vrcpph_zmm_k1z_zmmm512b16, EVEX_Vrcpsh_xmm_k1z_xmm_xmmm16, EVEX_Vreduceph_xmm_k1z_xmmm128b16_imm8, EVEX_Vreduceph_ymm_k1z_ymmm256b16_imm8, EVEX_Vreduceph_zmm_k1z_zmmm512b16_imm8_sae, 
EVEX_Vreducesh_xmm_k1z_xmm_xmmm16_imm8_sae, EVEX_Vrndscaleph_xmm_k1z_xmmm128b16_imm8, EVEX_Vrndscaleph_ymm_k1z_ymmm256b16_imm8, EVEX_Vrndscaleph_zmm_k1z_zmmm512b16_imm8_sae, EVEX_Vrndscalesh_xmm_k1z_xmm_xmmm16_imm8_sae, EVEX_Vrsqrtph_xmm_k1z_xmmm128b16, EVEX_Vrsqrtph_ymm_k1z_ymmm256b16, EVEX_Vrsqrtph_zmm_k1z_zmmm512b16, EVEX_Vrsqrtsh_xmm_k1z_xmm_xmmm16, EVEX_Vscalefph_xmm_k1z_xmm_xmmm128b16, EVEX_Vscalefph_ymm_k1z_ymm_ymmm256b16, EVEX_Vscalefph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vscalefsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vsqrtph_xmm_k1z_xmmm128b16, EVEX_Vsqrtph_ymm_k1z_ymmm256b16, EVEX_Vsqrtph_zmm_k1z_zmmm512b16_er, EVEX_Vsqrtsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vsubph_xmm_k1z_xmm_xmmm128b16, EVEX_Vsubph_ymm_k1z_ymm_ymmm256b16, EVEX_Vsubph_zmm_k1z_zmm_zmmm512b16_er, EVEX_Vsubsh_xmm_k1z_xmm_xmmm16_er, EVEX_Vucomish_xmm_xmmm16_sae, Rdudbg, Wrudbg, VEX_KNC_Jkzd_kr_rel8_64, VEX_KNC_Jknzd_kr_rel8_64, VEX_KNC_Vprefetchnta_m8, VEX_KNC_Vprefetch0_m8, VEX_KNC_Vprefetch1_m8, VEX_KNC_Vprefetch2_m8, VEX_KNC_Vprefetchenta_m8, VEX_KNC_Vprefetche0_m8, VEX_KNC_Vprefetche1_m8, VEX_KNC_Vprefetche2_m8, VEX_KNC_Kand_kr_kr, VEX_KNC_Kandn_kr_kr, VEX_KNC_Kandnr_kr_kr, VEX_KNC_Knot_kr_kr, VEX_KNC_Kor_kr_kr, VEX_KNC_Kxnor_kr_kr, VEX_KNC_Kxor_kr_kr, VEX_KNC_Kmerge2l1h_kr_kr, VEX_KNC_Kmerge2l1l_kr_kr, VEX_KNC_Jkzd_kr_rel32_64, VEX_KNC_Jknzd_kr_rel32_64, VEX_KNC_Kmov_kr_kr, VEX_KNC_Kmov_kr_r32, VEX_KNC_Kmov_r32_kr, VEX_KNC_Kconcath_r64_kr_kr, VEX_KNC_Kconcatl_r64_kr_kr, VEX_KNC_Kortest_kr_kr, VEX_KNC_Delay_r32, VEX_KNC_Delay_r64, VEX_KNC_Spflt_r32, VEX_KNC_Spflt_r64, VEX_KNC_Clevict1_m8, VEX_KNC_Clevict0_m8, VEX_KNC_Popcnt_r32_r32, VEX_KNC_Popcnt_r64_r64, VEX_KNC_Tzcnt_r32_r32, VEX_KNC_Tzcnt_r64_r64, VEX_KNC_Tzcnti_r32_r32, VEX_KNC_Tzcnti_r64_r64, VEX_KNC_Lzcnt_r32_r32, VEX_KNC_Lzcnt_r64_r64, VEX_KNC_Undoc_r32_rm32_128_F3_0F38_W0_F0, VEX_KNC_Undoc_r64_rm64_128_F3_0F38_W1_F0, VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F0, VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F0, VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F1, 
VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F1, VEX_KNC_Kextract_kr_r64_imm8, MVEX_Vprefetchnta_m, MVEX_Vprefetch0_m, MVEX_Vprefetch1_m, MVEX_Vprefetch2_m, MVEX_Vprefetchenta_m, MVEX_Vprefetche0_m, MVEX_Vprefetche1_m, MVEX_Vprefetche2_m, MVEX_Vmovaps_zmm_k1_zmmmt, MVEX_Vmovapd_zmm_k1_zmmmt, MVEX_Vmovaps_mt_k1_zmm, MVEX_Vmovapd_mt_k1_zmm, MVEX_Vmovnrapd_m_k1_zmm, MVEX_Vmovnrngoapd_m_k1_zmm, MVEX_Vmovnraps_m_k1_zmm, MVEX_Vmovnrngoaps_m_k1_zmm, MVEX_Vaddps_zmm_k1_zmm_zmmmt, MVEX_Vaddpd_zmm_k1_zmm_zmmmt, MVEX_Vmulps_zmm_k1_zmm_zmmmt, MVEX_Vmulpd_zmm_k1_zmm_zmmmt, MVEX_Vcvtps2pd_zmm_k1_zmmmt, MVEX_Vcvtpd2ps_zmm_k1_zmmmt, MVEX_Vsubps_zmm_k1_zmm_zmmmt, MVEX_Vsubpd_zmm_k1_zmm_zmmmt, MVEX_Vpcmpgtd_kr_k1_zmm_zmmmt, MVEX_Vmovdqa32_zmm_k1_zmmmt, MVEX_Vmovdqa64_zmm_k1_zmmmt, MVEX_Vpshufd_zmm_k1_zmmmt_imm8, MVEX_Vpsrld_zmm_k1_zmmmt_imm8, MVEX_Vpsrad_zmm_k1_zmmmt_imm8, MVEX_Vpslld_zmm_k1_zmmmt_imm8, MVEX_Vpcmpeqd_kr_k1_zmm_zmmmt, MVEX_Vcvtudq2pd_zmm_k1_zmmmt, MVEX_Vmovdqa32_mt_k1_zmm, MVEX_Vmovdqa64_mt_k1_zmm, MVEX_Clevict1_m, MVEX_Clevict0_m, MVEX_Vcmpps_kr_k1_zmm_zmmmt_imm8, MVEX_Vcmppd_kr_k1_zmm_zmmmt_imm8, MVEX_Vpandd_zmm_k1_zmm_zmmmt, MVEX_Vpandq_zmm_k1_zmm_zmmmt, MVEX_Vpandnd_zmm_k1_zmm_zmmmt, MVEX_Vpandnq_zmm_k1_zmm_zmmmt, MVEX_Vcvtdq2pd_zmm_k1_zmmmt, MVEX_Vpord_zmm_k1_zmm_zmmmt, MVEX_Vporq_zmm_k1_zmm_zmmmt, MVEX_Vpxord_zmm_k1_zmm_zmmmt, MVEX_Vpxorq_zmm_k1_zmm_zmmmt, MVEX_Vpsubd_zmm_k1_zmm_zmmmt, MVEX_Vpaddd_zmm_k1_zmm_zmmmt, MVEX_Vbroadcastss_zmm_k1_mt, MVEX_Vbroadcastsd_zmm_k1_mt, MVEX_Vbroadcastf32x4_zmm_k1_mt, MVEX_Vbroadcastf64x4_zmm_k1_mt, MVEX_Vptestmd_kr_k1_zmm_zmmmt, MVEX_Vpermd_zmm_k1_zmm_zmmmt, MVEX_Vpminsd_zmm_k1_zmm_zmmmt, MVEX_Vpminud_zmm_k1_zmm_zmmmt, MVEX_Vpmaxsd_zmm_k1_zmm_zmmmt, MVEX_Vpmaxud_zmm_k1_zmm_zmmmt, MVEX_Vpmulld_zmm_k1_zmm_zmmmt, MVEX_Vgetexpps_zmm_k1_zmmmt, MVEX_Vgetexppd_zmm_k1_zmmmt, MVEX_Vpsrlvd_zmm_k1_zmm_zmmmt, MVEX_Vpsravd_zmm_k1_zmm_zmmmt, MVEX_Vpsllvd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_48, 
MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_49, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4A, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4B, MVEX_Vaddnps_zmm_k1_zmm_zmmmt, MVEX_Vaddnpd_zmm_k1_zmm_zmmmt, MVEX_Vgmaxabsps_zmm_k1_zmm_zmmmt, MVEX_Vgminps_zmm_k1_zmm_zmmmt, MVEX_Vgminpd_zmm_k1_zmm_zmmmt, MVEX_Vgmaxps_zmm_k1_zmm_zmmmt, MVEX_Vgmaxpd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_54, MVEX_Vfixupnanps_zmm_k1_zmm_zmmmt, MVEX_Vfixupnanpd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_56, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_57, MVEX_Vpbroadcastd_zmm_k1_mt, MVEX_Vpbroadcastq_zmm_k1_mt, MVEX_Vbroadcasti32x4_zmm_k1_mt, MVEX_Vbroadcasti64x4_zmm_k1_mt, MVEX_Vpadcd_zmm_k1_kr_zmmmt, MVEX_Vpaddsetcd_zmm_k1_kr_zmmmt, MVEX_Vpsbbd_zmm_k1_kr_zmmmt, MVEX_Vpsubsetbd_zmm_k1_kr_zmmmt, MVEX_Vpblendmd_zmm_k1_zmm_zmmmt, MVEX_Vpblendmq_zmm_k1_zmm_zmmmt, MVEX_Vblendmps_zmm_k1_zmm_zmmmt, MVEX_Vblendmpd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_67, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_68, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_69, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6A, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6B, MVEX_Vpsubrd_zmm_k1_zmm_zmmmt, MVEX_Vsubrps_zmm_k1_zmm_zmmmt, MVEX_Vsubrpd_zmm_k1_zmm_zmmmt, MVEX_Vpsbbrd_zmm_k1_kr_zmmmt, MVEX_Vpsubrsetbd_zmm_k1_kr_zmmmt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_70, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_71, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_72, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_73, MVEX_Vpcmpltd_kr_k1_zmm_zmmmt, MVEX_Vscaleps_zmm_k1_zmm_zmmmt, MVEX_Vpmulhud_zmm_k1_zmm_zmmmt, MVEX_Vpmulhd_zmm_k1_zmm_zmmmt, MVEX_Vpgatherdd_zmm_k1_mvt, MVEX_Vpgatherdq_zmm_k1_mvt, MVEX_Vgatherdps_zmm_k1_mvt, MVEX_Vgatherdpd_zmm_k1_mvt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_94, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_94, MVEX_Vfmadd132ps_zmm_k1_zmm_zmmmt, MVEX_Vfmadd132pd_zmm_k1_zmm_zmmmt, MVEX_Vfmsub132ps_zmm_k1_zmm_zmmmt, MVEX_Vfmsub132pd_zmm_k1_zmm_zmmmt, 
MVEX_Vfnmadd132ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmadd132pd_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub132ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub132pd_zmm_k1_zmm_zmmmt, MVEX_Vpscatterdd_mvt_k1_zmm, MVEX_Vpscatterdq_mvt_k1_zmm, MVEX_Vscatterdps_mvt_k1_zmm, MVEX_Vscatterdpd_mvt_k1_zmm, MVEX_Vfmadd233ps_zmm_k1_zmm_zmmmt, MVEX_Vfmadd213ps_zmm_k1_zmm_zmmmt, MVEX_Vfmadd213pd_zmm_k1_zmm_zmmmt, MVEX_Vfmsub213ps_zmm_k1_zmm_zmmmt, MVEX_Vfmsub213pd_zmm_k1_zmm_zmmmt, MVEX_Vfnmadd213ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmadd213pd_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub213ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub213pd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B0, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B2, MVEX_Vpmadd233d_zmm_k1_zmm_zmmmt, MVEX_Vpmadd231d_zmm_k1_zmm_zmmmt, MVEX_Vfmadd231ps_zmm_k1_zmm_zmmmt, MVEX_Vfmadd231pd_zmm_k1_zmm_zmmmt, MVEX_Vfmsub231ps_zmm_k1_zmm_zmmmt, MVEX_Vfmsub231pd_zmm_k1_zmm_zmmmt, MVEX_Vfnmadd231ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmadd231pd_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub231ps_zmm_k1_zmm_zmmmt, MVEX_Vfnmsub231pd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_C0, MVEX_Vgatherpf0hintdps_mvt_k1, MVEX_Vgatherpf0hintdpd_mvt_k1, MVEX_Vgatherpf0dps_mvt_k1, MVEX_Vgatherpf1dps_mvt_k1, MVEX_Vscatterpf0hintdps_mvt_k1, MVEX_Vscatterpf0hintdpd_mvt_k1, MVEX_Vscatterpf0dps_mvt_k1, MVEX_Vscatterpf1dps_mvt_k1, MVEX_Vexp223ps_zmm_k1_zmmmt, MVEX_Vlog2ps_zmm_k1_zmmmt, MVEX_Vrcp23ps_zmm_k1_zmmmt, MVEX_Vrsqrt23ps_zmm_k1_zmmmt, MVEX_Vaddsetsps_zmm_k1_zmm_zmmmt, MVEX_Vpaddsetsd_zmm_k1_zmm_zmmmt, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CE, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_CE, MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CF, MVEX_Vloadunpackld_zmm_k1_mt, MVEX_Vloadunpacklq_zmm_k1_mt, MVEX_Vpackstoreld_mt_k1_zmm, MVEX_Vpackstorelq_mt_k1_zmm, MVEX_Vloadunpacklps_zmm_k1_mt, MVEX_Vloadunpacklpd_zmm_k1_mt, MVEX_Vpackstorelps_mt_k1_zmm, MVEX_Vpackstorelpd_mt_k1_zmm, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D2, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D2, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D3, 
MVEX_Vloadunpackhd_zmm_k1_mt, MVEX_Vloadunpackhq_zmm_k1_mt, MVEX_Vpackstorehd_mt_k1_zmm, MVEX_Vpackstorehq_mt_k1_zmm, MVEX_Vloadunpackhps_zmm_k1_mt, MVEX_Vloadunpackhpd_zmm_k1_mt, MVEX_Vpackstorehps_mt_k1_zmm, MVEX_Vpackstorehpd_mt_k1_zmm, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D6, MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D6, MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D7, MVEX_Valignd_zmm_k1_zmm_zmmmt_imm8, MVEX_Vpermf32x4_zmm_k1_zmmmt_imm8, MVEX_Vpcmpud_kr_k1_zmm_zmmmt_imm8, MVEX_Vpcmpd_kr_k1_zmm_zmmmt_imm8, MVEX_Vgetmantps_zmm_k1_zmmmt_imm8, MVEX_Vgetmantpd_zmm_k1_zmmmt_imm8, MVEX_Vrndfxpntps_zmm_k1_zmmmt_imm8, MVEX_Vrndfxpntpd_zmm_k1_zmmmt_imm8, MVEX_Vcvtfxpntudq2ps_zmm_k1_zmmmt_imm8, MVEX_Vcvtfxpntps2udq_zmm_k1_zmmmt_imm8, MVEX_Vcvtfxpntpd2udq_zmm_k1_zmmmt_imm8, MVEX_Vcvtfxpntdq2ps_zmm_k1_zmmmt_imm8, MVEX_Vcvtfxpntps2dq_zmm_k1_zmmmt_imm8, MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D0, MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D1, MVEX_Vcvtfxpntpd2dq_zmm_k1_zmmmt_imm8, Via_undoc_F30FA6F0_16, Via_undoc_F30FA6F0_32, Via_undoc_F30FA6F0_64, Via_undoc_F30FA6F8_16, Via_undoc_F30FA6F8_32, Via_undoc_F30FA6F8_64, Xsha512_16, Xsha512_32, Xsha512_64, Xstore_alt_16, Xstore_alt_32, Xstore_alt_64, Xsha512_alt_16, Xsha512_alt_32, Xsha512_alt_64, Zero_bytes, Wrmsrns, Wrmsrlist, Rdmsrlist, Rmpquery, Prefetchit1_m8, Prefetchit0_m8, Aadd_m32_r32, Aadd_m64_r64, Aand_m32_r32, Aand_m64_r64, Axor_m32_r32, Axor_m64_r64, Aor_m32_r32, Aor_m64_r64, VEX_Vpdpbuud_xmm_xmm_xmmm128, VEX_Vpdpbuud_ymm_ymm_ymmm256, VEX_Vpdpbsud_xmm_xmm_xmmm128, VEX_Vpdpbsud_ymm_ymm_ymmm256, VEX_Vpdpbssd_xmm_xmm_xmmm128, VEX_Vpdpbssd_ymm_ymm_ymmm256, VEX_Vpdpbuuds_xmm_xmm_xmmm128, VEX_Vpdpbuuds_ymm_ymm_ymmm256, VEX_Vpdpbsuds_xmm_xmm_xmmm128, VEX_Vpdpbsuds_ymm_ymm_ymmm256, VEX_Vpdpbssds_xmm_xmm_xmmm128, VEX_Vpdpbssds_ymm_ymm_ymmm256, VEX_Tdpfp16ps_tmm_tmm_tmm, VEX_Vcvtneps2bf16_xmm_xmmm128, VEX_Vcvtneps2bf16_xmm_ymmm256, VEX_Vcvtneoph2ps_xmm_m128, VEX_Vcvtneoph2ps_ymm_m256, VEX_Vcvtneeph2ps_xmm_m128, 
VEX_Vcvtneeph2ps_ymm_m256, VEX_Vcvtneebf162ps_xmm_m128, VEX_Vcvtneebf162ps_ymm_m256, VEX_Vcvtneobf162ps_xmm_m128, VEX_Vcvtneobf162ps_ymm_m256, VEX_Vbcstnesh2ps_xmm_m16, VEX_Vbcstnesh2ps_ymm_m16, VEX_Vbcstnebf162ps_xmm_m16, VEX_Vbcstnebf162ps_ymm_m16, VEX_Vpmadd52luq_xmm_xmm_xmmm128, VEX_Vpmadd52luq_ymm_ymm_ymmm256, VEX_Vpmadd52huq_xmm_xmm_xmmm128, VEX_Vpmadd52huq_ymm_ymm_ymmm256, VEX_Cmpoxadd_m32_r32_r32, VEX_Cmpoxadd_m64_r64_r64, VEX_Cmpnoxadd_m32_r32_r32, VEX_Cmpnoxadd_m64_r64_r64, VEX_Cmpbxadd_m32_r32_r32, VEX_Cmpbxadd_m64_r64_r64, VEX_Cmpnbxadd_m32_r32_r32, VEX_Cmpnbxadd_m64_r64_r64, VEX_Cmpzxadd_m32_r32_r32, VEX_Cmpzxadd_m64_r64_r64, VEX_Cmpnzxadd_m32_r32_r32, VEX_Cmpnzxadd_m64_r64_r64, VEX_Cmpbexadd_m32_r32_r32, VEX_Cmpbexadd_m64_r64_r64, VEX_Cmpnbexadd_m32_r32_r32, VEX_Cmpnbexadd_m64_r64_r64, VEX_Cmpsxadd_m32_r32_r32, VEX_Cmpsxadd_m64_r64_r64, VEX_Cmpnsxadd_m32_r32_r32, VEX_Cmpnsxadd_m64_r64_r64, VEX_Cmppxadd_m32_r32_r32, VEX_Cmppxadd_m64_r64_r64, VEX_Cmpnpxadd_m32_r32_r32, VEX_Cmpnpxadd_m64_r64_r64, VEX_Cmplxadd_m32_r32_r32, VEX_Cmplxadd_m64_r64_r64, VEX_Cmpnlxadd_m32_r32_r32, VEX_Cmpnlxadd_m64_r64_r64, VEX_Cmplexadd_m32_r32_r32, VEX_Cmplexadd_m64_r64_r64, VEX_Cmpnlexadd_m32_r32_r32, VEX_Cmpnlexadd_m64_r64_r64,
}
Expand description

x86 instruction code

Variants (Non-exhaustive)§

This enum is marked as non-exhaustive
Non-exhaustive enums may have additional variants added in the future. Therefore, when matching against variants of non-exhaustive enums, an extra wildcard arm must be added to account for any future variants.
§

INVALID

It’s an invalid instruction, e.g. it’s a new unknown instruction, it’s garbage, or there are not enough bytes to decode the instruction, etc.

§

DeclareByte

A db/.byte asm directive that can store 1-16 bytes

§

DeclareWord

A dw/.word asm directive that can store 1-8 words

§

DeclareDword

A dd/.int asm directive that can store 1-4 dwords

§

DeclareQword

A dq/.quad asm directive that can store 1-2 qwords

§

Add_rm8_r8

ADD r/m8, r8

00 /r

8086+

16/32/64-bit

§

Add_rm16_r16

ADD r/m16, r16

o16 01 /r

8086+

16/32/64-bit

§

Add_rm32_r32

ADD r/m32, r32

o32 01 /r

386+

16/32/64-bit

§

Add_rm64_r64

ADD r/m64, r64

o64 01 /r

X64

64-bit

§

Add_r8_rm8

ADD r8, r/m8

02 /r

8086+

16/32/64-bit

§

Add_r16_rm16

ADD r16, r/m16

o16 03 /r

8086+

16/32/64-bit

§

Add_r32_rm32

ADD r32, r/m32

o32 03 /r

386+

16/32/64-bit

§

Add_r64_rm64

ADD r64, r/m64

o64 03 /r

X64

64-bit

§

Add_AL_imm8

ADD AL, imm8

04 ib

8086+

16/32/64-bit

§

Add_AX_imm16

ADD AX, imm16

o16 05 iw

8086+

16/32/64-bit

§

Add_EAX_imm32

ADD EAX, imm32

o32 05 id

386+

16/32/64-bit

§

Add_RAX_imm32

ADD RAX, imm32

o64 05 id

X64

64-bit

§

Pushw_ES

PUSH ES

o16 06

8086+

16/32-bit

§

Pushd_ES

PUSH ES

o32 06

386+

16/32-bit

§

Popw_ES

POP ES

o16 07

8086+

16/32-bit

§

Popd_ES

POP ES

o32 07

386+

16/32-bit

§

Or_rm8_r8

OR r/m8, r8

08 /r

8086+

16/32/64-bit

§

Or_rm16_r16

OR r/m16, r16

o16 09 /r

8086+

16/32/64-bit

§

Or_rm32_r32

OR r/m32, r32

o32 09 /r

386+

16/32/64-bit

§

Or_rm64_r64

OR r/m64, r64

o64 09 /r

X64

64-bit

§

Or_r8_rm8

OR r8, r/m8

0A /r

8086+

16/32/64-bit

§

Or_r16_rm16

OR r16, r/m16

o16 0B /r

8086+

16/32/64-bit

§

Or_r32_rm32

OR r32, r/m32

o32 0B /r

386+

16/32/64-bit

§

Or_r64_rm64

OR r64, r/m64

o64 0B /r

X64

64-bit

§

Or_AL_imm8

OR AL, imm8

0C ib

8086+

16/32/64-bit

§

Or_AX_imm16

OR AX, imm16

o16 0D iw

8086+

16/32/64-bit

§

Or_EAX_imm32

OR EAX, imm32

o32 0D id

386+

16/32/64-bit

§

Or_RAX_imm32

OR RAX, imm32

o64 0D id

X64

64-bit

§

Pushw_CS

PUSH CS

o16 0E

8086+

16/32-bit

§

Pushd_CS

PUSH CS

o32 0E

386+

16/32-bit

§

Popw_CS

POP CS

o16 0F

8086

16-bit

§

Adc_rm8_r8

ADC r/m8, r8

10 /r

8086+

16/32/64-bit

§

Adc_rm16_r16

ADC r/m16, r16

o16 11 /r

8086+

16/32/64-bit

§

Adc_rm32_r32

ADC r/m32, r32

o32 11 /r

386+

16/32/64-bit

§

Adc_rm64_r64

ADC r/m64, r64

o64 11 /r

X64

64-bit

§

Adc_r8_rm8

ADC r8, r/m8

12 /r

8086+

16/32/64-bit

§

Adc_r16_rm16

ADC r16, r/m16

o16 13 /r

8086+

16/32/64-bit

§

Adc_r32_rm32

ADC r32, r/m32

o32 13 /r

386+

16/32/64-bit

§

Adc_r64_rm64

ADC r64, r/m64

o64 13 /r

X64

64-bit

§

Adc_AL_imm8

ADC AL, imm8

14 ib

8086+

16/32/64-bit

§

Adc_AX_imm16

ADC AX, imm16

o16 15 iw

8086+

16/32/64-bit

§

Adc_EAX_imm32

ADC EAX, imm32

o32 15 id

386+

16/32/64-bit

§

Adc_RAX_imm32

ADC RAX, imm32

o64 15 id

X64

64-bit

§

Pushw_SS

PUSH SS

o16 16

8086+

16/32-bit

§

Pushd_SS

PUSH SS

o32 16

386+

16/32-bit

§

Popw_SS

POP SS

o16 17

8086+

16/32-bit

§

Popd_SS

POP SS

o32 17

386+

16/32-bit

§

Sbb_rm8_r8

SBB r/m8, r8

18 /r

8086+

16/32/64-bit

§

Sbb_rm16_r16

SBB r/m16, r16

o16 19 /r

8086+

16/32/64-bit

§

Sbb_rm32_r32

SBB r/m32, r32

o32 19 /r

386+

16/32/64-bit

§

Sbb_rm64_r64

SBB r/m64, r64

o64 19 /r

X64

64-bit

§

Sbb_r8_rm8

SBB r8, r/m8

1A /r

8086+

16/32/64-bit

§

Sbb_r16_rm16

SBB r16, r/m16

o16 1B /r

8086+

16/32/64-bit

§

Sbb_r32_rm32

SBB r32, r/m32

o32 1B /r

386+

16/32/64-bit

§

Sbb_r64_rm64

SBB r64, r/m64

o64 1B /r

X64

64-bit

§

Sbb_AL_imm8

SBB AL, imm8

1C ib

8086+

16/32/64-bit

§

Sbb_AX_imm16

SBB AX, imm16

o16 1D iw

8086+

16/32/64-bit

§

Sbb_EAX_imm32

SBB EAX, imm32

o32 1D id

386+

16/32/64-bit

§

Sbb_RAX_imm32

SBB RAX, imm32

o64 1D id

X64

64-bit

§

Pushw_DS

PUSH DS

o16 1E

8086+

16/32-bit

§

Pushd_DS

PUSH DS

o32 1E

386+

16/32-bit

§

Popw_DS

POP DS

o16 1F

8086+

16/32-bit

§

Popd_DS

POP DS

o32 1F

386+

16/32-bit

§

And_rm8_r8

AND r/m8, r8

20 /r

8086+

16/32/64-bit

§

And_rm16_r16

AND r/m16, r16

o16 21 /r

8086+

16/32/64-bit

§

And_rm32_r32

AND r/m32, r32

o32 21 /r

386+

16/32/64-bit

§

And_rm64_r64

AND r/m64, r64

o64 21 /r

X64

64-bit

§

And_r8_rm8

AND r8, r/m8

22 /r

8086+

16/32/64-bit

§

And_r16_rm16

AND r16, r/m16

o16 23 /r

8086+

16/32/64-bit

§

And_r32_rm32

AND r32, r/m32

o32 23 /r

386+

16/32/64-bit

§

And_r64_rm64

AND r64, r/m64

o64 23 /r

X64

64-bit

§

And_AL_imm8

AND AL, imm8

24 ib

8086+

16/32/64-bit

§

And_AX_imm16

AND AX, imm16

o16 25 iw

8086+

16/32/64-bit

§

And_EAX_imm32

AND EAX, imm32

o32 25 id

386+

16/32/64-bit

§

And_RAX_imm32

AND RAX, imm32

o64 25 id

X64

64-bit

§

Daa

DAA

27

8086+

16/32-bit

§

Sub_rm8_r8

SUB r/m8, r8

28 /r

8086+

16/32/64-bit

§

Sub_rm16_r16

SUB r/m16, r16

o16 29 /r

8086+

16/32/64-bit

§

Sub_rm32_r32

SUB r/m32, r32

o32 29 /r

386+

16/32/64-bit

§

Sub_rm64_r64

SUB r/m64, r64

o64 29 /r

X64

64-bit

§

Sub_r8_rm8

SUB r8, r/m8

2A /r

8086+

16/32/64-bit

§

Sub_r16_rm16

SUB r16, r/m16

o16 2B /r

8086+

16/32/64-bit

§

Sub_r32_rm32

SUB r32, r/m32

o32 2B /r

386+

16/32/64-bit

§

Sub_r64_rm64

SUB r64, r/m64

o64 2B /r

X64

64-bit

§

Sub_AL_imm8

SUB AL, imm8

2C ib

8086+

16/32/64-bit

§

Sub_AX_imm16

SUB AX, imm16

o16 2D iw

8086+

16/32/64-bit

§

Sub_EAX_imm32

SUB EAX, imm32

o32 2D id

386+

16/32/64-bit

§

Sub_RAX_imm32

SUB RAX, imm32

o64 2D id

X64

64-bit

§

Das

DAS

2F

8086+

16/32-bit

§

Xor_rm8_r8

XOR r/m8, r8

30 /r

8086+

16/32/64-bit

§

Xor_rm16_r16

XOR r/m16, r16

o16 31 /r

8086+

16/32/64-bit

§

Xor_rm32_r32

XOR r/m32, r32

o32 31 /r

386+

16/32/64-bit

§

Xor_rm64_r64

XOR r/m64, r64

o64 31 /r

X64

64-bit

§

Xor_r8_rm8

XOR r8, r/m8

32 /r

8086+

16/32/64-bit

§

Xor_r16_rm16

XOR r16, r/m16

o16 33 /r

8086+

16/32/64-bit

§

Xor_r32_rm32

XOR r32, r/m32

o32 33 /r

386+

16/32/64-bit

§

Xor_r64_rm64

XOR r64, r/m64

o64 33 /r

X64

64-bit

§

Xor_AL_imm8

XOR AL, imm8

34 ib

8086+

16/32/64-bit

§

Xor_AX_imm16

XOR AX, imm16

o16 35 iw

8086+

16/32/64-bit

§

Xor_EAX_imm32

XOR EAX, imm32

o32 35 id

386+

16/32/64-bit

§

Xor_RAX_imm32

XOR RAX, imm32

o64 35 id

X64

64-bit

§

Aaa

AAA

37

8086+

16/32-bit

§

Cmp_rm8_r8

CMP r/m8, r8

38 /r

8086+

16/32/64-bit

§

Cmp_rm16_r16

CMP r/m16, r16

o16 39 /r

8086+

16/32/64-bit

§

Cmp_rm32_r32

CMP r/m32, r32

o32 39 /r

386+

16/32/64-bit

§

Cmp_rm64_r64

CMP r/m64, r64

o64 39 /r

X64

64-bit

§

Cmp_r8_rm8

CMP r8, r/m8

3A /r

8086+

16/32/64-bit

§

Cmp_r16_rm16

CMP r16, r/m16

o16 3B /r

8086+

16/32/64-bit

§

Cmp_r32_rm32

CMP r32, r/m32

o32 3B /r

386+

16/32/64-bit

§

Cmp_r64_rm64

CMP r64, r/m64

o64 3B /r

X64

64-bit

§

Cmp_AL_imm8

CMP AL, imm8

3C ib

8086+

16/32/64-bit

§

Cmp_AX_imm16

CMP AX, imm16

o16 3D iw

8086+

16/32/64-bit

§

Cmp_EAX_imm32

CMP EAX, imm32

o32 3D id

386+

16/32/64-bit

§

Cmp_RAX_imm32

CMP RAX, imm32

o64 3D id

X64

64-bit

§

Aas

AAS

3F

8086+

16/32-bit

§

Inc_r16

INC r16

o16 40+rw

8086+

16/32-bit

§

Inc_r32

INC r32

o32 40+rd

386+

16/32-bit

§

Dec_r16

DEC r16

o16 48+rw

8086+

16/32-bit

§

Dec_r32

DEC r32

o32 48+rd

386+

16/32-bit

§

Push_r16

PUSH r16

o16 50+rw

8086+

16/32/64-bit

§

Push_r32

PUSH r32

o32 50+rd

386+

16/32-bit

§

Push_r64

PUSH r64

o64 50+ro

X64

64-bit

§

Pop_r16

POP r16

o16 58+rw

8086+

16/32/64-bit

§

Pop_r32

POP r32

o32 58+rd

386+

16/32-bit

§

Pop_r64

POP r64

o64 58+ro

X64

64-bit

§

Pushaw

PUSHA

o16 60

186+

16/32-bit

§

Pushad

PUSHAD

o32 60

386+

16/32-bit

§

Popaw

POPA

o16 61

186+

16/32-bit

§

Popad

POPAD

o32 61

386+

16/32-bit

§

Bound_r16_m1616

BOUND r16, m16&16

o16 62 /r

186+

16/32-bit

§

Bound_r32_m3232

BOUND r32, m32&32

o32 62 /r

386+

16/32-bit

§

Arpl_rm16_r16

ARPL r/m16, r16

o16 63 /r

286+

16/32-bit

§

Arpl_r32m16_r32

ARPL r32/m16, r32

o32 63 /r

386+

16/32-bit

§

Movsxd_r16_rm16

MOVSXD r16, r/m16

o16 63 /r

X64

64-bit

§

Movsxd_r32_rm32

MOVSXD r32, r/m32

o32 63 /r

X64

64-bit

§

Movsxd_r64_rm32

MOVSXD r64, r/m32

o64 63 /r

X64

64-bit

§

Push_imm16

PUSH imm16

o16 68 iw

186+

16/32/64-bit

§

Pushd_imm32

PUSH imm32

o32 68 id

386+

16/32-bit

§

Pushq_imm32

PUSH imm32

o64 68 id

X64

64-bit

§

Imul_r16_rm16_imm16

IMUL r16, r/m16, imm16

o16 69 /r iw

186+

16/32/64-bit

§

Imul_r32_rm32_imm32

IMUL r32, r/m32, imm32

o32 69 /r id

386+

16/32/64-bit

§

Imul_r64_rm64_imm32

IMUL r64, r/m64, imm32

o64 69 /r id

X64

64-bit

§

Pushw_imm8

PUSH imm8

o16 6A ib

186+

16/32/64-bit

§

Pushd_imm8

PUSH imm8

o32 6A ib

386+

16/32-bit

§

Pushq_imm8

PUSH imm8

o64 6A ib

X64

64-bit

§

Imul_r16_rm16_imm8

IMUL r16, r/m16, imm8

o16 6B /r ib

186+

16/32/64-bit

§

Imul_r32_rm32_imm8

IMUL r32, r/m32, imm8

o32 6B /r ib

386+

16/32/64-bit

§

Imul_r64_rm64_imm8

IMUL r64, r/m64, imm8

o64 6B /r ib

X64

64-bit

§

Insb_m8_DX

INSB

6C

186+

16/32/64-bit

§

Insw_m16_DX

INSW

o16 6D

186+

16/32/64-bit

§

Insd_m32_DX

INSD

o32 6D

386+

16/32/64-bit

§

Outsb_DX_m8

OUTSB

6E

186+

16/32/64-bit

§

Outsw_DX_m16

OUTSW

o16 6F

186+

16/32/64-bit

§

Outsd_DX_m32

OUTSD

o32 6F

386+

16/32/64-bit

§

Jo_rel8_16

JO rel8

o16 70 cb

8086+

16/32/64-bit

§

Jo_rel8_32

JO rel8

o32 70 cb

386+

16/32-bit

§

Jo_rel8_64

JO rel8

o64 70 cb

X64

64-bit

§

Jno_rel8_16

JNO rel8

o16 71 cb

8086+

16/32/64-bit

§

Jno_rel8_32

JNO rel8

o32 71 cb

386+

16/32-bit

§

Jno_rel8_64

JNO rel8

o64 71 cb

X64

64-bit

§

Jb_rel8_16

JB rel8

o16 72 cb

8086+

16/32/64-bit

§

Jb_rel8_32

JB rel8

o32 72 cb

386+

16/32-bit

§

Jb_rel8_64

JB rel8

o64 72 cb

X64

64-bit

§

Jae_rel8_16

JAE rel8

o16 73 cb

8086+

16/32/64-bit

§

Jae_rel8_32

JAE rel8

o32 73 cb

386+

16/32-bit

§

Jae_rel8_64

JAE rel8

o64 73 cb

X64

64-bit

§

Je_rel8_16

JE rel8

o16 74 cb

8086+

16/32/64-bit

§

Je_rel8_32

JE rel8

o32 74 cb

386+

16/32-bit

§

Je_rel8_64

JE rel8

o64 74 cb

X64

64-bit

§

Jne_rel8_16

JNE rel8

o16 75 cb

8086+

16/32/64-bit

§

Jne_rel8_32

JNE rel8

o32 75 cb

386+

16/32-bit

§

Jne_rel8_64

JNE rel8

o64 75 cb

X64

64-bit

§

Jbe_rel8_16

JBE rel8

o16 76 cb

8086+

16/32/64-bit

§

Jbe_rel8_32

JBE rel8

o32 76 cb

386+

16/32-bit

§

Jbe_rel8_64

JBE rel8

o64 76 cb

X64

64-bit

§

Ja_rel8_16

JA rel8

o16 77 cb

8086+

16/32/64-bit

§

Ja_rel8_32

JA rel8

o32 77 cb

386+

16/32-bit

§

Ja_rel8_64

JA rel8

o64 77 cb

X64

64-bit

§

Js_rel8_16

JS rel8

o16 78 cb

8086+

16/32/64-bit

§

Js_rel8_32

JS rel8

o32 78 cb

386+

16/32-bit

§

Js_rel8_64

JS rel8

o64 78 cb

X64

64-bit

§

Jns_rel8_16

JNS rel8

o16 79 cb

8086+

16/32/64-bit

§

Jns_rel8_32

JNS rel8

o32 79 cb

386+

16/32-bit

§

Jns_rel8_64

JNS rel8

o64 79 cb

X64

64-bit

§

Jp_rel8_16

JP rel8

o16 7A cb

8086+

16/32/64-bit

§

Jp_rel8_32

JP rel8

o32 7A cb

386+

16/32-bit

§

Jp_rel8_64

JP rel8

o64 7A cb

X64

64-bit

§

Jnp_rel8_16

JNP rel8

o16 7B cb

8086+

16/32/64-bit

§

Jnp_rel8_32

JNP rel8

o32 7B cb

386+

16/32-bit

§

Jnp_rel8_64

JNP rel8

o64 7B cb

X64

64-bit

§

Jl_rel8_16

JL rel8

o16 7C cb

8086+

16/32/64-bit

§

Jl_rel8_32

JL rel8

o32 7C cb

386+

16/32-bit

§

Jl_rel8_64

JL rel8

o64 7C cb

X64

64-bit

§

Jge_rel8_16

JGE rel8

o16 7D cb

8086+

16/32/64-bit

§

Jge_rel8_32

JGE rel8

o32 7D cb

386+

16/32-bit

§

Jge_rel8_64

JGE rel8

o64 7D cb

X64

64-bit

§

Jle_rel8_16

JLE rel8

o16 7E cb

8086+

16/32/64-bit

§

Jle_rel8_32

JLE rel8

o32 7E cb

386+

16/32-bit

§

Jle_rel8_64

JLE rel8

o64 7E cb

X64

64-bit

§

Jg_rel8_16

JG rel8

o16 7F cb

8086+

16/32/64-bit

§

Jg_rel8_32

JG rel8

o32 7F cb

386+

16/32-bit

§

Jg_rel8_64

JG rel8

o64 7F cb

X64

64-bit

§

Add_rm8_imm8

ADD r/m8, imm8

80 /0 ib

8086+

16/32/64-bit

§

Or_rm8_imm8

OR r/m8, imm8

80 /1 ib

8086+

16/32/64-bit

§

Adc_rm8_imm8

ADC r/m8, imm8

80 /2 ib

8086+

16/32/64-bit

§

Sbb_rm8_imm8

SBB r/m8, imm8

80 /3 ib

8086+

16/32/64-bit

§

And_rm8_imm8

AND r/m8, imm8

80 /4 ib

8086+

16/32/64-bit

§

Sub_rm8_imm8

SUB r/m8, imm8

80 /5 ib

8086+

16/32/64-bit

§

Xor_rm8_imm8

XOR r/m8, imm8

80 /6 ib

8086+

16/32/64-bit

§

Cmp_rm8_imm8

CMP r/m8, imm8

80 /7 ib

8086+

16/32/64-bit

§

Add_rm16_imm16

ADD r/m16, imm16

o16 81 /0 iw

8086+

16/32/64-bit

§

Add_rm32_imm32

ADD r/m32, imm32

o32 81 /0 id

386+

16/32/64-bit

§

Add_rm64_imm32

ADD r/m64, imm32

o64 81 /0 id

X64

64-bit

§

Or_rm16_imm16

OR r/m16, imm16

o16 81 /1 iw

8086+

16/32/64-bit

§

Or_rm32_imm32

OR r/m32, imm32

o32 81 /1 id

386+

16/32/64-bit

§

Or_rm64_imm32

OR r/m64, imm32

o64 81 /1 id

X64

64-bit

§

Adc_rm16_imm16

ADC r/m16, imm16

o16 81 /2 iw

8086+

16/32/64-bit

§

Adc_rm32_imm32

ADC r/m32, imm32

o32 81 /2 id

386+

16/32/64-bit

§

Adc_rm64_imm32

ADC r/m64, imm32

o64 81 /2 id

X64

64-bit

§

Sbb_rm16_imm16

SBB r/m16, imm16

o16 81 /3 iw

8086+

16/32/64-bit

§

Sbb_rm32_imm32

SBB r/m32, imm32

o32 81 /3 id

386+

16/32/64-bit

§

Sbb_rm64_imm32

SBB r/m64, imm32

o64 81 /3 id

X64

64-bit

§

And_rm16_imm16

AND r/m16, imm16

o16 81 /4 iw

8086+

16/32/64-bit

§

And_rm32_imm32

AND r/m32, imm32

o32 81 /4 id

386+

16/32/64-bit

§

And_rm64_imm32

AND r/m64, imm32

o64 81 /4 id

X64

64-bit

§

Sub_rm16_imm16

SUB r/m16, imm16

o16 81 /5 iw

8086+

16/32/64-bit

§

Sub_rm32_imm32

SUB r/m32, imm32

o32 81 /5 id

386+

16/32/64-bit

§

Sub_rm64_imm32

SUB r/m64, imm32

o64 81 /5 id

X64

64-bit

§

Xor_rm16_imm16

XOR r/m16, imm16

o16 81 /6 iw

8086+

16/32/64-bit

§

Xor_rm32_imm32

XOR r/m32, imm32

o32 81 /6 id

386+

16/32/64-bit

§

Xor_rm64_imm32

XOR r/m64, imm32

o64 81 /6 id

X64

64-bit

§

Cmp_rm16_imm16

CMP r/m16, imm16

o16 81 /7 iw

8086+

16/32/64-bit

§

Cmp_rm32_imm32

CMP r/m32, imm32

o32 81 /7 id

386+

16/32/64-bit

§

Cmp_rm64_imm32

CMP r/m64, imm32

o64 81 /7 id

X64

64-bit

§

Add_rm8_imm8_82

ADD r/m8, imm8

82 /0 ib

8086+

16/32-bit

§

Or_rm8_imm8_82

OR r/m8, imm8

82 /1 ib

8086+

16/32-bit

§

Adc_rm8_imm8_82

ADC r/m8, imm8

82 /2 ib

8086+

16/32-bit

§

Sbb_rm8_imm8_82

SBB r/m8, imm8

82 /3 ib

8086+

16/32-bit

§

And_rm8_imm8_82

AND r/m8, imm8

82 /4 ib

8086+

16/32-bit

§

Sub_rm8_imm8_82

SUB r/m8, imm8

82 /5 ib

8086+

16/32-bit

§

Xor_rm8_imm8_82

XOR r/m8, imm8

82 /6 ib

8086+

16/32-bit

§

Cmp_rm8_imm8_82

CMP r/m8, imm8

82 /7 ib

8086+

16/32-bit

§

Add_rm16_imm8

ADD r/m16, imm8

o16 83 /0 ib

8086+

16/32/64-bit

§

Add_rm32_imm8

ADD r/m32, imm8

o32 83 /0 ib

386+

16/32/64-bit

§

Add_rm64_imm8

ADD r/m64, imm8

o64 83 /0 ib

X64

64-bit

§

Or_rm16_imm8

OR r/m16, imm8

o16 83 /1 ib

8086+

16/32/64-bit

§

Or_rm32_imm8

OR r/m32, imm8

o32 83 /1 ib

386+

16/32/64-bit

§

Or_rm64_imm8

OR r/m64, imm8

o64 83 /1 ib

X64

64-bit

§

Adc_rm16_imm8

ADC r/m16, imm8

o16 83 /2 ib

8086+

16/32/64-bit

§

Adc_rm32_imm8

ADC r/m32, imm8

o32 83 /2 ib

386+

16/32/64-bit

§

Adc_rm64_imm8

ADC r/m64, imm8

o64 83 /2 ib

X64

64-bit

§

Sbb_rm16_imm8

SBB r/m16, imm8

o16 83 /3 ib

8086+

16/32/64-bit

§

Sbb_rm32_imm8

SBB r/m32, imm8

o32 83 /3 ib

386+

16/32/64-bit

§

Sbb_rm64_imm8

SBB r/m64, imm8

o64 83 /3 ib

X64

64-bit

§

And_rm16_imm8

AND r/m16, imm8

o16 83 /4 ib

8086+

16/32/64-bit

§

And_rm32_imm8

AND r/m32, imm8

o32 83 /4 ib

386+

16/32/64-bit

§

And_rm64_imm8

AND r/m64, imm8

o64 83 /4 ib

X64

64-bit

§

Sub_rm16_imm8

SUB r/m16, imm8

o16 83 /5 ib

8086+

16/32/64-bit

§

Sub_rm32_imm8

SUB r/m32, imm8

o32 83 /5 ib

386+

16/32/64-bit

§

Sub_rm64_imm8

SUB r/m64, imm8

o64 83 /5 ib

X64

64-bit

§

Xor_rm16_imm8

XOR r/m16, imm8

o16 83 /6 ib

8086+

16/32/64-bit

§

Xor_rm32_imm8

XOR r/m32, imm8

o32 83 /6 ib

386+

16/32/64-bit

§

Xor_rm64_imm8

XOR r/m64, imm8

o64 83 /6 ib

X64

64-bit

§

Cmp_rm16_imm8

CMP r/m16, imm8

o16 83 /7 ib

8086+

16/32/64-bit

§

Cmp_rm32_imm8

CMP r/m32, imm8

o32 83 /7 ib

386+

16/32/64-bit

§

Cmp_rm64_imm8

CMP r/m64, imm8

o64 83 /7 ib

X64

64-bit

§

Test_rm8_r8

TEST r/m8, r8

84 /r

8086+

16/32/64-bit

§

Test_rm16_r16

TEST r/m16, r16

o16 85 /r

8086+

16/32/64-bit

§

Test_rm32_r32

TEST r/m32, r32

o32 85 /r

386+

16/32/64-bit

§

Test_rm64_r64

TEST r/m64, r64

o64 85 /r

X64

64-bit

§

Xchg_rm8_r8

XCHG r/m8, r8

86 /r

8086+

16/32/64-bit

§

Xchg_rm16_r16

XCHG r/m16, r16

o16 87 /r

8086+

16/32/64-bit

§

Xchg_rm32_r32

XCHG r/m32, r32

o32 87 /r

386+

16/32/64-bit

§

Xchg_rm64_r64

XCHG r/m64, r64

o64 87 /r

X64

64-bit

§

Mov_rm8_r8

MOV r/m8, r8

88 /r

8086+

16/32/64-bit

§

Mov_rm16_r16

MOV r/m16, r16

o16 89 /r

8086+

16/32/64-bit

§

Mov_rm32_r32

MOV r/m32, r32

o32 89 /r

386+

16/32/64-bit

§

Mov_rm64_r64

MOV r/m64, r64

o64 89 /r

X64

64-bit

§

Mov_r8_rm8

MOV r8, r/m8

8A /r

8086+

16/32/64-bit

§

Mov_r16_rm16

MOV r16, r/m16

o16 8B /r

8086+

16/32/64-bit

§

Mov_r32_rm32

MOV r32, r/m32

o32 8B /r

386+

16/32/64-bit

§

Mov_r64_rm64

MOV r64, r/m64

o64 8B /r

X64

64-bit

§

Mov_rm16_Sreg

MOV r/m16, Sreg

o16 8C /r

8086+

16/32/64-bit

§

Mov_r32m16_Sreg

MOV r32/m16, Sreg

o32 8C /r

386+

16/32/64-bit

§

Mov_r64m16_Sreg

MOV r64/m16, Sreg

o64 8C /r

X64

64-bit

§

Lea_r16_m

LEA r16, m

o16 8D /r

8086+

16/32/64-bit

§

Lea_r32_m

LEA r32, m

o32 8D /r

386+

16/32/64-bit

§

Lea_r64_m

LEA r64, m

o64 8D /r

X64

64-bit

§

Mov_Sreg_rm16

MOV Sreg, r/m16

o16 8E /r

8086+

16/32/64-bit

§

Mov_Sreg_r32m16

MOV Sreg, r32/m16

o32 8E /r

386+

16/32/64-bit

§

Mov_Sreg_r64m16

MOV Sreg, r64/m16

o64 8E /r

X64

64-bit

§

Pop_rm16

POP r/m16

o16 8F /0

8086+

16/32/64-bit

§

Pop_rm32

POP r/m32

o32 8F /0

386+

16/32-bit

§

Pop_rm64

POP r/m64

o64 8F /0

X64

64-bit

§

Nopw

NOP

o16 90

8086+

16/32/64-bit

§

Nopd

NOP

o32 90

8086+

16/32/64-bit

§

Nopq

NOP

o64 90

8086+

64-bit

§

Xchg_r16_AX

XCHG r16, AX

o16 90+rw

8086+

16/32/64-bit

§

Xchg_r32_EAX

XCHG r32, EAX

o32 90+rd

386+

16/32/64-bit

§

Xchg_r64_RAX

XCHG r64, RAX

o64 90+ro

X64

64-bit

§

Pause

PAUSE

F3 90

Pentium 4 or later

16/32/64-bit

§

Cbw

CBW

o16 98

8086+

16/32/64-bit

§

Cwde

CWDE

o32 98

386+

16/32/64-bit

§

Cdqe

CDQE

o64 98

X64

64-bit

§

Cwd

CWD

o16 99

8086+

16/32/64-bit

§

Cdq

CDQ

o32 99

386+

16/32/64-bit

§

Cqo

CQO

o64 99

X64

64-bit

§

Call_ptr1616

CALL ptr16:16

o16 9A cd

8086+

16/32-bit

§

Call_ptr1632

CALL ptr16:32

o32 9A cp

386+

16/32-bit

§

Wait

WAIT

9B

8086+

16/32/64-bit

§

Pushfw

PUSHF

o16 9C

8086+

16/32/64-bit

§

Pushfd

PUSHFD

o32 9C

386+

16/32-bit

§

Pushfq

PUSHFQ

o64 9C

X64

64-bit

§

Popfw

POPF

o16 9D

8086+

16/32/64-bit

§

Popfd

POPFD

o32 9D

386+

16/32-bit

§

Popfq

POPFQ

o64 9D

X64

64-bit

§

Sahf

SAHF

9E

8086+

16/32/64-bit

§

Lahf

LAHF

9F

8086+

16/32/64-bit

§

Mov_AL_moffs8

MOV AL, moffs8

A0 mo

8086+

16/32/64-bit

§

Mov_AX_moffs16

MOV AX, moffs16

o16 A1 mo

8086+

16/32/64-bit

§

Mov_EAX_moffs32

MOV EAX, moffs32

o32 A1 mo

386+

16/32/64-bit

§

Mov_RAX_moffs64

MOV RAX, moffs64

o64 A1 mo

X64

64-bit

§

Mov_moffs8_AL

MOV moffs8, AL

A2 mo

8086+

16/32/64-bit

§

Mov_moffs16_AX

MOV moffs16, AX

o16 A3 mo

8086+

16/32/64-bit

§

Mov_moffs32_EAX

MOV moffs32, EAX

o32 A3 mo

386+

16/32/64-bit

§

Mov_moffs64_RAX

MOV moffs64, RAX

o64 A3 mo

X64

64-bit

§

Movsb_m8_m8

MOVSB

A4

8086+

16/32/64-bit

§

Movsw_m16_m16

MOVSW

o16 A5

8086+

16/32/64-bit

§

Movsd_m32_m32

MOVSD

o32 A5

386+

16/32/64-bit

§

Movsq_m64_m64

MOVSQ

o64 A5

X64

64-bit

§

Cmpsb_m8_m8

CMPSB

A6

8086+

16/32/64-bit

§

Cmpsw_m16_m16

CMPSW

o16 A7

8086+

16/32/64-bit

§

Cmpsd_m32_m32

CMPSD

o32 A7

386+

16/32/64-bit

§

Cmpsq_m64_m64

CMPSQ

o64 A7

X64

64-bit

§

Test_AL_imm8

TEST AL, imm8

A8 ib

8086+

16/32/64-bit

§

Test_AX_imm16

TEST AX, imm16

o16 A9 iw

8086+

16/32/64-bit

§

Test_EAX_imm32

TEST EAX, imm32

o32 A9 id

386+

16/32/64-bit

§

Test_RAX_imm32

TEST RAX, imm32

o64 A9 id

X64

64-bit

§

Stosb_m8_AL

STOSB

AA

8086+

16/32/64-bit

§

Stosw_m16_AX

STOSW

o16 AB

8086+

16/32/64-bit

§

Stosd_m32_EAX

STOSD

o32 AB

386+

16/32/64-bit

§

Stosq_m64_RAX

STOSQ

o64 AB

X64

64-bit

§

Lodsb_AL_m8

LODSB

AC

8086+

16/32/64-bit

§

Lodsw_AX_m16

LODSW

o16 AD

8086+

16/32/64-bit

§

Lodsd_EAX_m32

LODSD

o32 AD

386+

16/32/64-bit

§

Lodsq_RAX_m64

LODSQ

o64 AD

X64

64-bit

§

Scasb_AL_m8

SCASB

AE

8086+

16/32/64-bit

§

Scasw_AX_m16

SCASW

o16 AF

8086+

16/32/64-bit

§

Scasd_EAX_m32

SCASD

o32 AF

386+

16/32/64-bit

§

Scasq_RAX_m64

SCASQ

o64 AF

X64

64-bit

§

Mov_r8_imm8

MOV r8, imm8

B0+rb ib

8086+

16/32/64-bit

§

Mov_r16_imm16

MOV r16, imm16

o16 B8+rw iw

8086+

16/32/64-bit

§

Mov_r32_imm32

MOV r32, imm32

o32 B8+rd id

386+

16/32/64-bit

§

Mov_r64_imm64

MOV r64, imm64

o64 B8+ro io

X64

64-bit

§

Rol_rm8_imm8

ROL r/m8, imm8

C0 /0 ib

186+

16/32/64-bit

§

Ror_rm8_imm8

ROR r/m8, imm8

C0 /1 ib

186+

16/32/64-bit

§

Rcl_rm8_imm8

RCL r/m8, imm8

C0 /2 ib

186+

16/32/64-bit

§

Rcr_rm8_imm8

RCR r/m8, imm8

C0 /3 ib

186+

16/32/64-bit

§

Shl_rm8_imm8

SHL r/m8, imm8

C0 /4 ib

186+

16/32/64-bit

§

Shr_rm8_imm8

SHR r/m8, imm8

C0 /5 ib

186+

16/32/64-bit

§

Sal_rm8_imm8

SAL r/m8, imm8

C0 /6 ib

186+

16/32/64-bit

§

Sar_rm8_imm8

SAR r/m8, imm8

C0 /7 ib

186+

16/32/64-bit

§

Rol_rm16_imm8

ROL r/m16, imm8

o16 C1 /0 ib

186+

16/32/64-bit

§

Rol_rm32_imm8

ROL r/m32, imm8

o32 C1 /0 ib

386+

16/32/64-bit

§

Rol_rm64_imm8

ROL r/m64, imm8

o64 C1 /0 ib

X64

64-bit

§

Ror_rm16_imm8

ROR r/m16, imm8

o16 C1 /1 ib

186+

16/32/64-bit

§

Ror_rm32_imm8

ROR r/m32, imm8

o32 C1 /1 ib

386+

16/32/64-bit

§

Ror_rm64_imm8

ROR r/m64, imm8

o64 C1 /1 ib

X64

64-bit

§

Rcl_rm16_imm8

RCL r/m16, imm8

o16 C1 /2 ib

186+

16/32/64-bit

§

Rcl_rm32_imm8

RCL r/m32, imm8

o32 C1 /2 ib

386+

16/32/64-bit

§

Rcl_rm64_imm8

RCL r/m64, imm8

o64 C1 /2 ib

X64

64-bit

§

Rcr_rm16_imm8

RCR r/m16, imm8

o16 C1 /3 ib

186+

16/32/64-bit

§

Rcr_rm32_imm8

RCR r/m32, imm8

o32 C1 /3 ib

386+

16/32/64-bit

§

Rcr_rm64_imm8

RCR r/m64, imm8

o64 C1 /3 ib

X64

64-bit

§

Shl_rm16_imm8

SHL r/m16, imm8

o16 C1 /4 ib

186+

16/32/64-bit

§

Shl_rm32_imm8

SHL r/m32, imm8

o32 C1 /4 ib

386+

16/32/64-bit

§

Shl_rm64_imm8

SHL r/m64, imm8

o64 C1 /4 ib

X64

64-bit

§

Shr_rm16_imm8

SHR r/m16, imm8

o16 C1 /5 ib

186+

16/32/64-bit

§

Shr_rm32_imm8

SHR r/m32, imm8

o32 C1 /5 ib

386+

16/32/64-bit

§

Shr_rm64_imm8

SHR r/m64, imm8

o64 C1 /5 ib

X64

64-bit

§

Sal_rm16_imm8

SAL r/m16, imm8

o16 C1 /6 ib

186+

16/32/64-bit

§

Sal_rm32_imm8

SAL r/m32, imm8

o32 C1 /6 ib

386+

16/32/64-bit

§

Sal_rm64_imm8

SAL r/m64, imm8

o64 C1 /6 ib

X64

64-bit

§

Sar_rm16_imm8

SAR r/m16, imm8

o16 C1 /7 ib

186+

16/32/64-bit

§

Sar_rm32_imm8

SAR r/m32, imm8

o32 C1 /7 ib

386+

16/32/64-bit

§

Sar_rm64_imm8

SAR r/m64, imm8

o64 C1 /7 ib

X64

64-bit

§

Retnw_imm16

RET imm16

o16 C2 iw

8086+

16/32/64-bit

§

Retnd_imm16

RET imm16

o32 C2 iw

386+

16/32-bit

§

Retnq_imm16

RET imm16

o64 C2 iw

X64

64-bit

§

Retnw

RET

o16 C3

8086+

16/32/64-bit

§

Retnd

RET

o32 C3

386+

16/32-bit

§

Retnq

RET

o64 C3

X64

64-bit

§

Les_r16_m1616

LES r16, m16:16

o16 C4 /r

8086+

16/32-bit

§

Les_r32_m1632

LES r32, m16:32

o32 C4 /r

386+

16/32-bit

§

Lds_r16_m1616

LDS r16, m16:16

o16 C5 /r

8086+

16/32-bit

§

Lds_r32_m1632

LDS r32, m16:32

o32 C5 /r

386+

16/32-bit

§

Mov_rm8_imm8

MOV r/m8, imm8

C6 /0 ib

8086+

16/32/64-bit

§

Xabort_imm8

XABORT imm8

C6 F8 ib

RTM

16/32/64-bit

§

Mov_rm16_imm16

MOV r/m16, imm16

o16 C7 /0 iw

8086+

16/32/64-bit

§

Mov_rm32_imm32

MOV r/m32, imm32

o32 C7 /0 id

386+

16/32/64-bit

§

Mov_rm64_imm32

MOV r/m64, imm32

o64 C7 /0 id

X64

64-bit

§

Xbegin_rel16

XBEGIN rel16

o16 C7 F8 cw

RTM

16/32/64-bit

§

Xbegin_rel32

XBEGIN rel32

o32 C7 F8 cd

RTM

16/32/64-bit

§

Enterw_imm16_imm8

ENTER imm16, imm8

o16 C8 iw ib

186+

16/32/64-bit

§

Enterd_imm16_imm8

ENTER imm16, imm8

o32 C8 iw ib

386+

16/32-bit

§

Enterq_imm16_imm8

ENTER imm16, imm8

o64 C8 iw ib

X64

64-bit

§

Leavew

LEAVE

o16 C9

186+

16/32/64-bit

§

Leaved

LEAVE

o32 C9

386+

16/32-bit

§

Leaveq

LEAVE

o64 C9

X64

64-bit

§

Retfw_imm16

RETF imm16

o16 CA iw

8086+

16/32/64-bit

§

Retfd_imm16

RETF imm16

o32 CA iw

386+

16/32/64-bit

§

Retfq_imm16

RETF imm16

o64 CA iw

X64

64-bit

§

Retfw

RETF

o16 CB

8086+

16/32/64-bit

§

Retfd

RETF

o32 CB

386+

16/32/64-bit

§

Retfq

RETF

o64 CB

X64

64-bit

§

Int3

INT3

CC

8086+

16/32/64-bit

§

Int_imm8

INT imm8

CD ib

8086+

16/32/64-bit

§

Into

INTO

CE

8086+

16/32-bit

§

Iretw

IRET

o16 CF

8086+

16/32/64-bit

§

Iretd

IRETD

o32 CF

386+

16/32/64-bit

§

Iretq

IRETQ

o64 CF

X64

64-bit

§

Rol_rm8_1

ROL r/m8, 1

D0 /0

8086+

16/32/64-bit

§

Ror_rm8_1

ROR r/m8, 1

D0 /1

8086+

16/32/64-bit

§

Rcl_rm8_1

RCL r/m8, 1

D0 /2

8086+

16/32/64-bit

§

Rcr_rm8_1

RCR r/m8, 1

D0 /3

8086+

16/32/64-bit

§

Shl_rm8_1

SHL r/m8, 1

D0 /4

8086+

16/32/64-bit

§

Shr_rm8_1

SHR r/m8, 1

D0 /5

8086+

16/32/64-bit

§

Sal_rm8_1

SAL r/m8, 1

D0 /6

8086+

16/32/64-bit

§

Sar_rm8_1

SAR r/m8, 1

D0 /7

8086+

16/32/64-bit

§

Rol_rm16_1

ROL r/m16, 1

o16 D1 /0

8086+

16/32/64-bit

§

Rol_rm32_1

ROL r/m32, 1

o32 D1 /0

386+

16/32/64-bit

§

Rol_rm64_1

ROL r/m64, 1

o64 D1 /0

X64

64-bit

§

Ror_rm16_1

ROR r/m16, 1

o16 D1 /1

8086+

16/32/64-bit

§

Ror_rm32_1

ROR r/m32, 1

o32 D1 /1

386+

16/32/64-bit

§

Ror_rm64_1

ROR r/m64, 1

o64 D1 /1

X64

64-bit

§

Rcl_rm16_1

RCL r/m16, 1

o16 D1 /2

8086+

16/32/64-bit

§

Rcl_rm32_1

RCL r/m32, 1

o32 D1 /2

386+

16/32/64-bit

§

Rcl_rm64_1

RCL r/m64, 1

o64 D1 /2

X64

64-bit

§

Rcr_rm16_1

RCR r/m16, 1

o16 D1 /3

8086+

16/32/64-bit

§

Rcr_rm32_1

RCR r/m32, 1

o32 D1 /3

386+

16/32/64-bit

§

Rcr_rm64_1

RCR r/m64, 1

o64 D1 /3

X64

64-bit

§

Shl_rm16_1

SHL r/m16, 1

o16 D1 /4

8086+

16/32/64-bit

§

Shl_rm32_1

SHL r/m32, 1

o32 D1 /4

386+

16/32/64-bit

§

Shl_rm64_1

SHL r/m64, 1

o64 D1 /4

X64

64-bit

§

Shr_rm16_1

SHR r/m16, 1

o16 D1 /5

8086+

16/32/64-bit

§

Shr_rm32_1

SHR r/m32, 1

o32 D1 /5

386+

16/32/64-bit

§

Shr_rm64_1

SHR r/m64, 1

o64 D1 /5

X64

64-bit

§

Sal_rm16_1

SAL r/m16, 1

o16 D1 /6

8086+

16/32/64-bit

§

Sal_rm32_1

SAL r/m32, 1

o32 D1 /6

386+

16/32/64-bit

§

Sal_rm64_1

SAL r/m64, 1

o64 D1 /6

X64

64-bit

§

Sar_rm16_1

SAR r/m16, 1

o16 D1 /7

8086+

16/32/64-bit

§

Sar_rm32_1

SAR r/m32, 1

o32 D1 /7

386+

16/32/64-bit

§

Sar_rm64_1

SAR r/m64, 1

o64 D1 /7

X64

64-bit

§

Rol_rm8_CL

ROL r/m8, CL

D2 /0

8086+

16/32/64-bit

§

Ror_rm8_CL

ROR r/m8, CL

D2 /1

8086+

16/32/64-bit

§

Rcl_rm8_CL

RCL r/m8, CL

D2 /2

8086+

16/32/64-bit

§

Rcr_rm8_CL

RCR r/m8, CL

D2 /3

8086+

16/32/64-bit

§

Shl_rm8_CL

SHL r/m8, CL

D2 /4

8086+

16/32/64-bit

§

Shr_rm8_CL

SHR r/m8, CL

D2 /5

8086+

16/32/64-bit

§

Sal_rm8_CL

SAL r/m8, CL

D2 /6

8086+

16/32/64-bit

§

Sar_rm8_CL

SAR r/m8, CL

D2 /7

8086+

16/32/64-bit

§

Rol_rm16_CL

ROL r/m16, CL

o16 D3 /0

8086+

16/32/64-bit

§

Rol_rm32_CL

ROL r/m32, CL

o32 D3 /0

386+

16/32/64-bit

§

Rol_rm64_CL

ROL r/m64, CL

o64 D3 /0

X64

64-bit

§

Ror_rm16_CL

ROR r/m16, CL

o16 D3 /1

8086+

16/32/64-bit

§

Ror_rm32_CL

ROR r/m32, CL

o32 D3 /1

386+

16/32/64-bit

§

Ror_rm64_CL

ROR r/m64, CL

o64 D3 /1

X64

64-bit

§

Rcl_rm16_CL

RCL r/m16, CL

o16 D3 /2

8086+

16/32/64-bit

§

Rcl_rm32_CL

RCL r/m32, CL

o32 D3 /2

386+

16/32/64-bit

§

Rcl_rm64_CL

RCL r/m64, CL

o64 D3 /2

X64

64-bit

§

Rcr_rm16_CL

RCR r/m16, CL

o16 D3 /3

8086+

16/32/64-bit

§

Rcr_rm32_CL

RCR r/m32, CL

o32 D3 /3

386+

16/32/64-bit

§

Rcr_rm64_CL

RCR r/m64, CL

o64 D3 /3

X64

64-bit

§

Shl_rm16_CL

SHL r/m16, CL

o16 D3 /4

8086+

16/32/64-bit

§

Shl_rm32_CL

SHL r/m32, CL

o32 D3 /4

386+

16/32/64-bit

§

Shl_rm64_CL

SHL r/m64, CL

o64 D3 /4

X64

64-bit

§

Shr_rm16_CL

SHR r/m16, CL

o16 D3 /5

8086+

16/32/64-bit

§

Shr_rm32_CL

SHR r/m32, CL

o32 D3 /5

386+

16/32/64-bit

§

Shr_rm64_CL

SHR r/m64, CL

o64 D3 /5

X64

64-bit

§

Sal_rm16_CL

SAL r/m16, CL

o16 D3 /6

8086+

16/32/64-bit

§

Sal_rm32_CL

SAL r/m32, CL

o32 D3 /6

386+

16/32/64-bit

§

Sal_rm64_CL

SAL r/m64, CL

o64 D3 /6

X64

64-bit

§

Sar_rm16_CL

SAR r/m16, CL

o16 D3 /7

8086+

16/32/64-bit

§

Sar_rm32_CL

SAR r/m32, CL

o32 D3 /7

386+

16/32/64-bit

§

Sar_rm64_CL

SAR r/m64, CL

o64 D3 /7

X64

64-bit

§

Aam_imm8

AAM imm8

D4 ib

8086+

16/32-bit

§

Aad_imm8

AAD imm8

D5 ib

8086+

16/32-bit

§

Salc

SALC

D6

8086+

16/32-bit

§

Xlat_m8

XLATB

D7

8086+

16/32/64-bit

§

Fadd_m32fp

FADD m32fp

D8 /0

8087+

16/32/64-bit

§

Fmul_m32fp

FMUL m32fp

D8 /1

8087+

16/32/64-bit

§

Fcom_m32fp

FCOM m32fp

D8 /2

8087+

16/32/64-bit

§

Fcomp_m32fp

FCOMP m32fp

D8 /3

8087+

16/32/64-bit

§

Fsub_m32fp

FSUB m32fp

D8 /4

8087+

16/32/64-bit

§

Fsubr_m32fp

FSUBR m32fp

D8 /5

8087+

16/32/64-bit

§

Fdiv_m32fp

FDIV m32fp

D8 /6

8087+

16/32/64-bit

§

Fdivr_m32fp

FDIVR m32fp

D8 /7

8087+

16/32/64-bit

§

Fadd_st0_sti

FADD ST(0), ST(i)

D8 C0+i

8087+

16/32/64-bit

§

Fmul_st0_sti

FMUL ST(0), ST(i)

D8 C8+i

8087+

16/32/64-bit

§

Fcom_st0_sti

FCOM ST(i)

D8 D0+i

8087+

16/32/64-bit

§

Fcomp_st0_sti

FCOMP ST(i)

D8 D8+i

8087+

16/32/64-bit

§

Fsub_st0_sti

FSUB ST(0), ST(i)

D8 E0+i

8087+

16/32/64-bit

§

Fsubr_st0_sti

FSUBR ST(0), ST(i)

D8 E8+i

8087+

16/32/64-bit

§

Fdiv_st0_sti

FDIV ST(0), ST(i)

D8 F0+i

8087+

16/32/64-bit

§

Fdivr_st0_sti

FDIVR ST(0), ST(i)

D8 F8+i

8087+

16/32/64-bit

§

Fld_m32fp

FLD m32fp

D9 /0

8087+

16/32/64-bit

§

Fst_m32fp

FST m32fp

D9 /2

8087+

16/32/64-bit

§

Fstp_m32fp

FSTP m32fp

D9 /3

8087+

16/32/64-bit

§

Fldenv_m14byte

FLDENV m14byte

o16 D9 /4

8087+

16/32/64-bit

§

Fldenv_m28byte

FLDENV m28byte

o32 D9 /4

387+

16/32/64-bit

§

Fldcw_m2byte

FLDCW m2byte

D9 /5

8087+

16/32/64-bit

§

Fnstenv_m14byte

FNSTENV m14byte

o16 D9 /6

8087+

16/32/64-bit

§

Fstenv_m14byte

FSTENV m14byte

9B o16 D9 /6

8087+

16/32/64-bit

§

Fnstenv_m28byte

FNSTENV m28byte

o32 D9 /6

387+

16/32/64-bit

§

Fstenv_m28byte

FSTENV m28byte

9B o32 D9 /6

387+

16/32/64-bit

§

Fnstcw_m2byte

FNSTCW m2byte

D9 /7

8087+

16/32/64-bit

§

Fstcw_m2byte

FSTCW m2byte

9B D9 /7

8087+

16/32/64-bit

§

Fld_sti

FLD ST(i)

D9 C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti

FXCH ST(i)

D9 C8+i

8087+

16/32/64-bit

§

Fnop

FNOP

D9 D0

8087+

16/32/64-bit

§

Fstpnce_sti

FSTPNCE ST(i)

D9 D8+i

8087+

16/32/64-bit

§

Fchs

FCHS

D9 E0

8087+

16/32/64-bit

§

Fabs

FABS

D9 E1

8087+

16/32/64-bit

§

Ftst

FTST

D9 E4

8087+

16/32/64-bit

§

Fxam

FXAM

D9 E5

8087+

16/32/64-bit

§

Fld1

FLD1

D9 E8

8087+

16/32/64-bit

§

Fldl2t

FLDL2T

D9 E9

8087+

16/32/64-bit

§

Fldl2e

FLDL2E

D9 EA

8087+

16/32/64-bit

§

Fldpi

FLDPI

D9 EB

8087+

16/32/64-bit

§

Fldlg2

FLDLG2

D9 EC

8087+

16/32/64-bit

§

Fldln2

FLDLN2

D9 ED

8087+

16/32/64-bit

§

Fldz

FLDZ

D9 EE

8087+

16/32/64-bit

§

F2xm1

F2XM1

D9 F0

8087+

16/32/64-bit

§

Fyl2x

FYL2X

D9 F1

8087+

16/32/64-bit

§

Fptan

FPTAN

D9 F2

8087+

16/32/64-bit

§

Fpatan

FPATAN

D9 F3

8087+

16/32/64-bit

§

Fxtract

FXTRACT

D9 F4

8087+

16/32/64-bit

§

Fprem1

FPREM1

D9 F5

387+

16/32/64-bit

§

Fdecstp

FDECSTP

D9 F6

8087+

16/32/64-bit

§

Fincstp

FINCSTP

D9 F7

8087+

16/32/64-bit

§

Fprem

FPREM

D9 F8

8087+

16/32/64-bit

§

Fyl2xp1

FYL2XP1

D9 F9

8087+

16/32/64-bit

§

Fsqrt

FSQRT

D9 FA

8087+

16/32/64-bit

§

Fsincos

FSINCOS

D9 FB

387+

16/32/64-bit

§

Frndint

FRNDINT

D9 FC

8087+

16/32/64-bit

§

Fscale

FSCALE

D9 FD

8087+

16/32/64-bit

§

Fsin

FSIN

D9 FE

387+

16/32/64-bit

§

Fcos

FCOS

D9 FF

387+

16/32/64-bit

§

Fiadd_m32int

FIADD m32int

DA /0

8087+

16/32/64-bit

§

Fimul_m32int

FIMUL m32int

DA /1

8087+

16/32/64-bit

§

Ficom_m32int

FICOM m32int

DA /2

8087+

16/32/64-bit

§

Ficomp_m32int

FICOMP m32int

DA /3

8087+

16/32/64-bit

§

Fisub_m32int

FISUB m32int

DA /4

8087+

16/32/64-bit

§

Fisubr_m32int

FISUBR m32int

DA /5

8087+

16/32/64-bit

§

Fidiv_m32int

FIDIV m32int

DA /6

8087+

16/32/64-bit

§

Fidivr_m32int

FIDIVR m32int

DA /7

8087+

16/32/64-bit

§

Fcmovb_st0_sti

FCMOVB ST(0), ST(i)

DA C0+i

8087+ and CMOV

16/32/64-bit

§

Fcmove_st0_sti

FCMOVE ST(0), ST(i)

DA C8+i

8087+ and CMOV

16/32/64-bit

§

Fcmovbe_st0_sti

FCMOVBE ST(0), ST(i)

DA D0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovu_st0_sti

FCMOVU ST(0), ST(i)

DA D8+i

8087+ and CMOV

16/32/64-bit

§

Fucompp

FUCOMPP

DA E9

387+

16/32/64-bit

§

Fild_m32int

FILD m32int

DB /0

8087+

16/32/64-bit

§

Fisttp_m32int

FISTTP m32int

DB /1

8087+ and SSE3

16/32/64-bit

§

Fist_m32int

FIST m32int

DB /2

8087+

16/32/64-bit

§

Fistp_m32int

FISTP m32int

DB /3

8087+

16/32/64-bit

§

Fld_m80fp

FLD m80fp

DB /5

8087+

16/32/64-bit

§

Fstp_m80fp

FSTP m80fp

DB /7

8087+

16/32/64-bit

§

Fcmovnb_st0_sti

FCMOVNB ST(0), ST(i)

DB C0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovne_st0_sti

FCMOVNE ST(0), ST(i)

DB C8+i

8087+ and CMOV

16/32/64-bit

§

Fcmovnbe_st0_sti

FCMOVNBE ST(0), ST(i)

DB D0+i

8087+ and CMOV

16/32/64-bit

§

Fcmovnu_st0_sti

FCMOVNU ST(0), ST(i)

DB D8+i

8087+ and CMOV

16/32/64-bit

§

Fneni

FNENI

DB E0

8087+

16/32/64-bit

§

Feni

FENI

9B DB E0

8087+

16/32/64-bit

§

Fndisi

FNDISI

DB E1

8087+

16/32/64-bit

§

Fdisi

FDISI

9B DB E1

8087+

16/32/64-bit

§

Fnclex

FNCLEX

DB E2

8087+

16/32/64-bit

§

Fclex

FCLEX

9B DB E2

8087+

16/32/64-bit

§

Fninit

FNINIT

DB E3

8087+

16/32/64-bit

§

Finit

FINIT

9B DB E3

8087+

16/32/64-bit

§

Fnsetpm

FNSETPM

DB E4

287+

16/32/64-bit

§

Fsetpm

FSETPM

9B DB E4

287+

16/32/64-bit

§

Frstpm

FRSTPM

DB E5

287 XL

16/32-bit

§

Fucomi_st0_sti

FUCOMI ST, ST(i)

DB E8+i

8087+ and CMOV

16/32/64-bit

§

Fcomi_st0_sti

FCOMI ST, ST(i)

DB F0+i

8087+ and CMOV

16/32/64-bit

§

Fadd_m64fp

FADD m64fp

DC /0

8087+

16/32/64-bit

§

Fmul_m64fp

FMUL m64fp

DC /1

8087+

16/32/64-bit

§

Fcom_m64fp

FCOM m64fp

DC /2

8087+

16/32/64-bit

§

Fcomp_m64fp

FCOMP m64fp

DC /3

8087+

16/32/64-bit

§

Fsub_m64fp

FSUB m64fp

DC /4

8087+

16/32/64-bit

§

Fsubr_m64fp

FSUBR m64fp

DC /5

8087+

16/32/64-bit

§

Fdiv_m64fp

FDIV m64fp

DC /6

8087+

16/32/64-bit

§

Fdivr_m64fp

FDIVR m64fp

DC /7

8087+

16/32/64-bit

§

Fadd_sti_st0

FADD ST(i), ST(0)

DC C0+i

8087+

16/32/64-bit

§

Fmul_sti_st0

FMUL ST(i), ST(0)

DC C8+i

8087+

16/32/64-bit

§

Fcom_st0_sti_DCD0

FCOM ST(i)

DC D0+i

8087+

16/32/64-bit

§

Fcomp_st0_sti_DCD8

FCOMP ST(i)

DC D8+i

8087+

16/32/64-bit

§

Fsubr_sti_st0

FSUBR ST(i), ST(0)

DC E0+i

8087+

16/32/64-bit

§

Fsub_sti_st0

FSUB ST(i), ST(0)

DC E8+i

8087+

16/32/64-bit

§

Fdivr_sti_st0

FDIVR ST(i), ST(0)

DC F0+i

8087+

16/32/64-bit

§

Fdiv_sti_st0

FDIV ST(i), ST(0)

DC F8+i

8087+

16/32/64-bit

§

Fld_m64fp

FLD m64fp

DD /0

8087+

16/32/64-bit

§

Fisttp_m64int

FISTTP m64int

DD /1

8087+ and SSE3

16/32/64-bit

§

Fst_m64fp

FST m64fp

DD /2

8087+

16/32/64-bit

§

Fstp_m64fp

FSTP m64fp

DD /3

8087+

16/32/64-bit

§

Frstor_m94byte

FRSTOR m94byte

o16 DD /4

8087+

16/32/64-bit

§

Frstor_m108byte

FRSTOR m108byte

o32 DD /4

387+

16/32/64-bit

§

Fnsave_m94byte

FNSAVE m94byte

o16 DD /6

8087+

16/32/64-bit

§

Fsave_m94byte

FSAVE m94byte

9B o16 DD /6

8087+

16/32/64-bit

§

Fnsave_m108byte

FNSAVE m108byte

o32 DD /6

387+

16/32/64-bit

§

Fsave_m108byte

FSAVE m108byte

9B o32 DD /6

387+

16/32/64-bit

§

Fnstsw_m2byte

FNSTSW m2byte

DD /7

8087+

16/32/64-bit

§

Fstsw_m2byte

FSTSW m2byte

9B DD /7

8087+

16/32/64-bit

§

Ffree_sti

FFREE ST(i)

DD C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti_DDC8

FXCH ST(i)

DD C8+i

8087+

16/32/64-bit

§

Fst_sti

FST ST(i)

DD D0+i

8087+

16/32/64-bit

§

Fstp_sti

FSTP ST(i)

DD D8+i

8087+

16/32/64-bit

§

Fucom_st0_sti

FUCOM ST(i)

DD E0+i

8087+

16/32/64-bit

§

Fucomp_st0_sti

FUCOMP ST(i)

DD E8+i

8087+

16/32/64-bit

§

Fiadd_m16int

FIADD m16int

DE /0

8087+

16/32/64-bit

§

Fimul_m16int

FIMUL m16int

DE /1

8087+

16/32/64-bit

§

Ficom_m16int

FICOM m16int

DE /2

8087+

16/32/64-bit

§

Ficomp_m16int

FICOMP m16int

DE /3

8087+

16/32/64-bit

§

Fisub_m16int

FISUB m16int

DE /4

8087+

16/32/64-bit

§

Fisubr_m16int

FISUBR m16int

DE /5

8087+

16/32/64-bit

§

Fidiv_m16int

FIDIV m16int

DE /6

8087+

16/32/64-bit

§

Fidivr_m16int

FIDIVR m16int

DE /7

8087+

16/32/64-bit

§

Faddp_sti_st0

FADDP ST(i), ST(0)

DE C0+i

8087+

16/32/64-bit

§

Fmulp_sti_st0

FMULP ST(i), ST(0)

DE C8+i

8087+

16/32/64-bit

§

Fcomp_st0_sti_DED0

FCOMP ST(i)

DE D0+i

8087+

16/32/64-bit

§

Fcompp

FCOMPP

DE D9

8087+

16/32/64-bit

§

Fsubrp_sti_st0

FSUBRP ST(i), ST(0)

DE E0+i

8087+

16/32/64-bit

§

Fsubp_sti_st0

FSUBP ST(i), ST(0)

DE E8+i

8087+

16/32/64-bit

§

Fdivrp_sti_st0

FDIVRP ST(i), ST(0)

DE F0+i

8087+

16/32/64-bit

§

Fdivp_sti_st0

FDIVP ST(i), ST(0)

DE F8+i

8087+

16/32/64-bit

§

Fild_m16int

FILD m16int

DF /0

8087+

16/32/64-bit

§

Fisttp_m16int

FISTTP m16int

DF /1

8087+ and SSE3

16/32/64-bit

§

Fist_m16int

FIST m16int

DF /2

8087+

16/32/64-bit

§

Fistp_m16int

FISTP m16int

DF /3

8087+

16/32/64-bit

§

Fbld_m80bcd

FBLD m80bcd

DF /4

8087+

16/32/64-bit

§

Fild_m64int

FILD m64int

DF /5

8087+

16/32/64-bit

§

Fbstp_m80bcd

FBSTP m80bcd

DF /6

8087+

16/32/64-bit

§

Fistp_m64int

FISTP m64int

DF /7

8087+

16/32/64-bit

§

Ffreep_sti

FFREEP ST(i)

DF C0+i

8087+

16/32/64-bit

§

Fxch_st0_sti_DFC8

FXCH ST(i)

DF C8+i

8087+

16/32/64-bit

§

Fstp_sti_DFD0

FSTP ST(i)

DF D0+i

8087+

16/32/64-bit

§

Fstp_sti_DFD8

FSTP ST(i)

DF D8+i

8087+

16/32/64-bit

§

Fnstsw_AX

FNSTSW AX

DF E0

287+

16/32/64-bit

§

Fstsw_AX

FSTSW AX

9B DF E0

287+

16/32/64-bit

§

Fstdw_AX

FSTDW AX

9B DF E1

387 SL

16/32-bit

§

Fstsg_AX

FSTSG AX

9B DF E2

387 SL

16/32-bit

§

Fucomip_st0_sti

FUCOMIP ST, ST(i)

DF E8+i

8087+ and CMOV

16/32/64-bit

§

Fcomip_st0_sti

FCOMIP ST, ST(i)

DF F0+i

8087+ and CMOV

16/32/64-bit

§

Loopne_rel8_16_CX

LOOPNE rel8

a16 o16 E0 cb

8086+

16/32-bit

§

Loopne_rel8_32_CX

LOOPNE rel8

a16 o32 E0 cb

386+

16/32-bit

§

Loopne_rel8_16_ECX

LOOPNE rel8

a32 o16 E0 cb

386+

16/32/64-bit

§

Loopne_rel8_32_ECX

LOOPNE rel8

a32 o32 E0 cb

386+

16/32-bit

§

Loopne_rel8_64_ECX

LOOPNE rel8

a32 o64 E0 cb

X64

64-bit

§

Loopne_rel8_16_RCX

LOOPNE rel8

a64 o16 E0 cb

X64

64-bit

§

Loopne_rel8_64_RCX

LOOPNE rel8

a64 o64 E0 cb

X64

64-bit

§

Loope_rel8_16_CX

LOOPE rel8

a16 o16 E1 cb

8086+

16/32-bit

§

Loope_rel8_32_CX

LOOPE rel8

a16 o32 E1 cb

386+

16/32-bit

§

Loope_rel8_16_ECX

LOOPE rel8

a32 o16 E1 cb

386+

16/32/64-bit

§

Loope_rel8_32_ECX

LOOPE rel8

a32 o32 E1 cb

386+

16/32-bit

§

Loope_rel8_64_ECX

LOOPE rel8

a32 o64 E1 cb

X64

64-bit

§

Loope_rel8_16_RCX

LOOPE rel8

a64 o16 E1 cb

X64

64-bit

§

Loope_rel8_64_RCX

LOOPE rel8

a64 o64 E1 cb

X64

64-bit

§

Loop_rel8_16_CX

LOOP rel8

a16 o16 E2 cb

8086+

16/32-bit

§

Loop_rel8_32_CX

LOOP rel8

a16 o32 E2 cb

386+

16/32-bit

§

Loop_rel8_16_ECX

LOOP rel8

a32 o16 E2 cb

386+

16/32/64-bit

§

Loop_rel8_32_ECX

LOOP rel8

a32 o32 E2 cb

386+

16/32-bit

§

Loop_rel8_64_ECX

LOOP rel8

a32 o64 E2 cb

X64

64-bit

§

Loop_rel8_16_RCX

LOOP rel8

a64 o16 E2 cb

X64

64-bit

§

Loop_rel8_64_RCX

LOOP rel8

a64 o64 E2 cb

X64

64-bit

§

Jcxz_rel8_16

JCXZ rel8

a16 o16 E3 cb

8086+

16/32-bit

§

Jcxz_rel8_32

JCXZ rel8

a16 o32 E3 cb

386+

16/32-bit

§

Jecxz_rel8_16

JECXZ rel8

a32 o16 E3 cb

386+

16/32/64-bit

§

Jecxz_rel8_32

JECXZ rel8

a32 o32 E3 cb

386+

16/32-bit

§

Jecxz_rel8_64

JECXZ rel8

a32 o64 E3 cb

X64

64-bit

§

Jrcxz_rel8_16

JRCXZ rel8

a64 o16 E3 cb

X64

64-bit

§

Jrcxz_rel8_64

JRCXZ rel8

a64 o64 E3 cb

X64

64-bit

§

In_AL_imm8

IN AL, imm8

E4 ib

8086+

16/32/64-bit

§

In_AX_imm8

IN AX, imm8

o16 E5 ib

8086+

16/32/64-bit

§

In_EAX_imm8

IN EAX, imm8

o32 E5 ib

386+

16/32/64-bit

§

Out_imm8_AL

OUT imm8, AL

E6 ib

8086+

16/32/64-bit

§

Out_imm8_AX

OUT imm8, AX

o16 E7 ib

8086+

16/32/64-bit

§

Out_imm8_EAX

OUT imm8, EAX

o32 E7 ib

386+

16/32/64-bit

§

Call_rel16

CALL rel16

o16 E8 cw

8086+

16/32/64-bit

§

Call_rel32_32

CALL rel32

o32 E8 cd

386+

16/32-bit

§

Call_rel32_64

CALL rel32

o64 E8 cd

X64

64-bit

§

Jmp_rel16

JMP rel16

o16 E9 cw

8086+

16/32/64-bit

§

Jmp_rel32_32

JMP rel32

o32 E9 cd

386+

16/32-bit

§

Jmp_rel32_64

JMP rel32

o64 E9 cd

X64

64-bit

§

Jmp_ptr1616

JMP ptr16:16

o16 EA cd

8086+

16/32-bit

§

Jmp_ptr1632

JMP ptr16:32

o32 EA cp

386+

16/32-bit

§

Jmp_rel8_16

JMP rel8

o16 EB cb

8086+

16/32/64-bit

§

Jmp_rel8_32

JMP rel8

o32 EB cb

386+

16/32-bit

§

Jmp_rel8_64

JMP rel8

o64 EB cb

X64

64-bit

§

In_AL_DX

IN AL, DX

EC

8086+

16/32/64-bit

§

In_AX_DX

IN AX, DX

o16 ED

8086+

16/32/64-bit

§

In_EAX_DX

IN EAX, DX

o32 ED

386+

16/32/64-bit

§

Out_DX_AL

OUT DX, AL

EE

8086+

16/32/64-bit

§

Out_DX_AX

OUT DX, AX

o16 EF

8086+

16/32/64-bit

§

Out_DX_EAX

OUT DX, EAX

o32 EF

386+

16/32/64-bit

§

Int1

INT1

F1

386+

16/32/64-bit

§

Hlt

HLT

F4

8086+

16/32/64-bit

§

Cmc

CMC

F5

8086+

16/32/64-bit

§

Test_rm8_imm8

TEST r/m8, imm8

F6 /0 ib

8086+

16/32/64-bit

§

Test_rm8_imm8_F6r1

TEST r/m8, imm8

F6 /1 ib

8086+

16/32/64-bit

§

Not_rm8

NOT r/m8

F6 /2

8086+

16/32/64-bit

§

Neg_rm8

NEG r/m8

F6 /3

8086+

16/32/64-bit

§

Mul_rm8

MUL r/m8

F6 /4

8086+

16/32/64-bit

§

Imul_rm8

IMUL r/m8

F6 /5

8086+

16/32/64-bit

§

Div_rm8

DIV r/m8

F6 /6

8086+

16/32/64-bit

§

Idiv_rm8

IDIV r/m8

F6 /7

8086+

16/32/64-bit

§

Test_rm16_imm16

TEST r/m16, imm16

o16 F7 /0 iw

8086+

16/32/64-bit

§

Test_rm32_imm32

TEST r/m32, imm32

o32 F7 /0 id

386+

16/32/64-bit

§

Test_rm64_imm32

TEST r/m64, imm32

o64 F7 /0 id

X64

64-bit

§

Test_rm16_imm16_F7r1

TEST r/m16, imm16

o16 F7 /1 iw

8086+

16/32/64-bit

§

Test_rm32_imm32_F7r1

TEST r/m32, imm32

o32 F7 /1 id

386+

16/32/64-bit

§

Test_rm64_imm32_F7r1

TEST r/m64, imm32

o64 F7 /1 id

X64

64-bit

§

Not_rm16

NOT r/m16

o16 F7 /2

8086+

16/32/64-bit

§

Not_rm32

NOT r/m32

o32 F7 /2

386+

16/32/64-bit

§

Not_rm64

NOT r/m64

o64 F7 /2

X64

64-bit

§

Neg_rm16

NEG r/m16

o16 F7 /3

8086+

16/32/64-bit

§

Neg_rm32

NEG r/m32

o32 F7 /3

386+

16/32/64-bit

§

Neg_rm64

NEG r/m64

o64 F7 /3

X64

64-bit

§

Mul_rm16

MUL r/m16

o16 F7 /4

8086+

16/32/64-bit

§

Mul_rm32

MUL r/m32

o32 F7 /4

386+

16/32/64-bit

§

Mul_rm64

MUL r/m64

o64 F7 /4

X64

64-bit

§

Imul_rm16

IMUL r/m16

o16 F7 /5

8086+

16/32/64-bit

§

Imul_rm32

IMUL r/m32

o32 F7 /5

386+

16/32/64-bit

§

Imul_rm64

IMUL r/m64

o64 F7 /5

X64

64-bit

§

Div_rm16

DIV r/m16

o16 F7 /6

8086+

16/32/64-bit

§

Div_rm32

DIV r/m32

o32 F7 /6

386+

16/32/64-bit

§

Div_rm64

DIV r/m64

o64 F7 /6

X64

64-bit

§

Idiv_rm16

IDIV r/m16

o16 F7 /7

8086+

16/32/64-bit

§

Idiv_rm32

IDIV r/m32

o32 F7 /7

386+

16/32/64-bit

§

Idiv_rm64

IDIV r/m64

o64 F7 /7

X64

64-bit

§

Clc

CLC

F8

8086+

16/32/64-bit

§

Stc

STC

F9

8086+

16/32/64-bit

§

Cli

CLI

FA

8086+

16/32/64-bit

§

Sti

STI

FB

8086+

16/32/64-bit

§

Cld

CLD

FC

8086+

16/32/64-bit

§

Std

STD

FD

8086+

16/32/64-bit

§

Inc_rm8

INC r/m8

FE /0

8086+

16/32/64-bit

§

Dec_rm8

DEC r/m8

FE /1

8086+

16/32/64-bit

§

Inc_rm16

INC r/m16

o16 FF /0

8086+

16/32/64-bit

§

Inc_rm32

INC r/m32

o32 FF /0

386+

16/32/64-bit

§

Inc_rm64

INC r/m64

o64 FF /0

X64

64-bit

§

Dec_rm16

DEC r/m16

o16 FF /1

8086+

16/32/64-bit

§

Dec_rm32

DEC r/m32

o32 FF /1

386+

16/32/64-bit

§

Dec_rm64

DEC r/m64

o64 FF /1

X64

64-bit

§

Call_rm16

CALL r/m16

o16 FF /2

8086+

16/32/64-bit

§

Call_rm32

CALL r/m32

o32 FF /2

386+

16/32-bit

§

Call_rm64

CALL r/m64

o64 FF /2

X64

64-bit

§

Call_m1616

CALL m16:16

o16 FF /3

8086+

16/32/64-bit

§

Call_m1632

CALL m16:32

o32 FF /3

386+

16/32/64-bit

§

Call_m1664

CALL m16:64

o64 FF /3

X64

64-bit

§

Jmp_rm16

JMP r/m16

o16 FF /4

8086+

16/32/64-bit

§

Jmp_rm32

JMP r/m32

o32 FF /4

386+

16/32-bit

§

Jmp_rm64

JMP r/m64

o64 FF /4

X64

64-bit

§

Jmp_m1616

JMP m16:16

o16 FF /5

8086+

16/32/64-bit

§

Jmp_m1632

JMP m16:32

o32 FF /5

386+

16/32/64-bit

§

Jmp_m1664

JMP m16:64

o64 FF /5

X64

64-bit

§

Push_rm16

PUSH r/m16

o16 FF /6

8086+

16/32/64-bit

§

Push_rm32

PUSH r/m32

o32 FF /6

386+

16/32-bit

§

Push_rm64

PUSH r/m64

o64 FF /6

X64

64-bit

§

Sldt_rm16

SLDT r/m16

o16 0F 00 /0

286+

16/32/64-bit

§

Sldt_r32m16

SLDT r32/m16

o32 0F 00 /0

386+

16/32/64-bit

§

Sldt_r64m16

SLDT r64/m16

o64 0F 00 /0

X64

64-bit

§

Str_rm16

STR r/m16

o16 0F 00 /1

286+

16/32/64-bit

§

Str_r32m16

STR r32/m16

o32 0F 00 /1

386+

16/32/64-bit

§

Str_r64m16

STR r64/m16

o64 0F 00 /1

X64

64-bit

§

Lldt_rm16

LLDT r/m16

o16 0F 00 /2

286+

16/32/64-bit

§

Lldt_r32m16

LLDT r32/m16

o32 0F 00 /2

386+

16/32/64-bit

§

Lldt_r64m16

LLDT r64/m16

o64 0F 00 /2

X64

64-bit

§

Ltr_rm16

LTR r/m16

o16 0F 00 /3

286+

16/32/64-bit

§

Ltr_r32m16

LTR r32/m16

o32 0F 00 /3

386+

16/32/64-bit

§

Ltr_r64m16

LTR r64/m16

o64 0F 00 /3

X64

64-bit

§

Verr_rm16

VERR r/m16

o16 0F 00 /4

286+

16/32/64-bit

§

Verr_r32m16

VERR r32/m16

o32 0F 00 /4

386+

16/32/64-bit

§

Verr_r64m16

VERR r64/m16

o64 0F 00 /4

X64

64-bit

§

Verw_rm16

VERW r/m16

o16 0F 00 /5

286+

16/32/64-bit

§

Verw_r32m16

VERW r32/m16

o32 0F 00 /5

386+

16/32/64-bit

§

Verw_r64m16

VERW r64/m16

o64 0F 00 /5

X64

64-bit

§

Jmpe_rm16

JMPE r/m16

o16 0F 00 /6

IA-64

16/32-bit

§

Jmpe_rm32

JMPE r/m32

o32 0F 00 /6

IA-64

16/32-bit

§

Sgdt_m1632_16

SGDT m

o16 0F 01 /0

286+

16/32-bit

§

Sgdt_m1632

SGDT m

o32 0F 01 /0

386+

16/32-bit

§

Sgdt_m1664

SGDT m

0F 01 /0

X64

64-bit

§

Sidt_m1632_16

SIDT m

o16 0F 01 /1

286+

16/32-bit

§

Sidt_m1632

SIDT m

o32 0F 01 /1

386+

16/32-bit

§

Sidt_m1664

SIDT m

0F 01 /1

X64

64-bit

§

Lgdt_m1632_16

LGDT m16&32

o16 0F 01 /2

286+

16/32-bit

§

Lgdt_m1632

LGDT m16&32

o32 0F 01 /2

386+

16/32-bit

§

Lgdt_m1664

LGDT m16&64

0F 01 /2

X64

64-bit

§

Lidt_m1632_16

LIDT m16&32

o16 0F 01 /3

286+

16/32-bit

§

Lidt_m1632

LIDT m16&32

o32 0F 01 /3

386+

16/32-bit

§

Lidt_m1664

LIDT m16&64

0F 01 /3

X64

64-bit

§

Smsw_rm16

SMSW r/m16

o16 0F 01 /4

286+

16/32/64-bit

§

Smsw_r32m16

SMSW r32/m16

o32 0F 01 /4

386+

16/32/64-bit

§

Smsw_r64m16

SMSW r64/m16

o64 0F 01 /4

X64

64-bit

§

Rstorssp_m64

RSTORSSP m64

F3 0F 01 /5

CET_SS

16/32/64-bit

§

Lmsw_rm16

LMSW r/m16

o16 0F 01 /6

286+

16/32/64-bit

§

Lmsw_r32m16

LMSW r32/m16

o32 0F 01 /6

386+

16/32/64-bit

§

Lmsw_r64m16

LMSW r64/m16

o64 0F 01 /6

X64

64-bit

§

Invlpg_m

INVLPG m

0F 01 /7

486+

16/32/64-bit

§

Enclv

ENCLV

NP 0F 01 C0

OSS

16/32/64-bit

§

Vmcall

VMCALL

NP 0F 01 C1

VMX

16/32/64-bit

§

Vmlaunch

VMLAUNCH

NP 0F 01 C2

VMX

16/32/64-bit

§

Vmresume

VMRESUME

NP 0F 01 C3

VMX

16/32/64-bit

§

Vmxoff

VMXOFF

NP 0F 01 C4

VMX

16/32/64-bit

§

Pconfig

PCONFIG

NP 0F 01 C5

PCONFIG

16/32/64-bit

§

Monitorw

MONITOR

a16 NP 0F 01 C8

MONITOR

16/32-bit

§

Monitord

MONITOR

a32 NP 0F 01 C8

MONITOR

16/32/64-bit

§

Monitorq

MONITOR

a64 NP 0F 01 C8

MONITOR

64-bit

§

Mwait

MWAIT

NP 0F 01 C9

MONITOR

16/32/64-bit

§

Clac

CLAC

NP 0F 01 CA

SMAP

16/32/64-bit

§

Stac

STAC

NP 0F 01 CB

SMAP

16/32/64-bit

§

Encls

ENCLS

NP 0F 01 CF

SGX1

16/32/64-bit

§

Xgetbv

XGETBV

NP 0F 01 D0

XSAVE

16/32/64-bit

§

Xsetbv

XSETBV

NP 0F 01 D1

XSAVE

16/32/64-bit

§

Vmfunc

VMFUNC

NP 0F 01 D4

VMX

16/32/64-bit

§

Xend

XEND

NP 0F 01 D5

RTM

16/32/64-bit

§

Xtest

XTEST

NP 0F 01 D6

HLE or RTM

16/32/64-bit

§

Enclu

ENCLU

NP 0F 01 D7

SGX1

16/32/64-bit

§

Vmrunw

VMRUN

a16 0F 01 D8

SVM

16/32-bit

§

Vmrund

VMRUN

a32 0F 01 D8

SVM

16/32/64-bit

§

Vmrunq

VMRUN

a64 0F 01 D8

SVM

64-bit

§

Vmmcall

VMMCALL

0F 01 D9

SVM

16/32/64-bit

§

Vmloadw

VMLOAD

a16 0F 01 DA

SVM

16/32-bit

§

Vmloadd

VMLOAD

a32 0F 01 DA

SVM

16/32/64-bit

§

Vmloadq

VMLOAD

a64 0F 01 DA

SVM

64-bit

§

Vmsavew

VMSAVE

a16 0F 01 DB

SVM

16/32-bit

§

Vmsaved

VMSAVE

a32 0F 01 DB

SVM

16/32/64-bit

§

Vmsaveq

VMSAVE

a64 0F 01 DB

SVM

64-bit

§

Stgi

STGI

0F 01 DC

SKINIT or SVM

16/32/64-bit

§

Clgi

CLGI

0F 01 DD

SVM

16/32/64-bit

§

Skinit

SKINIT

0F 01 DE

SKINIT or SVM

16/32/64-bit

§

Invlpgaw

INVLPGA

a16 0F 01 DF

SVM

16/32-bit

§

Invlpgad

INVLPGA

a32 0F 01 DF

SVM

16/32/64-bit

§

Invlpgaq

INVLPGA

a64 0F 01 DF

SVM

64-bit

§

Setssbsy

SETSSBSY

F3 0F 01 E8

CET_SS

16/32/64-bit

§

Saveprevssp

SAVEPREVSSP

F3 0F 01 EA

CET_SS

16/32/64-bit

§

Rdpkru

RDPKRU

NP 0F 01 EE

PKU

16/32/64-bit

§

Wrpkru

WRPKRU

NP 0F 01 EF

PKU

16/32/64-bit

§

Swapgs

SWAPGS

0F 01 F8

X64

64-bit

§

Rdtscp

RDTSCP

0F 01 F9

RDTSCP

16/32/64-bit

§

Monitorxw

MONITORX

a16 NP 0F 01 FA

MONITORX

16/32-bit

§

Monitorxd

MONITORX

a32 NP 0F 01 FA

MONITORX

16/32/64-bit

§

Monitorxq

MONITORX

a64 NP 0F 01 FA

MONITORX

64-bit

§

Mcommit

MCOMMIT

F3 0F 01 FA

MCOMMIT

16/32/64-bit

§

Mwaitx

MWAITX

NP 0F 01 FB

MONITORX

16/32/64-bit

§

Clzerow

CLZERO

a16 0F 01 FC

CLZERO

16/32-bit

§

Clzerod

CLZERO

a32 0F 01 FC

CLZERO

16/32/64-bit

§

Clzeroq

CLZERO

a64 0F 01 FC

CLZERO

64-bit

§

Rdpru

RDPRU

NP 0F 01 FD

RDPRU

16/32/64-bit

§

Lar_r16_rm16

LAR r16, r/m16

o16 0F 02 /r

286+

16/32/64-bit

§

Lar_r32_r32m16

LAR r32, r32/m16

o32 0F 02 /r

386+

16/32/64-bit

§

Lar_r64_r64m16

LAR r64, r64/m16

o64 0F 02 /r

X64

64-bit

§

Lsl_r16_rm16

LSL r16, r/m16

o16 0F 03 /r

286+

16/32/64-bit

§

Lsl_r32_r32m16

LSL r32, r32/m16

o32 0F 03 /r

386+

16/32/64-bit

§

Lsl_r64_r64m16

LSL r64, r64/m16

o64 0F 03 /r

X64

64-bit

§

Storeall

STOREALL

0F 04

286

16/32-bit

§

Loadall286

LOADALL

0F 05

286

16/32-bit

§

Syscall

SYSCALL

0F 05

SYSCALL

16/32/64-bit

§

Clts

CLTS

0F 06

286+

16/32/64-bit

§

Loadall386

LOADALL

0F 07

386

16/32-bit

§

Sysretd

SYSRET

0F 07

SYSCALL

16/32/64-bit

§

Sysretq

SYSRETQ

o64 0F 07

SYSCALL

64-bit

§

Invd

INVD

0F 08

486+

16/32/64-bit

§

Wbinvd

WBINVD

0F 09

486+

16/32/64-bit

§

Wbnoinvd

WBNOINVD

F3 0F 09

WBNOINVD

16/32/64-bit

§

Cl1invmb

CL1INVMB

0F 0A

CL1INVMB

16/32-bit

§

Ud2

UD2

0F 0B

286+

16/32/64-bit

§

Reservednop_rm16_r16_0F0D

RESERVEDNOP r/m16, r16

o16 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F0D

RESERVEDNOP r/m32, r32

o32 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F0D

RESERVEDNOP r/m64, r64

o64 0F 0D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Prefetch_m8

PREFETCH m8

0F 0D /0

PREFETCHW

16/32/64-bit

§

Prefetchw_m8

PREFETCHW m8

0F 0D /1

PREFETCHW

16/32/64-bit

§

Prefetchwt1_m8

PREFETCHWT1 m8

0F 0D /2

PREFETCHWT1

16/32/64-bit

§

Femms

FEMMS

0F 0E

3DNOW

16/32/64-bit

§

Umov_rm8_r8

UMOV r/m8, r8

0F 10 /r

386/486

16/32-bit

§

Umov_rm16_r16

UMOV r/m16, r16

o16 0F 11 /r

386/486

16/32-bit

§

Umov_rm32_r32

UMOV r/m32, r32

o32 0F 11 /r

386/486

16/32-bit

§

Umov_r8_rm8

UMOV r8, r/m8

0F 12 /r

386/486

16/32-bit

§

Umov_r16_rm16

UMOV r16, r/m16

o16 0F 13 /r

386/486

16/32-bit

§

Umov_r32_rm32

UMOV r32, r/m32

o32 0F 13 /r

386/486

16/32-bit

§

Movups_xmm_xmmm128

MOVUPS xmm1, xmm2/m128

NP 0F 10 /r

SSE

16/32/64-bit

§

VEX_Vmovups_xmm_xmmm128

VMOVUPS xmm1, xmm2/m128

VEX.128.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovups_ymm_ymmm256

VMOVUPS ymm1, ymm2/m256

VEX.256.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovups_xmm_k1z_xmmm128

VMOVUPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.0F.W0 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_ymm_k1z_ymmm256

VMOVUPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.0F.W0 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_zmm_k1z_zmmm512

VMOVUPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.0F.W0 10 /r

AVX512F

16/32/64-bit

§

Movupd_xmm_xmmm128

MOVUPD xmm1, xmm2/m128

66 0F 10 /r

SSE2

16/32/64-bit

§

VEX_Vmovupd_xmm_xmmm128

VMOVUPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovupd_ymm_ymmm256

VMOVUPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovupd_xmm_k1z_xmmm128

VMOVUPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_ymm_k1z_ymmm256

VMOVUPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 10 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_zmm_k1z_zmmm512

VMOVUPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 10 /r

AVX512F

16/32/64-bit

§

Movss_xmm_xmmm32

MOVSS xmm1, xmm2/m32

F3 0F 10 /r

SSE

16/32/64-bit

§

VEX_Vmovss_xmm_xmm_xmm

VMOVSS xmm1, xmm2, xmm3

VEX.LIG.F3.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovss_xmm_m32

VMOVSS xmm1, m32

VEX.LIG.F3.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_xmm_xmm

VMOVSS xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.0F.W0 10 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_m32

VMOVSS xmm1 {k1}{z}, m32

EVEX.LIG.F3.0F.W0 10 /r

AVX512F

16/32/64-bit

§

Movsd_xmm_xmmm64

MOVSD xmm1, xmm2/m64

F2 0F 10 /r

SSE2

16/32/64-bit

§

VEX_Vmovsd_xmm_xmm_xmm

VMOVSD xmm1, xmm2, xmm3

VEX.LIG.F2.0F.WIG 10 /r

AVX

16/32/64-bit

§

VEX_Vmovsd_xmm_m64

VMOVSD xmm1, m64

VEX.LIG.F2.0F.WIG 10 /r

AVX

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_xmm_xmm

VMOVSD xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F2.0F.W1 10 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_m64

VMOVSD xmm1 {k1}{z}, m64

EVEX.LIG.F2.0F.W1 10 /r

AVX512F

16/32/64-bit

§

Movups_xmmm128_xmm

MOVUPS xmm2/m128, xmm1

NP 0F 11 /r

SSE

16/32/64-bit

§

VEX_Vmovups_xmmm128_xmm

VMOVUPS xmm2/m128, xmm1

VEX.128.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovups_ymmm256_ymm

VMOVUPS ymm2/m256, ymm1

VEX.256.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovups_xmmm128_k1z_xmm

VMOVUPS xmm2/m128 {k1}{z}, xmm1

EVEX.128.0F.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_ymmm256_k1z_ymm

VMOVUPS ymm2/m256 {k1}{z}, ymm1

EVEX.256.0F.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovups_zmmm512_k1z_zmm

VMOVUPS zmm2/m512 {k1}{z}, zmm1

EVEX.512.0F.W0 11 /r

AVX512F

16/32/64-bit

§

Movupd_xmmm128_xmm

MOVUPD xmm2/m128, xmm1

66 0F 11 /r

SSE2

16/32/64-bit

§

VEX_Vmovupd_xmmm128_xmm

VMOVUPD xmm2/m128, xmm1

VEX.128.66.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovupd_ymmm256_ymm

VMOVUPD ymm2/m256, ymm1

VEX.256.66.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovupd_xmmm128_k1z_xmm

VMOVUPD xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_ymmm256_k1z_ymm

VMOVUPD ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovupd_zmmm512_k1z_zmm

VMOVUPD zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 11 /r

AVX512F

16/32/64-bit

§

Movss_xmmm32_xmm

MOVSS xmm2/m32, xmm1

F3 0F 11 /r

SSE

16/32/64-bit

§

VEX_Vmovss_xmm_xmm_xmm_0F11

VMOVSS xmm1, xmm2, xmm3

VEX.LIG.F3.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovss_m32_xmm

VMOVSS m32, xmm1

VEX.LIG.F3.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovss_xmm_k1z_xmm_xmm_0F11

VMOVSS xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.0F.W0 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovss_m32_k1_xmm

VMOVSS m32 {k1}, xmm1

EVEX.LIG.F3.0F.W0 11 /r

AVX512F

16/32/64-bit

§

Movsd_xmmm64_xmm

MOVSD xmm1/m64, xmm2

F2 0F 11 /r

SSE2

16/32/64-bit

§

VEX_Vmovsd_xmm_xmm_xmm_0F11

VMOVSD xmm1, xmm2, xmm3

VEX.LIG.F2.0F.WIG 11 /r

AVX

16/32/64-bit

§

VEX_Vmovsd_m64_xmm

VMOVSD m64, xmm1

VEX.LIG.F2.0F.WIG 11 /r

AVX

16/32/64-bit

§

EVEX_Vmovsd_xmm_k1z_xmm_xmm_0F11

VMOVSD xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F2.0F.W1 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovsd_m64_k1_xmm

VMOVSD m64 {k1}, xmm1

EVEX.LIG.F2.0F.W1 11 /r

AVX512F

16/32/64-bit

§

Movhlps_xmm_xmm

MOVHLPS xmm1, xmm2

NP 0F 12 /r

SSE

16/32/64-bit

§

Movlps_xmm_m64

MOVLPS xmm1, m64

NP 0F 12 /r

SSE

16/32/64-bit

§

VEX_Vmovhlps_xmm_xmm_xmm

VMOVHLPS xmm1, xmm2, xmm3

VEX.128.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovlps_xmm_xmm_m64

VMOVLPS xmm2, xmm1, m64

VEX.128.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovhlps_xmm_xmm_xmm

VMOVHLPS xmm1, xmm2, xmm3

EVEX.128.0F.W0 12 /r

AVX512F

16/32/64-bit

§

EVEX_Vmovlps_xmm_xmm_m64

VMOVLPS xmm2, xmm1, m64

EVEX.128.0F.W0 12 /r

AVX512F

16/32/64-bit

§

Movlpd_xmm_m64

MOVLPD xmm1, m64

66 0F 12 /r

SSE2

16/32/64-bit

§

VEX_Vmovlpd_xmm_xmm_m64

VMOVLPD xmm2, xmm1, m64

VEX.128.66.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovlpd_xmm_xmm_m64

VMOVLPD xmm2, xmm1, m64

EVEX.128.66.0F.W1 12 /r

AVX512F

16/32/64-bit

§

Movsldup_xmm_xmmm128

MOVSLDUP xmm1, xmm2/m128

F3 0F 12 /r

SSE3

16/32/64-bit

§

VEX_Vmovsldup_xmm_xmmm128

VMOVSLDUP xmm1, xmm2/m128

VEX.128.F3.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovsldup_ymm_ymmm256

VMOVSLDUP ymm1, ymm2/m256

VEX.256.F3.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovsldup_xmm_k1z_xmmm128

VMOVSLDUP xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovsldup_ymm_k1z_ymmm256

VMOVSLDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovsldup_zmm_k1z_zmmm512

VMOVSLDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 12 /r

AVX512F

16/32/64-bit

§

Movddup_xmm_xmmm64

MOVDDUP xmm1, xmm2/m64

F2 0F 12 /r

SSE3

16/32/64-bit

§

VEX_Vmovddup_xmm_xmmm64

VMOVDDUP xmm1, xmm2/m64

VEX.128.F2.0F.WIG 12 /r

AVX

16/32/64-bit

§

VEX_Vmovddup_ymm_ymmm256

VMOVDDUP ymm1, ymm2/m256

VEX.256.F2.0F.WIG 12 /r

AVX

16/32/64-bit

§

EVEX_Vmovddup_xmm_k1z_xmmm64

VMOVDDUP xmm1 {k1}{z}, xmm2/m64

EVEX.128.F2.0F.W1 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovddup_ymm_k1z_ymmm256

VMOVDDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W1 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovddup_zmm_k1z_zmmm512

VMOVDDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W1 12 /r

AVX512F

16/32/64-bit

§

Movlps_m64_xmm

MOVLPS m64, xmm1

NP 0F 13 /r

SSE

16/32/64-bit

§

VEX_Vmovlps_m64_xmm

VMOVLPS m64, xmm1

VEX.128.0F.WIG 13 /r

AVX

16/32/64-bit

§

EVEX_Vmovlps_m64_xmm

VMOVLPS m64, xmm1

EVEX.128.0F.W0 13 /r

AVX512F

16/32/64-bit

§

Movlpd_m64_xmm

MOVLPD m64, xmm1

66 0F 13 /r

SSE2

16/32/64-bit

§

VEX_Vmovlpd_m64_xmm

VMOVLPD m64, xmm1

VEX.128.66.0F.WIG 13 /r

AVX

16/32/64-bit

§

EVEX_Vmovlpd_m64_xmm

VMOVLPD m64, xmm1

EVEX.128.66.0F.W1 13 /r

AVX512F

16/32/64-bit

§

Unpcklps_xmm_xmmm128

UNPCKLPS xmm1, xmm2/m128

NP 0F 14 /r

SSE

16/32/64-bit

§

VEX_Vunpcklps_xmm_xmm_xmmm128

VUNPCKLPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 14 /r

AVX

16/32/64-bit

§

VEX_Vunpcklps_ymm_ymm_ymmm256

VUNPCKLPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 14 /r

AVX

16/32/64-bit

§

EVEX_Vunpcklps_xmm_k1z_xmm_xmmm128b32

VUNPCKLPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklps_ymm_k1z_ymm_ymmm256b32

VUNPCKLPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklps_zmm_k1z_zmm_zmmm512b32

VUNPCKLPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 14 /r

AVX512F

16/32/64-bit

§

Unpcklpd_xmm_xmmm128

UNPCKLPD xmm1, xmm2/m128

66 0F 14 /r

SSE2

16/32/64-bit

§

VEX_Vunpcklpd_xmm_xmm_xmmm128

VUNPCKLPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 14 /r

AVX

16/32/64-bit

§

VEX_Vunpcklpd_ymm_ymm_ymmm256

VUNPCKLPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 14 /r

AVX

16/32/64-bit

§

EVEX_Vunpcklpd_xmm_k1z_xmm_xmmm128b64

VUNPCKLPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklpd_ymm_k1z_ymm_ymmm256b64

VUNPCKLPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpcklpd_zmm_k1z_zmm_zmmm512b64

VUNPCKLPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 14 /r

AVX512F

16/32/64-bit

§

Unpckhps_xmm_xmmm128

UNPCKHPS xmm1, xmm2/m128

NP 0F 15 /r

SSE

16/32/64-bit

§

VEX_Vunpckhps_xmm_xmm_xmmm128

VUNPCKHPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 15 /r

AVX

16/32/64-bit

§

VEX_Vunpckhps_ymm_ymm_ymmm256

VUNPCKHPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 15 /r

AVX

16/32/64-bit

§

EVEX_Vunpckhps_xmm_k1z_xmm_xmmm128b32

VUNPCKHPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhps_ymm_k1z_ymm_ymmm256b32

VUNPCKHPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhps_zmm_k1z_zmm_zmmm512b32

VUNPCKHPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 15 /r

AVX512F

16/32/64-bit

§

Unpckhpd_xmm_xmmm128

UNPCKHPD xmm1, xmm2/m128

66 0F 15 /r

SSE2

16/32/64-bit

§

VEX_Vunpckhpd_xmm_xmm_xmmm128

VUNPCKHPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 15 /r

AVX

16/32/64-bit

§

VEX_Vunpckhpd_ymm_ymm_ymmm256

VUNPCKHPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 15 /r

AVX

16/32/64-bit

§

EVEX_Vunpckhpd_xmm_k1z_xmm_xmmm128b64

VUNPCKHPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhpd_ymm_k1z_ymm_ymmm256b64

VUNPCKHPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vunpckhpd_zmm_k1z_zmm_zmmm512b64

VUNPCKHPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 15 /r

AVX512F

16/32/64-bit

§

Movlhps_xmm_xmm

MOVLHPS xmm1, xmm2

NP 0F 16 /r

SSE

16/32/64-bit

§

VEX_Vmovlhps_xmm_xmm_xmm

VMOVLHPS xmm1, xmm2, xmm3

VEX.128.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovlhps_xmm_xmm_xmm

VMOVLHPS xmm1, xmm2, xmm3

EVEX.128.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhps_xmm_m64

MOVHPS xmm1, m64

NP 0F 16 /r

SSE

16/32/64-bit

§

VEX_Vmovhps_xmm_xmm_m64

VMOVHPS xmm2, xmm1, m64

VEX.128.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovhps_xmm_xmm_m64

VMOVHPS xmm2, xmm1, m64

EVEX.128.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhpd_xmm_m64

MOVHPD xmm1, m64

66 0F 16 /r

SSE2

16/32/64-bit

§

VEX_Vmovhpd_xmm_xmm_m64

VMOVHPD xmm2, xmm1, m64

VEX.128.66.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovhpd_xmm_xmm_m64

VMOVHPD xmm2, xmm1, m64

EVEX.128.66.0F.W1 16 /r

AVX512F

16/32/64-bit

§

Movshdup_xmm_xmmm128

MOVSHDUP xmm1, xmm2/m128

F3 0F 16 /r

SSE3

16/32/64-bit

§

VEX_Vmovshdup_xmm_xmmm128

VMOVSHDUP xmm1, xmm2/m128

VEX.128.F3.0F.WIG 16 /r

AVX

16/32/64-bit

§

VEX_Vmovshdup_ymm_ymmm256

VMOVSHDUP ymm1, ymm2/m256

VEX.256.F3.0F.WIG 16 /r

AVX

16/32/64-bit

§

EVEX_Vmovshdup_xmm_k1z_xmmm128

VMOVSHDUP xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovshdup_ymm_k1z_ymmm256

VMOVSHDUP ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovshdup_zmm_k1z_zmmm512

VMOVSHDUP zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 16 /r

AVX512F

16/32/64-bit

§

Movhps_m64_xmm

MOVHPS m64, xmm1

NP 0F 17 /r

SSE

16/32/64-bit

§

VEX_Vmovhps_m64_xmm

VMOVHPS m64, xmm1

VEX.128.0F.WIG 17 /r

AVX

16/32/64-bit

§

EVEX_Vmovhps_m64_xmm

VMOVHPS m64, xmm1

EVEX.128.0F.W0 17 /r

AVX512F

16/32/64-bit

§

Movhpd_m64_xmm

MOVHPD m64, xmm1

66 0F 17 /r

SSE2

16/32/64-bit

§

VEX_Vmovhpd_m64_xmm

VMOVHPD m64, xmm1

VEX.128.66.0F.WIG 17 /r

AVX

16/32/64-bit

§

EVEX_Vmovhpd_m64_xmm

VMOVHPD m64, xmm1

EVEX.128.66.0F.W1 17 /r

AVX512F

16/32/64-bit

§

Reservednop_rm16_r16_0F18

RESERVEDNOP r/m16, r16

o16 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F18

RESERVEDNOP r/m32, r32

o32 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F18

RESERVEDNOP r/m64, r64

o64 0F 18 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F19

RESERVEDNOP r/m16, r16

o16 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F19

RESERVEDNOP r/m32, r32

o32 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F19

RESERVEDNOP r/m64, r64

o64 0F 19 /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1A

RESERVEDNOP r/m16, r16

o16 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1A

RESERVEDNOP r/m32, r32

o32 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1A

RESERVEDNOP r/m64, r64

o64 0F 1A /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1B

RESERVEDNOP r/m16, r16

o16 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1B

RESERVEDNOP r/m32, r32

o32 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1B

RESERVEDNOP r/m64, r64

o64 0F 1B /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1C

RESERVEDNOP r/m16, r16

o16 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1C

RESERVEDNOP r/m32, r32

o32 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1C

RESERVEDNOP r/m64, r64

o64 0F 1C /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1D

RESERVEDNOP r/m16, r16

o16 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1D

RESERVEDNOP r/m32, r32

o32 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1D

RESERVEDNOP r/m64, r64

o64 0F 1D /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1E

RESERVEDNOP r/m16, r16

o16 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1E

RESERVEDNOP r/m32, r32

o32 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1E

RESERVEDNOP r/m64, r64

o64 0F 1E /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Reservednop_rm16_r16_0F1F

RESERVEDNOP r/m16, r16

o16 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm32_r32_0F1F

RESERVEDNOP r/m32, r32

o32 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Reservednop_rm64_r64_0F1F

RESERVEDNOP r/m64, r64

o64 0F 1F /r

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Prefetchnta_m8

PREFETCHNTA m8

0F 18 /0

SSE

16/32/64-bit

§

Prefetcht0_m8

PREFETCHT0 m8

0F 18 /1

SSE

16/32/64-bit

§

Prefetcht1_m8

PREFETCHT1 m8

0F 18 /2

SSE

16/32/64-bit

§

Prefetcht2_m8

PREFETCHT2 m8

0F 18 /3

SSE

16/32/64-bit

§

Bndldx_bnd_mib

BNDLDX bnd, mib

NP 0F 1A /r

MPX

16/32/64-bit

§

Bndmov_bnd_bndm64

BNDMOV bnd1, bnd2/m64

66 0F 1A /r

MPX

16/32-bit

§

Bndmov_bnd_bndm128

BNDMOV bnd1, bnd2/m128

66 0F 1A /r

MPX

64-bit

§

Bndcl_bnd_rm32

BNDCL bnd, r/m32

F3 0F 1A /r

MPX

16/32-bit

§

Bndcl_bnd_rm64

BNDCL bnd, r/m64

F3 0F 1A /r

MPX

64-bit

§

Bndcu_bnd_rm32

BNDCU bnd, r/m32

F2 0F 1A /r

MPX

16/32-bit

§

Bndcu_bnd_rm64

BNDCU bnd, r/m64

F2 0F 1A /r

MPX

64-bit

§

Bndstx_mib_bnd

BNDSTX mib, bnd

NP 0F 1B /r

MPX

16/32/64-bit

§

Bndmov_bndm64_bnd

BNDMOV bnd1/m64, bnd2

66 0F 1B /r

MPX

16/32-bit

§

Bndmov_bndm128_bnd

BNDMOV bnd1/m128, bnd2

66 0F 1B /r

MPX

64-bit

§

Bndmk_bnd_m32

BNDMK bnd, m32

F3 0F 1B /r

MPX

16/32-bit

§

Bndmk_bnd_m64

BNDMK bnd, m64

F3 0F 1B /r

MPX

64-bit

§

Bndcn_bnd_rm32

BNDCN bnd, r/m32

F2 0F 1B /r

MPX

16/32-bit

§

Bndcn_bnd_rm64

BNDCN bnd, r/m64

F2 0F 1B /r

MPX

64-bit

§

Cldemote_m8

CLDEMOTE m8

NP 0F 1C /0

CLDEMOTE

16/32/64-bit

§

Rdsspd_r32

RDSSPD r32

F3 0F 1E /1

CET_SS

16/32/64-bit

§

Rdsspq_r64

RDSSPQ r64

F3 o64 0F 1E /1

CET_SS

64-bit

§

Endbr64

ENDBR64

F3 0F 1E FA

CET_IBT

16/32/64-bit

§

Endbr32

ENDBR32

F3 0F 1E FB

CET_IBT

16/32/64-bit

§

Nop_rm16

NOP r/m16

o16 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Nop_rm32

NOP r/m32

o32 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

16/32/64-bit

§

Nop_rm64

NOP r/m64

o64 0F 1F /0

CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B

64-bit

§

Mov_r32_cr

MOV r32, cr

0F 20 /r

386+

16/32-bit

§

Mov_r64_cr

MOV r64, cr

0F 20 /r

X64

64-bit

§

Mov_r32_dr

MOV r32, dr

0F 21 /r

386+

16/32-bit

§

Mov_r64_dr

MOV r64, dr

0F 21 /r

X64

64-bit

§

Mov_cr_r32

MOV cr, r32

0F 22 /r

386+

16/32-bit

§

Mov_cr_r64

MOV cr, r64

0F 22 /r

X64

64-bit

§

Mov_dr_r32

MOV dr, r32

0F 23 /r

386+

16/32-bit

§

Mov_dr_r64

MOV dr, r64

0F 23 /r

X64

64-bit

§

Mov_r32_tr

MOV r32, tr

0F 24 /r

386/486/Cyrix/Geode

16/32-bit

§

Mov_tr_r32

MOV tr, r32

0F 26 /r

386/486/Cyrix/Geode

16/32-bit

§

Movaps_xmm_xmmm128

MOVAPS xmm1, xmm2/m128

NP 0F 28 /r

SSE

16/32/64-bit

§

VEX_Vmovaps_xmm_xmmm128

VMOVAPS xmm1, xmm2/m128

VEX.128.0F.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vmovaps_ymm_ymmm256

VMOVAPS ymm1, ymm2/m256

VEX.256.0F.WIG 28 /r

AVX

16/32/64-bit

§

EVEX_Vmovaps_xmm_k1z_xmmm128

VMOVAPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.0F.W0 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_ymm_k1z_ymmm256

VMOVAPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.0F.W0 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_zmm_k1z_zmmm512

VMOVAPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.0F.W0 28 /r

AVX512F

16/32/64-bit

§

Movapd_xmm_xmmm128

MOVAPD xmm1, xmm2/m128

66 0F 28 /r

SSE2

16/32/64-bit

§

VEX_Vmovapd_xmm_xmmm128

VMOVAPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vmovapd_ymm_ymmm256

VMOVAPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 28 /r

AVX

16/32/64-bit

§

EVEX_Vmovapd_xmm_k1z_xmmm128

VMOVAPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_ymm_k1z_ymmm256

VMOVAPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_zmm_k1z_zmmm512

VMOVAPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 28 /r

AVX512F

16/32/64-bit

§

Movaps_xmmm128_xmm

MOVAPS xmm2/m128, xmm1

NP 0F 29 /r

SSE

16/32/64-bit

§

VEX_Vmovaps_xmmm128_xmm

VMOVAPS xmm2/m128, xmm1

VEX.128.0F.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vmovaps_ymmm256_ymm

VMOVAPS ymm2/m256, ymm1

VEX.256.0F.WIG 29 /r

AVX

16/32/64-bit

§

EVEX_Vmovaps_xmmm128_k1z_xmm

VMOVAPS xmm2/m128 {k1}{z}, xmm1

EVEX.128.0F.W0 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_ymmm256_k1z_ymm

VMOVAPS ymm2/m256 {k1}{z}, ymm1

EVEX.256.0F.W0 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovaps_zmmm512_k1z_zmm

VMOVAPS zmm2/m512 {k1}{z}, zmm1

EVEX.512.0F.W0 29 /r

AVX512F

16/32/64-bit

§

Movapd_xmmm128_xmm

MOVAPD xmm2/m128, xmm1

66 0F 29 /r

SSE2

16/32/64-bit

§

VEX_Vmovapd_xmmm128_xmm

VMOVAPD xmm2/m128, xmm1

VEX.128.66.0F.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vmovapd_ymmm256_ymm

VMOVAPD ymm2/m256, ymm1

VEX.256.66.0F.WIG 29 /r

AVX

16/32/64-bit

§

EVEX_Vmovapd_xmmm128_k1z_xmm

VMOVAPD xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_ymmm256_k1z_ymm

VMOVAPD ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovapd_zmmm512_k1z_zmm

VMOVAPD zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 29 /r

AVX512F

16/32/64-bit

§

Cvtpi2ps_xmm_mmm64

CVTPI2PS xmm, mm/m64

NP 0F 2A /r

SSE

16/32/64-bit

§

Cvtpi2pd_xmm_mmm64

CVTPI2PD xmm, mm/m64

66 0F 2A /r

SSE2

16/32/64-bit

§

Cvtsi2ss_xmm_rm32

CVTSI2SS xmm1, r/m32

F3 0F 2A /r

SSE

16/32/64-bit

§

Cvtsi2ss_xmm_rm64

CVTSI2SS xmm1, r/m64

F3 o64 0F 2A /r

SSE

64-bit

§

VEX_Vcvtsi2ss_xmm_xmm_rm32

VCVTSI2SS xmm1, xmm2, r/m32

VEX.LIG.F3.0F.W0 2A /r

AVX

16/32/64-bit

§

VEX_Vcvtsi2ss_xmm_xmm_rm64

VCVTSI2SS xmm1, xmm2, r/m64

VEX.LIG.F3.0F.W1 2A /r

AVX

64-bit

§

EVEX_Vcvtsi2ss_xmm_xmm_rm32_er

VCVTSI2SS xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.0F.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsi2ss_xmm_xmm_rm64_er

VCVTSI2SS xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.0F.W1 2A /r

AVX512F

64-bit

§

Cvtsi2sd_xmm_rm32

CVTSI2SD xmm1, r/m32

F2 0F 2A /r

SSE2

16/32/64-bit

§

Cvtsi2sd_xmm_rm64

CVTSI2SD xmm1, r/m64

F2 o64 0F 2A /r

SSE2

64-bit

§

VEX_Vcvtsi2sd_xmm_xmm_rm32

VCVTSI2SD xmm1, xmm2, r/m32

VEX.LIG.F2.0F.W0 2A /r

AVX

16/32/64-bit

§

VEX_Vcvtsi2sd_xmm_xmm_rm64

VCVTSI2SD xmm1, xmm2, r/m64

VEX.LIG.F2.0F.W1 2A /r

AVX

64-bit

§

EVEX_Vcvtsi2sd_xmm_xmm_rm32_er

VCVTSI2SD xmm1, xmm2, r/m32{er}

EVEX.LIG.F2.0F.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsi2sd_xmm_xmm_rm64_er

VCVTSI2SD xmm1, xmm2, r/m64{er}

EVEX.LIG.F2.0F.W1 2A /r

AVX512F

64-bit

§

Movntps_m128_xmm

MOVNTPS m128, xmm1

NP 0F 2B /r

SSE

16/32/64-bit

§

VEX_Vmovntps_m128_xmm

VMOVNTPS m128, xmm1

VEX.128.0F.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vmovntps_m256_ymm

VMOVNTPS m256, ymm1

VEX.256.0F.WIG 2B /r

AVX

16/32/64-bit

§

EVEX_Vmovntps_m128_xmm

VMOVNTPS m128, xmm1

EVEX.128.0F.W0 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntps_m256_ymm

VMOVNTPS m256, ymm1

EVEX.256.0F.W0 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntps_m512_zmm

VMOVNTPS m512, zmm1

EVEX.512.0F.W0 2B /r

AVX512F

16/32/64-bit

§

Movntpd_m128_xmm

MOVNTPD m128, xmm1

66 0F 2B /r

SSE2

16/32/64-bit

§

VEX_Vmovntpd_m128_xmm

VMOVNTPD m128, xmm1

VEX.128.66.0F.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vmovntpd_m256_ymm

VMOVNTPD m256, ymm1

VEX.256.66.0F.WIG 2B /r

AVX

16/32/64-bit

§

EVEX_Vmovntpd_m128_xmm

VMOVNTPD m128, xmm1

EVEX.128.66.0F.W1 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntpd_m256_ymm

VMOVNTPD m256, ymm1

EVEX.256.66.0F.W1 2B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntpd_m512_zmm

VMOVNTPD m512, zmm1

EVEX.512.66.0F.W1 2B /r

AVX512F

16/32/64-bit

§

Movntss_m32_xmm

MOVNTSS m32, xmm1

F3 0F 2B /r

SSE4A

16/32/64-bit

§

Movntsd_m64_xmm

MOVNTSD m64, xmm1

F2 0F 2B /r

SSE4A

16/32/64-bit

§

Cvttps2pi_mm_xmmm64

CVTTPS2PI mm, xmm/m64

NP 0F 2C /r

SSE

16/32/64-bit

§

Cvttpd2pi_mm_xmmm128

CVTTPD2PI mm, xmm/m128

66 0F 2C /r

SSE2

16/32/64-bit

§

Cvttss2si_r32_xmmm32

CVTTSS2SI r32, xmm1/m32

F3 0F 2C /r

SSE

16/32/64-bit

§

Cvttss2si_r64_xmmm32

CVTTSS2SI r64, xmm1/m32

F3 o64 0F 2C /r

SSE

64-bit

§

VEX_Vcvttss2si_r32_xmmm32

VCVTTSS2SI r32, xmm1/m32

VEX.LIG.F3.0F.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vcvttss2si_r64_xmmm32

VCVTTSS2SI r64, xmm1/m32

VEX.LIG.F3.0F.W1 2C /r

AVX

64-bit

§

EVEX_Vcvttss2si_r32_xmmm32_sae

VCVTTSS2SI r32, xmm1/m32{sae}

EVEX.LIG.F3.0F.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttss2si_r64_xmmm32_sae

VCVTTSS2SI r64, xmm1/m32{sae}

EVEX.LIG.F3.0F.W1 2C /r

AVX512F

64-bit

§

Cvttsd2si_r32_xmmm64

CVTTSD2SI r32, xmm1/m64

F2 0F 2C /r

SSE2

16/32/64-bit

§

Cvttsd2si_r64_xmmm64

CVTTSD2SI r64, xmm1/m64

F2 o64 0F 2C /r

SSE2

64-bit

§

VEX_Vcvttsd2si_r32_xmmm64

VCVTTSD2SI r32, xmm1/m64

VEX.LIG.F2.0F.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vcvttsd2si_r64_xmmm64

VCVTTSD2SI r64, xmm1/m64

VEX.LIG.F2.0F.W1 2C /r

AVX

64-bit

§

EVEX_Vcvttsd2si_r32_xmmm64_sae

VCVTTSD2SI r32, xmm1/m64{sae}

EVEX.LIG.F2.0F.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttsd2si_r64_xmmm64_sae

VCVTTSD2SI r64, xmm1/m64{sae}

EVEX.LIG.F2.0F.W1 2C /r

AVX512F

64-bit

§

Cvtps2pi_mm_xmmm64

CVTPS2PI mm, xmm/m64

NP 0F 2D /r

SSE

16/32/64-bit

§

Cvtpd2pi_mm_xmmm128

CVTPD2PI mm, xmm/m128

66 0F 2D /r

SSE2

16/32/64-bit

§

Cvtss2si_r32_xmmm32

CVTSS2SI r32, xmm1/m32

F3 0F 2D /r

SSE

16/32/64-bit

§

Cvtss2si_r64_xmmm32

CVTSS2SI r64, xmm1/m32

F3 o64 0F 2D /r

SSE

64-bit

§

VEX_Vcvtss2si_r32_xmmm32

VCVTSS2SI r32, xmm1/m32

VEX.LIG.F3.0F.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vcvtss2si_r64_xmmm32

VCVTSS2SI r64, xmm1/m32

VEX.LIG.F3.0F.W1 2D /r

AVX

64-bit

§

EVEX_Vcvtss2si_r32_xmmm32_er

VCVTSS2SI r32, xmm1/m32{er}

EVEX.LIG.F3.0F.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtss2si_r64_xmmm32_er

VCVTSS2SI r64, xmm1/m32{er}

EVEX.LIG.F3.0F.W1 2D /r

AVX512F

64-bit

§

Cvtsd2si_r32_xmmm64

CVTSD2SI r32, xmm1/m64

F2 0F 2D /r

SSE2

16/32/64-bit

§

Cvtsd2si_r64_xmmm64

CVTSD2SI r64, xmm1/m64

F2 o64 0F 2D /r

SSE2

64-bit

§

VEX_Vcvtsd2si_r32_xmmm64

VCVTSD2SI r32, xmm1/m64

VEX.LIG.F2.0F.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vcvtsd2si_r64_xmmm64

VCVTSD2SI r64, xmm1/m64

VEX.LIG.F2.0F.W1 2D /r

AVX

64-bit

§

EVEX_Vcvtsd2si_r32_xmmm64_er

VCVTSD2SI r32, xmm1/m64{er}

EVEX.LIG.F2.0F.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsd2si_r64_xmmm64_er

VCVTSD2SI r64, xmm1/m64{er}

EVEX.LIG.F2.0F.W1 2D /r

AVX512F

64-bit

§

Ucomiss_xmm_xmmm32

UCOMISS xmm1, xmm2/m32

NP 0F 2E /r

SSE

16/32/64-bit

§

VEX_Vucomiss_xmm_xmmm32

VUCOMISS xmm1, xmm2/m32

VEX.LIG.0F.WIG 2E /r

AVX

16/32/64-bit

§

EVEX_Vucomiss_xmm_xmmm32_sae

VUCOMISS xmm1, xmm2/m32{sae}

EVEX.LIG.0F.W0 2E /r

AVX512F

16/32/64-bit

§

Ucomisd_xmm_xmmm64

UCOMISD xmm1, xmm2/m64

66 0F 2E /r

SSE2

16/32/64-bit

§

VEX_Vucomisd_xmm_xmmm64

VUCOMISD xmm1, xmm2/m64

VEX.LIG.66.0F.WIG 2E /r

AVX

16/32/64-bit

§

EVEX_Vucomisd_xmm_xmmm64_sae

VUCOMISD xmm1, xmm2/m64{sae}

EVEX.LIG.66.0F.W1 2E /r

AVX512F

16/32/64-bit

§

Comiss_xmm_xmmm32

COMISS xmm1, xmm2/m32

NP 0F 2F /r

SSE

16/32/64-bit

§

Comisd_xmm_xmmm64

COMISD xmm1, xmm2/m64

66 0F 2F /r

SSE2

16/32/64-bit

§

VEX_Vcomiss_xmm_xmmm32

VCOMISS xmm1, xmm2/m32

VEX.LIG.0F.WIG 2F /r

AVX

16/32/64-bit

§

VEX_Vcomisd_xmm_xmmm64

VCOMISD xmm1, xmm2/m64

VEX.LIG.66.0F.WIG 2F /r

AVX

16/32/64-bit

§

EVEX_Vcomiss_xmm_xmmm32_sae

VCOMISS xmm1, xmm2/m32{sae}

EVEX.LIG.0F.W0 2F /r

AVX512F

16/32/64-bit

§

EVEX_Vcomisd_xmm_xmmm64_sae

VCOMISD xmm1, xmm2/m64{sae}

EVEX.LIG.66.0F.W1 2F /r

AVX512F

16/32/64-bit

§

Wrmsr

WRMSR

0F 30

MSR

16/32/64-bit

§

Rdtsc

RDTSC

0F 31

TSC

16/32/64-bit

§

Rdmsr

RDMSR

0F 32

MSR

16/32/64-bit

§

Rdpmc

RDPMC

0F 33

Pentium MMX or later, or Pentium Pro or later

16/32/64-bit

§

Sysenter

SYSENTER

0F 34

SEP

16/32/64-bit

§

Sysexitd

SYSEXIT

0F 35

SEP

16/32/64-bit

§

Sysexitq

SYSEXITQ

o64 0F 35

SEP

64-bit

§

Getsecd

GETSEC

NP 0F 37

SMX

16/32/64-bit

§

Cmovo_r16_rm16

CMOVO r16, r/m16

o16 0F 40 /r

CMOV

16/32/64-bit

§

Cmovo_r32_rm32

CMOVO r32, r/m32

o32 0F 40 /r

CMOV

16/32/64-bit

§

Cmovo_r64_rm64

CMOVO r64, r/m64

o64 0F 40 /r

CMOV

64-bit

§

Cmovno_r16_rm16

CMOVNO r16, r/m16

o16 0F 41 /r

CMOV

16/32/64-bit

§

Cmovno_r32_rm32

CMOVNO r32, r/m32

o32 0F 41 /r

CMOV

16/32/64-bit

§

Cmovno_r64_rm64

CMOVNO r64, r/m64

o64 0F 41 /r

CMOV

64-bit

§

Cmovb_r16_rm16

CMOVB r16, r/m16

o16 0F 42 /r

CMOV

16/32/64-bit

§

Cmovb_r32_rm32

CMOVB r32, r/m32

o32 0F 42 /r

CMOV

16/32/64-bit

§

Cmovb_r64_rm64

CMOVB r64, r/m64

o64 0F 42 /r

CMOV

64-bit

§

Cmovae_r16_rm16

CMOVAE r16, r/m16

o16 0F 43 /r

CMOV

16/32/64-bit

§

Cmovae_r32_rm32

CMOVAE r32, r/m32

o32 0F 43 /r

CMOV

16/32/64-bit

§

Cmovae_r64_rm64

CMOVAE r64, r/m64

o64 0F 43 /r

CMOV

64-bit

§

Cmove_r16_rm16

CMOVE r16, r/m16

o16 0F 44 /r

CMOV

16/32/64-bit

§

Cmove_r32_rm32

CMOVE r32, r/m32

o32 0F 44 /r

CMOV

16/32/64-bit

§

Cmove_r64_rm64

CMOVE r64, r/m64

o64 0F 44 /r

CMOV

64-bit

§

Cmovne_r16_rm16

CMOVNE r16, r/m16

o16 0F 45 /r

CMOV

16/32/64-bit

§

Cmovne_r32_rm32

CMOVNE r32, r/m32

o32 0F 45 /r

CMOV

16/32/64-bit

§

Cmovne_r64_rm64

CMOVNE r64, r/m64

o64 0F 45 /r

CMOV

64-bit

§

Cmovbe_r16_rm16

CMOVBE r16, r/m16

o16 0F 46 /r

CMOV

16/32/64-bit

§

Cmovbe_r32_rm32

CMOVBE r32, r/m32

o32 0F 46 /r

CMOV

16/32/64-bit

§

Cmovbe_r64_rm64

CMOVBE r64, r/m64

o64 0F 46 /r

CMOV

64-bit

§

Cmova_r16_rm16

CMOVA r16, r/m16

o16 0F 47 /r

CMOV

16/32/64-bit

§

Cmova_r32_rm32

CMOVA r32, r/m32

o32 0F 47 /r

CMOV

16/32/64-bit

§

Cmova_r64_rm64

CMOVA r64, r/m64

o64 0F 47 /r

CMOV

64-bit

§

Cmovs_r16_rm16

CMOVS r16, r/m16

o16 0F 48 /r

CMOV

16/32/64-bit

§

Cmovs_r32_rm32

CMOVS r32, r/m32

o32 0F 48 /r

CMOV

16/32/64-bit

§

Cmovs_r64_rm64

CMOVS r64, r/m64

o64 0F 48 /r

CMOV

64-bit

§

Cmovns_r16_rm16

CMOVNS r16, r/m16

o16 0F 49 /r

CMOV

16/32/64-bit

§

Cmovns_r32_rm32

CMOVNS r32, r/m32

o32 0F 49 /r

CMOV

16/32/64-bit

§

Cmovns_r64_rm64

CMOVNS r64, r/m64

o64 0F 49 /r

CMOV

64-bit

§

Cmovp_r16_rm16

CMOVP r16, r/m16

o16 0F 4A /r

CMOV

16/32/64-bit

§

Cmovp_r32_rm32

CMOVP r32, r/m32

o32 0F 4A /r

CMOV

16/32/64-bit

§

Cmovp_r64_rm64

CMOVP r64, r/m64

o64 0F 4A /r

CMOV

64-bit

§

Cmovnp_r16_rm16

CMOVNP r16, r/m16

o16 0F 4B /r

CMOV

16/32/64-bit

§

Cmovnp_r32_rm32

CMOVNP r32, r/m32

o32 0F 4B /r

CMOV

16/32/64-bit

§

Cmovnp_r64_rm64

CMOVNP r64, r/m64

o64 0F 4B /r

CMOV

64-bit

§

Cmovl_r16_rm16

CMOVL r16, r/m16

o16 0F 4C /r

CMOV

16/32/64-bit

§

Cmovl_r32_rm32

CMOVL r32, r/m32

o32 0F 4C /r

CMOV

16/32/64-bit

§

Cmovl_r64_rm64

CMOVL r64, r/m64

o64 0F 4C /r

CMOV

64-bit

§

Cmovge_r16_rm16

CMOVGE r16, r/m16

o16 0F 4D /r

CMOV

16/32/64-bit

§

Cmovge_r32_rm32

CMOVGE r32, r/m32

o32 0F 4D /r

CMOV

16/32/64-bit

§

Cmovge_r64_rm64

CMOVGE r64, r/m64

o64 0F 4D /r

CMOV

64-bit

§

Cmovle_r16_rm16

CMOVLE r16, r/m16

o16 0F 4E /r

CMOV

16/32/64-bit

§

Cmovle_r32_rm32

CMOVLE r32, r/m32

o32 0F 4E /r

CMOV

16/32/64-bit

§

Cmovle_r64_rm64

CMOVLE r64, r/m64

o64 0F 4E /r

CMOV

64-bit

§

Cmovg_r16_rm16

CMOVG r16, r/m16

o16 0F 4F /r

CMOV

16/32/64-bit

§

Cmovg_r32_rm32

CMOVG r32, r/m32

o32 0F 4F /r

CMOV

16/32/64-bit

§

Cmovg_r64_rm64

CMOVG r64, r/m64

o64 0F 4F /r

CMOV

64-bit

§

VEX_Kandw_kr_kr_kr

KANDW k1, k2, k3

VEX.L1.0F.W0 41 /r

AVX512F

16/32/64-bit

§

VEX_Kandq_kr_kr_kr

KANDQ k1, k2, k3

VEX.L1.0F.W1 41 /r

AVX512BW

16/32/64-bit

§

VEX_Kandb_kr_kr_kr

KANDB k1, k2, k3

VEX.L1.66.0F.W0 41 /r

AVX512DQ

16/32/64-bit

§

VEX_Kandd_kr_kr_kr

KANDD k1, k2, k3

VEX.L1.66.0F.W1 41 /r

AVX512BW

16/32/64-bit

§

VEX_Kandnw_kr_kr_kr

KANDNW k1, k2, k3

VEX.L1.0F.W0 42 /r

AVX512F

16/32/64-bit

§

VEX_Kandnq_kr_kr_kr

KANDNQ k1, k2, k3

VEX.L1.0F.W1 42 /r

AVX512BW

16/32/64-bit

§

VEX_Kandnb_kr_kr_kr

KANDNB k1, k2, k3

VEX.L1.66.0F.W0 42 /r

AVX512DQ

16/32/64-bit

§

VEX_Kandnd_kr_kr_kr

KANDND k1, k2, k3

VEX.L1.66.0F.W1 42 /r

AVX512BW

16/32/64-bit

§

VEX_Knotw_kr_kr

KNOTW k1, k2

VEX.L0.0F.W0 44 /r

AVX512F

16/32/64-bit

§

VEX_Knotq_kr_kr

KNOTQ k1, k2

VEX.L0.0F.W1 44 /r

AVX512BW

16/32/64-bit

§

VEX_Knotb_kr_kr

KNOTB k1, k2

VEX.L0.66.0F.W0 44 /r

AVX512DQ

16/32/64-bit

§

VEX_Knotd_kr_kr

KNOTD k1, k2

VEX.L0.66.0F.W1 44 /r

AVX512BW

16/32/64-bit

§

VEX_Korw_kr_kr_kr

KORW k1, k2, k3

VEX.L1.0F.W0 45 /r

AVX512F

16/32/64-bit

§

VEX_Korq_kr_kr_kr

KORQ k1, k2, k3

VEX.L1.0F.W1 45 /r

AVX512BW

16/32/64-bit

§

VEX_Korb_kr_kr_kr

KORB k1, k2, k3

VEX.L1.66.0F.W0 45 /r

AVX512DQ

16/32/64-bit

§

VEX_Kord_kr_kr_kr

KORD k1, k2, k3

VEX.L1.66.0F.W1 45 /r

AVX512BW

16/32/64-bit

§

VEX_Kxnorw_kr_kr_kr

KXNORW k1, k2, k3

VEX.L1.0F.W0 46 /r

AVX512F

16/32/64-bit

§

VEX_Kxnorq_kr_kr_kr

KXNORQ k1, k2, k3

VEX.L1.0F.W1 46 /r

AVX512BW

16/32/64-bit

§

VEX_Kxnorb_kr_kr_kr

KXNORB k1, k2, k3

VEX.L1.66.0F.W0 46 /r

AVX512DQ

16/32/64-bit

§

VEX_Kxnord_kr_kr_kr

KXNORD k1, k2, k3

VEX.L1.66.0F.W1 46 /r

AVX512BW

16/32/64-bit

§

VEX_Kxorw_kr_kr_kr

KXORW k1, k2, k3

VEX.L1.0F.W0 47 /r

AVX512F

16/32/64-bit

§

VEX_Kxorq_kr_kr_kr

KXORQ k1, k2, k3

VEX.L1.0F.W1 47 /r

AVX512BW

16/32/64-bit

§

VEX_Kxorb_kr_kr_kr

KXORB k1, k2, k3

VEX.L1.66.0F.W0 47 /r

AVX512DQ

16/32/64-bit

§

VEX_Kxord_kr_kr_kr

KXORD k1, k2, k3

VEX.L1.66.0F.W1 47 /r

AVX512BW

16/32/64-bit

§

VEX_Kaddw_kr_kr_kr

KADDW k1, k2, k3

VEX.L1.0F.W0 4A /r

AVX512DQ

16/32/64-bit

§

VEX_Kaddq_kr_kr_kr

KADDQ k1, k2, k3

VEX.L1.0F.W1 4A /r

AVX512BW

16/32/64-bit

§

VEX_Kaddb_kr_kr_kr

KADDB k1, k2, k3

VEX.L1.66.0F.W0 4A /r

AVX512DQ

16/32/64-bit

§

VEX_Kaddd_kr_kr_kr

KADDD k1, k2, k3

VEX.L1.66.0F.W1 4A /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckwd_kr_kr_kr

KUNPCKWD k1, k2, k3

VEX.L1.0F.W0 4B /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckdq_kr_kr_kr

KUNPCKDQ k1, k2, k3

VEX.L1.0F.W1 4B /r

AVX512BW

16/32/64-bit

§

VEX_Kunpckbw_kr_kr_kr

KUNPCKBW k1, k2, k3

VEX.L1.66.0F.W0 4B /r

AVX512F

16/32/64-bit

§

Movmskps_r32_xmm

MOVMSKPS r32, xmm

NP 0F 50 /r

SSE

16/32/64-bit

§

Movmskps_r64_xmm

MOVMSKPS r64, xmm

NP o64 0F 50 /r

SSE

64-bit

§

VEX_Vmovmskps_r32_xmm

VMOVMSKPS r32, xmm2

VEX.128.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskps_r64_xmm

VMOVMSKPS r64, xmm2

VEX.128.0F.W1 50 /r

AVX

64-bit

§

VEX_Vmovmskps_r32_ymm

VMOVMSKPS r32, ymm2

VEX.256.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskps_r64_ymm

VMOVMSKPS r64, ymm2

VEX.256.0F.W1 50 /r

AVX

64-bit

§

Movmskpd_r32_xmm

MOVMSKPD r32, xmm

66 0F 50 /r

SSE2

16/32/64-bit

§

Movmskpd_r64_xmm

MOVMSKPD r64, xmm

66 o64 0F 50 /r

SSE2

64-bit

§

VEX_Vmovmskpd_r32_xmm

VMOVMSKPD r32, xmm2

VEX.128.66.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskpd_r64_xmm

VMOVMSKPD r64, xmm2

VEX.128.66.0F.W1 50 /r

AVX

64-bit

§

VEX_Vmovmskpd_r32_ymm

VMOVMSKPD r32, ymm2

VEX.256.66.0F.W0 50 /r

AVX

16/32/64-bit

§

VEX_Vmovmskpd_r64_ymm

VMOVMSKPD r64, ymm2

VEX.256.66.0F.W1 50 /r

AVX

64-bit

§

Sqrtps_xmm_xmmm128

SQRTPS xmm1, xmm2/m128

NP 0F 51 /r

SSE

16/32/64-bit

§

VEX_Vsqrtps_xmm_xmmm128

VSQRTPS xmm1, xmm2/m128

VEX.128.0F.WIG 51 /r

AVX

16/32/64-bit

§

VEX_Vsqrtps_ymm_ymmm256

VSQRTPS ymm1, ymm2/m256

VEX.256.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtps_xmm_k1z_xmmm128b32

VSQRTPS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtps_ymm_k1z_ymmm256b32

VSQRTPS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtps_zmm_k1z_zmmm512b32_er

VSQRTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 51 /r

AVX512F

16/32/64-bit

§

Sqrtpd_xmm_xmmm128

SQRTPD xmm1, xmm2/m128

66 0F 51 /r

SSE2

16/32/64-bit

§

VEX_Vsqrtpd_xmm_xmmm128

VSQRTPD xmm1, xmm2/m128

VEX.128.66.0F.WIG 51 /r

AVX

16/32/64-bit

§

VEX_Vsqrtpd_ymm_ymmm256

VSQRTPD ymm1, ymm2/m256

VEX.256.66.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtpd_xmm_k1z_xmmm128b64

VSQRTPD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtpd_ymm_k1z_ymmm256b64

VSQRTPD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 51 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsqrtpd_zmm_k1z_zmmm512b64_er

VSQRTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 51 /r

AVX512F

16/32/64-bit

§

Sqrtss_xmm_xmmm32

SQRTSS xmm1, xmm2/m32

F3 0F 51 /r

SSE

16/32/64-bit

§

VEX_Vsqrtss_xmm_xmm_xmmm32

VSQRTSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtss_xmm_k1z_xmm_xmmm32_er

VSQRTSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 51 /r

AVX512F

16/32/64-bit

§

Sqrtsd_xmm_xmmm64

SQRTSD xmm1, xmm2/m64

F2 0F 51 /r

SSE2

16/32/64-bit

§

VEX_Vsqrtsd_xmm_xmm_xmmm64

VSQRTSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 51 /r

AVX

16/32/64-bit

§

EVEX_Vsqrtsd_xmm_k1z_xmm_xmmm64_er

VSQRTSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 51 /r

AVX512F

16/32/64-bit

§

Rsqrtps_xmm_xmmm128

RSQRTPS xmm1, xmm2/m128

NP 0F 52 /r

SSE

16/32/64-bit

§

VEX_Vrsqrtps_xmm_xmmm128

VRSQRTPS xmm1, xmm2/m128

VEX.128.0F.WIG 52 /r

AVX

16/32/64-bit

§

VEX_Vrsqrtps_ymm_ymmm256

VRSQRTPS ymm1, ymm2/m256

VEX.256.0F.WIG 52 /r

AVX

16/32/64-bit

§

Rsqrtss_xmm_xmmm32

RSQRTSS xmm1, xmm2/m32

F3 0F 52 /r

SSE

16/32/64-bit

§

VEX_Vrsqrtss_xmm_xmm_xmmm32

VRSQRTSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 52 /r

AVX

16/32/64-bit

§

Rcpps_xmm_xmmm128

RCPPS xmm1, xmm2/m128

NP 0F 53 /r

SSE

16/32/64-bit

§

VEX_Vrcpps_xmm_xmmm128

VRCPPS xmm1, xmm2/m128

VEX.128.0F.WIG 53 /r

AVX

16/32/64-bit

§

VEX_Vrcpps_ymm_ymmm256

VRCPPS ymm1, ymm2/m256

VEX.256.0F.WIG 53 /r

AVX

16/32/64-bit

§

Rcpss_xmm_xmmm32

RCPSS xmm1, xmm2/m32

F3 0F 53 /r

SSE

16/32/64-bit

§

VEX_Vrcpss_xmm_xmm_xmmm32

VRCPSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 53 /r

AVX

16/32/64-bit

§

Andps_xmm_xmmm128

ANDPS xmm1, xmm2/m128

NP 0F 54 /r

SSE

16/32/64-bit

§

VEX_Vandps_xmm_xmm_xmmm128

VANDPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 54 /r

AVX

16/32/64-bit

§

VEX_Vandps_ymm_ymm_ymmm256

VANDPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 54 /r

AVX

16/32/64-bit

§

EVEX_Vandps_xmm_k1z_xmm_xmmm128b32

VANDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandps_ymm_k1z_ymm_ymmm256b32

VANDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandps_zmm_k1z_zmm_zmmm512b32

VANDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 54 /r

AVX512DQ

16/32/64-bit

§

Andpd_xmm_xmmm128

ANDPD xmm1, xmm2/m128

66 0F 54 /r

SSE2

16/32/64-bit

§

VEX_Vandpd_xmm_xmm_xmmm128

VANDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 54 /r

AVX

16/32/64-bit

§

VEX_Vandpd_ymm_ymm_ymmm256

VANDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 54 /r

AVX

16/32/64-bit

§

EVEX_Vandpd_xmm_k1z_xmm_xmmm128b64

VANDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandpd_ymm_k1z_ymm_ymmm256b64

VANDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 54 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandpd_zmm_k1z_zmm_zmmm512b64

VANDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 54 /r

AVX512DQ

16/32/64-bit

§

Andnps_xmm_xmmm128

ANDNPS xmm1, xmm2/m128

NP 0F 55 /r

SSE

16/32/64-bit

§

VEX_Vandnps_xmm_xmm_xmmm128

VANDNPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 55 /r

AVX

16/32/64-bit

§

VEX_Vandnps_ymm_ymm_ymmm256

VANDNPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 55 /r

AVX

16/32/64-bit

§

EVEX_Vandnps_xmm_k1z_xmm_xmmm128b32

VANDNPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnps_ymm_k1z_ymm_ymmm256b32

VANDNPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnps_zmm_k1z_zmm_zmmm512b32

VANDNPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 55 /r

AVX512DQ

16/32/64-bit

§

Andnpd_xmm_xmmm128

ANDNPD xmm1, xmm2/m128

66 0F 55 /r

SSE2

16/32/64-bit

§

VEX_Vandnpd_xmm_xmm_xmmm128

VANDNPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 55 /r

AVX

16/32/64-bit

§

VEX_Vandnpd_ymm_ymm_ymmm256

VANDNPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 55 /r

AVX

16/32/64-bit

§

EVEX_Vandnpd_xmm_k1z_xmm_xmmm128b64

VANDNPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnpd_ymm_k1z_ymm_ymmm256b64

VANDNPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 55 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vandnpd_zmm_k1z_zmm_zmmm512b64

VANDNPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 55 /r

AVX512DQ

16/32/64-bit

§

Orps_xmm_xmmm128

ORPS xmm1, xmm2/m128

NP 0F 56 /r

SSE

16/32/64-bit

§

VEX_Vorps_xmm_xmm_xmmm128

VORPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 56 /r

AVX

16/32/64-bit

§

VEX_Vorps_ymm_ymm_ymmm256

VORPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 56 /r

AVX

16/32/64-bit

§

EVEX_Vorps_xmm_k1z_xmm_xmmm128b32

VORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorps_ymm_k1z_ymm_ymmm256b32

VORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorps_zmm_k1z_zmm_zmmm512b32

VORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 56 /r

AVX512DQ

16/32/64-bit

§

Orpd_xmm_xmmm128

ORPD xmm1, xmm2/m128

66 0F 56 /r

SSE2

16/32/64-bit

§

VEX_Vorpd_xmm_xmm_xmmm128

VORPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 56 /r

AVX

16/32/64-bit

§

VEX_Vorpd_ymm_ymm_ymmm256

VORPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 56 /r

AVX

16/32/64-bit

§

EVEX_Vorpd_xmm_k1z_xmm_xmmm128b64

VORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorpd_ymm_k1z_ymm_ymmm256b64

VORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 56 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vorpd_zmm_k1z_zmm_zmmm512b64

VORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 56 /r

AVX512DQ

16/32/64-bit

§

Xorps_xmm_xmmm128

XORPS xmm1, xmm2/m128

NP 0F 57 /r

SSE

16/32/64-bit

§

VEX_Vxorps_xmm_xmm_xmmm128

VXORPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 57 /r

AVX

16/32/64-bit

§

VEX_Vxorps_ymm_ymm_ymmm256

VXORPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 57 /r

AVX

16/32/64-bit

§

EVEX_Vxorps_xmm_k1z_xmm_xmmm128b32

VXORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorps_ymm_k1z_ymm_ymmm256b32

VXORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorps_zmm_k1z_zmm_zmmm512b32

VXORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.0F.W0 57 /r

AVX512DQ

16/32/64-bit

§

Xorpd_xmm_xmmm128

XORPD xmm1, xmm2/m128

66 0F 57 /r

SSE2

16/32/64-bit

§

VEX_Vxorpd_xmm_xmm_xmmm128

VXORPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 57 /r

AVX

16/32/64-bit

§

VEX_Vxorpd_ymm_ymm_ymmm256

VXORPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 57 /r

AVX

16/32/64-bit

§

EVEX_Vxorpd_xmm_k1z_xmm_xmmm128b64

VXORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorpd_ymm_k1z_ymm_ymmm256b64

VXORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 57 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vxorpd_zmm_k1z_zmm_zmmm512b64

VXORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 57 /r

AVX512DQ

16/32/64-bit

§

Addps_xmm_xmmm128

ADDPS xmm1, xmm2/m128

NP 0F 58 /r

SSE

16/32/64-bit

§

VEX_Vaddps_xmm_xmm_xmmm128

VADDPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 58 /r

AVX

16/32/64-bit

§

VEX_Vaddps_ymm_ymm_ymmm256

VADDPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddps_xmm_k1z_xmm_xmmm128b32

VADDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddps_ymm_k1z_ymm_ymmm256b32

VADDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddps_zmm_k1z_zmm_zmmm512b32_er

VADDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 58 /r

AVX512F

16/32/64-bit

§

Addpd_xmm_xmmm128

ADDPD xmm1, xmm2/m128

66 0F 58 /r

SSE2

16/32/64-bit

§

VEX_Vaddpd_xmm_xmm_xmmm128

VADDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 58 /r

AVX

16/32/64-bit

§

VEX_Vaddpd_ymm_ymm_ymmm256

VADDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddpd_xmm_k1z_xmm_xmmm128b64

VADDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddpd_ymm_k1z_ymm_ymmm256b64

VADDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vaddpd_zmm_k1z_zmm_zmmm512b64_er

VADDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 58 /r

AVX512F

16/32/64-bit

§

Addss_xmm_xmmm32

ADDSS xmm1, xmm2/m32

F3 0F 58 /r

SSE

16/32/64-bit

§

VEX_Vaddss_xmm_xmm_xmmm32

VADDSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddss_xmm_k1z_xmm_xmmm32_er

VADDSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 58 /r

AVX512F

16/32/64-bit

§

Addsd_xmm_xmmm64

ADDSD xmm1, xmm2/m64

F2 0F 58 /r

SSE2

16/32/64-bit

§

VEX_Vaddsd_xmm_xmm_xmmm64

VADDSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 58 /r

AVX

16/32/64-bit

§

EVEX_Vaddsd_xmm_k1z_xmm_xmmm64_er

VADDSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 58 /r

AVX512F

16/32/64-bit

§

Mulps_xmm_xmmm128

MULPS xmm1, xmm2/m128

NP 0F 59 /r

SSE

16/32/64-bit

§

VEX_Vmulps_xmm_xmm_xmmm128

VMULPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 59 /r

AVX

16/32/64-bit

§

VEX_Vmulps_ymm_ymm_ymmm256

VMULPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulps_xmm_k1z_xmm_xmmm128b32

VMULPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulps_ymm_k1z_ymm_ymmm256b32

VMULPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulps_zmm_k1z_zmm_zmmm512b32_er

VMULPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 59 /r

AVX512F

16/32/64-bit

§

Mulpd_xmm_xmmm128

MULPD xmm1, xmm2/m128

66 0F 59 /r

SSE2

16/32/64-bit

§

VEX_Vmulpd_xmm_xmm_xmmm128

VMULPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 59 /r

AVX

16/32/64-bit

§

VEX_Vmulpd_ymm_ymm_ymmm256

VMULPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulpd_xmm_k1z_xmm_xmmm128b64

VMULPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulpd_ymm_k1z_ymm_ymmm256b64

VMULPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmulpd_zmm_k1z_zmm_zmmm512b64_er

VMULPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 59 /r

AVX512F

16/32/64-bit

§

Mulss_xmm_xmmm32

MULSS xmm1, xmm2/m32

F3 0F 59 /r

SSE

16/32/64-bit

§

VEX_Vmulss_xmm_xmm_xmmm32

VMULSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulss_xmm_k1z_xmm_xmmm32_er

VMULSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 59 /r

AVX512F

16/32/64-bit

§

Mulsd_xmm_xmmm64

MULSD xmm1, xmm2/m64

F2 0F 59 /r

SSE2

16/32/64-bit

§

VEX_Vmulsd_xmm_xmm_xmmm64

VMULSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 59 /r

AVX

16/32/64-bit

§

EVEX_Vmulsd_xmm_k1z_xmm_xmmm64_er

VMULSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 59 /r

AVX512F

16/32/64-bit

§

Cvtps2pd_xmm_xmmm64

CVTPS2PD xmm1, xmm2/m64

NP 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtps2pd_xmm_xmmm64

VCVTPS2PD xmm1, xmm2/m64

VEX.128.0F.WIG 5A /r

AVX

16/32/64-bit

§

VEX_Vcvtps2pd_ymm_xmmm128

VCVTPS2PD ymm1, xmm2/m128

VEX.256.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtps2pd_xmm_k1z_xmmm64b32

VCVTPS2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.0F.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2pd_ymm_k1z_xmmm128b32

VCVTPS2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.0F.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2pd_zmm_k1z_ymmm256b32_sae

VCVTPS2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.0F.W0 5A /r

AVX512F

16/32/64-bit

§

Cvtpd2ps_xmm_xmmm128

CVTPD2PS xmm1, xmm2/m128

66 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtpd2ps_xmm_xmmm128

VCVTPD2PS xmm1, xmm2/m128

VEX.128.66.0F.WIG 5A /r

AVX

16/32/64-bit

§

VEX_Vcvtpd2ps_xmm_ymmm256

VCVTPD2PS xmm1, ymm2/m256

VEX.256.66.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtpd2ps_xmm_k1z_xmmm128b64

VCVTPD2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2ps_xmm_k1z_ymmm256b64

VCVTPD2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2ps_ymm_k1z_zmmm512b64_er

VCVTPD2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 5A /r

AVX512F

16/32/64-bit

§

Cvtss2sd_xmm_xmmm32

CVTSS2SD xmm1, xmm2/m32

F3 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtss2sd_xmm_xmm_xmmm32

VCVTSS2SD xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtss2sd_xmm_k1z_xmm_xmmm32_sae

VCVTSS2SD xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5A /r

AVX512F

16/32/64-bit

§

Cvtsd2ss_xmm_xmmm64

CVTSD2SS xmm1, xmm2/m64

F2 0F 5A /r

SSE2

16/32/64-bit

§

VEX_Vcvtsd2ss_xmm_xmm_xmmm64

VCVTSD2SS xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5A /r

AVX

16/32/64-bit

§

EVEX_Vcvtsd2ss_xmm_k1z_xmm_xmmm64_er

VCVTSD2SS xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5A /r

AVX512F

16/32/64-bit

§

Cvtdq2ps_xmm_xmmm128

CVTDQ2PS xmm1, xmm2/m128

NP 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvtdq2ps_xmm_xmmm128

VCVTDQ2PS xmm1, xmm2/m128

VEX.128.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvtdq2ps_ymm_ymmm256

VCVTDQ2PS ymm1, ymm2/m256

VEX.256.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvtdq2ps_xmm_k1z_xmmm128b32

VCVTDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2ps_ymm_k1z_ymmm256b32

VCVTDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2ps_zmm_k1z_zmmm512b32_er

VCVTDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 5B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtqq2ps_xmm_k1z_xmmm128b64

VCVTQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 5B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2ps_xmm_k1z_ymmm256b64

VCVTQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 5B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2ps_ymm_k1z_zmmm512b64_er

VCVTQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.0F.W1 5B /r

AVX512DQ

16/32/64-bit

§

Cvtps2dq_xmm_xmmm128

CVTPS2DQ xmm1, xmm2/m128

66 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvtps2dq_xmm_xmmm128

VCVTPS2DQ xmm1, xmm2/m128

VEX.128.66.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvtps2dq_ymm_ymmm256

VCVTPS2DQ ymm1, ymm2/m256

VEX.256.66.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvtps2dq_xmm_k1z_xmmm128b32

VCVTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2dq_ymm_k1z_ymmm256b32

VCVTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2dq_zmm_k1z_zmmm512b32_er

VCVTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.66.0F.W0 5B /r

AVX512F

16/32/64-bit

§

Cvttps2dq_xmm_xmmm128

CVTTPS2DQ xmm1, xmm2/m128

F3 0F 5B /r

SSE2

16/32/64-bit

§

VEX_Vcvttps2dq_xmm_xmmm128

VCVTTPS2DQ xmm1, xmm2/m128

VEX.128.F3.0F.WIG 5B /r

AVX

16/32/64-bit

§

VEX_Vcvttps2dq_ymm_ymmm256

VCVTTPS2DQ ymm1, ymm2/m256

VEX.256.F3.0F.WIG 5B /r

AVX

16/32/64-bit

§

EVEX_Vcvttps2dq_xmm_k1z_xmmm128b32

VCVTTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F3.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2dq_ymm_k1z_ymmm256b32

VCVTTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F3.0F.W0 5B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2dq_zmm_k1z_zmmm512b32_sae

VCVTTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.F3.0F.W0 5B /r

AVX512F

16/32/64-bit

§

Subps_xmm_xmmm128

SUBPS xmm1, xmm2/m128

NP 0F 5C /r

SSE

16/32/64-bit

§

VEX_Vsubps_xmm_xmm_xmmm128

VSUBPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5C /r

AVX

16/32/64-bit

§

VEX_Vsubps_ymm_ymm_ymmm256

VSUBPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubps_xmm_k1z_xmm_xmmm128b32

VSUBPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubps_ymm_k1z_ymm_ymmm256b32

VSUBPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubps_zmm_k1z_zmm_zmmm512b32_er

VSUBPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 5C /r

AVX512F

16/32/64-bit

§

Subpd_xmm_xmmm128

SUBPD xmm1, xmm2/m128

66 0F 5C /r

SSE2

16/32/64-bit

§

VEX_Vsubpd_xmm_xmm_xmmm128

VSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5C /r

AVX

16/32/64-bit

§

VEX_Vsubpd_ymm_ymm_ymmm256

VSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubpd_xmm_k1z_xmm_xmmm128b64

VSUBPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubpd_ymm_k1z_ymm_ymmm256b64

VSUBPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vsubpd_zmm_k1z_zmm_zmmm512b64_er

VSUBPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 5C /r

AVX512F

16/32/64-bit

§

Subss_xmm_xmmm32

SUBSS xmm1, xmm2/m32

F3 0F 5C /r

SSE

16/32/64-bit

§

VEX_Vsubss_xmm_xmm_xmmm32

VSUBSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubss_xmm_k1z_xmm_xmmm32_er

VSUBSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 5C /r

AVX512F

16/32/64-bit

§

Subsd_xmm_xmmm64

SUBSD xmm1, xmm2/m64

F2 0F 5C /r

SSE2

16/32/64-bit

§

VEX_Vsubsd_xmm_xmm_xmmm64

VSUBSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5C /r

AVX

16/32/64-bit

§

EVEX_Vsubsd_xmm_k1z_xmm_xmmm64_er

VSUBSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5C /r

AVX512F

16/32/64-bit

§

Minps_xmm_xmmm128

MINPS xmm1, xmm2/m128

NP 0F 5D /r

SSE

16/32/64-bit

§

VEX_Vminps_xmm_xmm_xmmm128

VMINPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5D /r

AVX

16/32/64-bit

§

VEX_Vminps_ymm_ymm_ymmm256

VMINPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminps_xmm_k1z_xmm_xmmm128b32

VMINPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminps_ymm_k1z_ymm_ymmm256b32

VMINPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminps_zmm_k1z_zmm_zmmm512b32_sae

VMINPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}

EVEX.512.0F.W0 5D /r

AVX512F

16/32/64-bit

§

Minpd_xmm_xmmm128

MINPD xmm1, xmm2/m128

66 0F 5D /r

SSE2

16/32/64-bit

§

VEX_Vminpd_xmm_xmm_xmmm128

VMINPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5D /r

AVX

16/32/64-bit

§

VEX_Vminpd_ymm_ymm_ymmm256

VMINPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminpd_xmm_k1z_xmm_xmmm128b64

VMINPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminpd_ymm_k1z_ymm_ymmm256b64

VMINPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vminpd_zmm_k1z_zmm_zmmm512b64_sae

VMINPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}

EVEX.512.66.0F.W1 5D /r

AVX512F

16/32/64-bit

§

Minss_xmm_xmmm32

MINSS xmm1, xmm2/m32

F3 0F 5D /r

SSE

16/32/64-bit

§

VEX_Vminss_xmm_xmm_xmmm32

VMINSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminss_xmm_k1z_xmm_xmmm32_sae

VMINSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5D /r

AVX512F

16/32/64-bit

§

Minsd_xmm_xmmm64

MINSD xmm1, xmm2/m64

F2 0F 5D /r

SSE2

16/32/64-bit

§

VEX_Vminsd_xmm_xmm_xmmm64

VMINSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5D /r

AVX

16/32/64-bit

§

EVEX_Vminsd_xmm_k1z_xmm_xmmm64_sae

VMINSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.F2.0F.W1 5D /r

AVX512F

16/32/64-bit

§

Divps_xmm_xmmm128

DIVPS xmm1, xmm2/m128

NP 0F 5E /r

SSE

16/32/64-bit

§

VEX_Vdivps_xmm_xmm_xmmm128

VDIVPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5E /r

AVX

16/32/64-bit

§

VEX_Vdivps_ymm_ymm_ymmm256

VDIVPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivps_xmm_k1z_xmm_xmmm128b32

VDIVPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivps_ymm_k1z_ymm_ymmm256b32

VDIVPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivps_zmm_k1z_zmm_zmmm512b32_er

VDIVPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.0F.W0 5E /r

AVX512F

16/32/64-bit

§

Divpd_xmm_xmmm128

DIVPD xmm1, xmm2/m128

66 0F 5E /r

SSE2

16/32/64-bit

§

VEX_Vdivpd_xmm_xmm_xmmm128

VDIVPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5E /r

AVX

16/32/64-bit

§

VEX_Vdivpd_ymm_ymm_ymmm256

VDIVPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivpd_xmm_k1z_xmm_xmmm128b64

VDIVPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivpd_ymm_k1z_ymm_ymmm256b64

VDIVPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vdivpd_zmm_k1z_zmm_zmmm512b64_er

VDIVPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F.W1 5E /r

AVX512F

16/32/64-bit

§

Divss_xmm_xmmm32

DIVSS xmm1, xmm2/m32

F3 0F 5E /r

SSE

16/32/64-bit

§

VEX_Vdivss_xmm_xmm_xmmm32

VDIVSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivss_xmm_k1z_xmm_xmmm32_er

VDIVSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.0F.W0 5E /r

AVX512F

16/32/64-bit

§

Divsd_xmm_xmmm64

DIVSD xmm1, xmm2/m64

F2 0F 5E /r

SSE2

16/32/64-bit

§

VEX_Vdivsd_xmm_xmm_xmmm64

VDIVSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5E /r

AVX

16/32/64-bit

§

EVEX_Vdivsd_xmm_k1z_xmm_xmmm64_er

VDIVSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.0F.W1 5E /r

AVX512F

16/32/64-bit

§

Maxps_xmm_xmmm128

MAXPS xmm1, xmm2/m128

NP 0F 5F /r

SSE

16/32/64-bit

§

VEX_Vmaxps_xmm_xmm_xmmm128

VMAXPS xmm1, xmm2, xmm3/m128

VEX.128.0F.WIG 5F /r

AVX

16/32/64-bit

§

VEX_Vmaxps_ymm_ymm_ymmm256

VMAXPS ymm1, ymm2, ymm3/m256

VEX.256.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxps_xmm_k1z_xmm_xmmm128b32

VMAXPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.0F.W0 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxps_ymm_k1z_ymm_ymmm256b32

VMAXPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.0F.W0 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxps_zmm_k1z_zmm_zmmm512b32_sae

VMAXPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}

EVEX.512.0F.W0 5F /r

AVX512F

16/32/64-bit

§

Maxpd_xmm_xmmm128

MAXPD xmm1, xmm2/m128

66 0F 5F /r

SSE2

16/32/64-bit

§

VEX_Vmaxpd_xmm_xmm_xmmm128

VMAXPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 5F /r

AVX

16/32/64-bit

§

VEX_Vmaxpd_ymm_ymm_ymmm256

VMAXPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxpd_xmm_k1z_xmm_xmmm128b64

VMAXPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxpd_ymm_k1z_ymm_ymmm256b64

VMAXPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 5F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmaxpd_zmm_k1z_zmm_zmmm512b64_sae

VMAXPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}

EVEX.512.66.0F.W1 5F /r

AVX512F

16/32/64-bit

§

Maxss_xmm_xmmm32

MAXSS xmm1, xmm2/m32

F3 0F 5F /r

SSE

16/32/64-bit

§

VEX_Vmaxss_xmm_xmm_xmmm32

VMAXSS xmm1, xmm2, xmm3/m32

VEX.LIG.F3.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxss_xmm_k1z_xmm_xmmm32_sae

VMAXSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.F3.0F.W0 5F /r

AVX512F

16/32/64-bit

§

Maxsd_xmm_xmmm64

MAXSD xmm1, xmm2/m64

F2 0F 5F /r

SSE2

16/32/64-bit

§

VEX_Vmaxsd_xmm_xmm_xmmm64

VMAXSD xmm1, xmm2, xmm3/m64

VEX.LIG.F2.0F.WIG 5F /r

AVX

16/32/64-bit

§

EVEX_Vmaxsd_xmm_k1z_xmm_xmmm64_sae

VMAXSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.F2.0F.W1 5F /r

AVX512F

16/32/64-bit

§

Punpcklbw_mm_mmm32

PUNPCKLBW mm, mm/m32

NP 0F 60 /r

MMX

16/32/64-bit

§

Punpcklbw_xmm_xmmm128

PUNPCKLBW xmm1, xmm2/m128

66 0F 60 /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklbw_xmm_xmm_xmmm128

VPUNPCKLBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 60 /r

AVX

16/32/64-bit

§

VEX_Vpunpcklbw_ymm_ymm_ymmm256

VPUNPCKLBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 60 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklbw_xmm_k1z_xmm_xmmm128

VPUNPCKLBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 60 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklbw_ymm_k1z_ymm_ymmm256

VPUNPCKLBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 60 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklbw_zmm_k1z_zmm_zmmm512

VPUNPCKLBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 60 /r

AVX512BW

16/32/64-bit

§

Punpcklwd_mm_mmm32

PUNPCKLWD mm, mm/m32

NP 0F 61 /r

MMX

16/32/64-bit

§

Punpcklwd_xmm_xmmm128

PUNPCKLWD xmm1, xmm2/m128

66 0F 61 /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklwd_xmm_xmm_xmmm128

VPUNPCKLWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 61 /r

AVX

16/32/64-bit

§

VEX_Vpunpcklwd_ymm_ymm_ymmm256

VPUNPCKLWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 61 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklwd_xmm_k1z_xmm_xmmm128

VPUNPCKLWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 61 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklwd_ymm_k1z_ymm_ymmm256

VPUNPCKLWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 61 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpcklwd_zmm_k1z_zmm_zmmm512

VPUNPCKLWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 61 /r

AVX512BW

16/32/64-bit

§

Punpckldq_mm_mmm32

PUNPCKLDQ mm, mm/m32

NP 0F 62 /r

MMX

16/32/64-bit

§

Punpckldq_xmm_xmmm128

PUNPCKLDQ xmm1, xmm2/m128

66 0F 62 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckldq_xmm_xmm_xmmm128

VPUNPCKLDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 62 /r

AVX

16/32/64-bit

§

VEX_Vpunpckldq_ymm_ymm_ymmm256

VPUNPCKLDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 62 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckldq_xmm_k1z_xmm_xmmm128b32

VPUNPCKLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 62 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckldq_ymm_k1z_ymm_ymmm256b32

VPUNPCKLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 62 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckldq_zmm_k1z_zmm_zmmm512b32

VPUNPCKLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 62 /r

AVX512F

16/32/64-bit

§

Packsswb_mm_mmm64

PACKSSWB mm1, mm2/m64

NP 0F 63 /r

MMX

16/32/64-bit

§

Packsswb_xmm_xmmm128

PACKSSWB xmm1, xmm2/m128

66 0F 63 /r

SSE2

16/32/64-bit

§

VEX_Vpacksswb_xmm_xmm_xmmm128

VPACKSSWB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 63 /r

AVX

16/32/64-bit

§

VEX_Vpacksswb_ymm_ymm_ymmm256

VPACKSSWB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 63 /r

AVX2

16/32/64-bit

§

EVEX_Vpacksswb_xmm_k1z_xmm_xmmm128

VPACKSSWB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 63 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpacksswb_ymm_k1z_ymm_ymmm256

VPACKSSWB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 63 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpacksswb_zmm_k1z_zmm_zmmm512

VPACKSSWB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 63 /r

AVX512BW

16/32/64-bit

§

Pcmpgtb_mm_mmm64

PCMPGTB mm, mm/m64

NP 0F 64 /r

MMX

16/32/64-bit

§

Pcmpgtb_xmm_xmmm128

PCMPGTB xmm1, xmm2/m128

66 0F 64 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtb_xmm_xmm_xmmm128

VPCMPGTB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 64 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtb_ymm_ymm_ymmm256

VPCMPGTB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 64 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_xmm_xmmm128

VPCMPGTB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 64 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_ymm_ymmm256

VPCMPGTB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 64 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtb_kr_k1_zmm_zmmm512

VPCMPGTB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 64 /r

AVX512BW

16/32/64-bit

§

Pcmpgtw_mm_mmm64

PCMPGTW mm, mm/m64

NP 0F 65 /r

MMX

16/32/64-bit

§

Pcmpgtw_xmm_xmmm128

PCMPGTW xmm1, xmm2/m128

66 0F 65 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtw_xmm_xmm_xmmm128

VPCMPGTW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 65 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtw_ymm_ymm_ymmm256

VPCMPGTW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 65 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_xmm_xmmm128

VPCMPGTW k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 65 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_ymm_ymmm256

VPCMPGTW k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 65 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpgtw_kr_k1_zmm_zmmm512

VPCMPGTW k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 65 /r

AVX512BW

16/32/64-bit

§

Pcmpgtd_mm_mmm64

PCMPGTD mm, mm/m64

NP 0F 66 /r

MMX

16/32/64-bit

§

Pcmpgtd_xmm_xmmm128

PCMPGTD xmm1, xmm2/m128

66 0F 66 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpgtd_xmm_xmm_xmmm128

VPCMPGTD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 66 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtd_ymm_ymm_ymmm256

VPCMPGTD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 66 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_xmm_xmmm128b32

VPCMPGTD k1 {k2}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 66 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_ymm_ymmm256b32

VPCMPGTD k1 {k2}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 66 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtd_kr_k1_zmm_zmmm512b32

VPCMPGTD k1 {k2}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 66 /r

AVX512F

16/32/64-bit

§

Packuswb_mm_mmm64

PACKUSWB mm, mm/m64

NP 0F 67 /r

MMX

16/32/64-bit

§

Packuswb_xmm_xmmm128

PACKUSWB xmm1, xmm2/m128

66 0F 67 /r

SSE2

16/32/64-bit

§

VEX_Vpackuswb_xmm_xmm_xmmm128

VPACKUSWB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 67 /r

AVX

16/32/64-bit

§

VEX_Vpackuswb_ymm_ymm_ymmm256

VPACKUSWB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 67 /r

AVX2

16/32/64-bit

§

EVEX_Vpackuswb_xmm_k1z_xmm_xmmm128

VPACKUSWB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 67 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackuswb_ymm_k1z_ymm_ymmm256

VPACKUSWB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 67 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackuswb_zmm_k1z_zmm_zmmm512

VPACKUSWB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 67 /r

AVX512BW

16/32/64-bit

§

Punpckhbw_mm_mmm64

PUNPCKHBW mm, mm/m64

NP 0F 68 /r

MMX

16/32/64-bit

§

Punpckhbw_xmm_xmmm128

PUNPCKHBW xmm1, xmm2/m128

66 0F 68 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhbw_xmm_xmm_xmmm128

VPUNPCKHBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 68 /r

AVX

16/32/64-bit

§

VEX_Vpunpckhbw_ymm_ymm_ymmm256

VPUNPCKHBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 68 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhbw_xmm_k1z_xmm_xmmm128

VPUNPCKHBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 68 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhbw_ymm_k1z_ymm_ymmm256

VPUNPCKHBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 68 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhbw_zmm_k1z_zmm_zmmm512

VPUNPCKHBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 68 /r

AVX512BW

16/32/64-bit

§

Punpckhwd_mm_mmm64

PUNPCKHWD mm, mm/m64

NP 0F 69 /r

MMX

16/32/64-bit

§

Punpckhwd_xmm_xmmm128

PUNPCKHWD xmm1, xmm2/m128

66 0F 69 /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhwd_xmm_xmm_xmmm128

VPUNPCKHWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 69 /r

AVX

16/32/64-bit

§

VEX_Vpunpckhwd_ymm_ymm_ymmm256

VPUNPCKHWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 69 /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhwd_xmm_k1z_xmm_xmmm128

VPUNPCKHWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 69 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhwd_ymm_k1z_ymm_ymmm256

VPUNPCKHWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 69 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpunpckhwd_zmm_k1z_zmm_zmmm512

VPUNPCKHWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 69 /r

AVX512BW

16/32/64-bit

§

Punpckhdq_mm_mmm64

PUNPCKHDQ mm, mm/m64

NP 0F 6A /r

MMX

16/32/64-bit

§

Punpckhdq_xmm_xmmm128

PUNPCKHDQ xmm1, xmm2/m128

66 0F 6A /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhdq_xmm_xmm_xmmm128

VPUNPCKHDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6A /r

AVX

16/32/64-bit

§

VEX_Vpunpckhdq_ymm_ymm_ymmm256

VPUNPCKHDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6A /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhdq_xmm_k1z_xmm_xmmm128b32

VPUNPCKHDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 6A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhdq_ymm_k1z_ymm_ymmm256b32

VPUNPCKHDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 6A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhdq_zmm_k1z_zmm_zmmm512b32

VPUNPCKHDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 6A /r

AVX512F

16/32/64-bit

§

Packssdw_mm_mmm64

PACKSSDW mm1, mm2/m64

NP 0F 6B /r

MMX

16/32/64-bit

§

Packssdw_xmm_xmmm128

PACKSSDW xmm1, xmm2/m128

66 0F 6B /r

SSE2

16/32/64-bit

§

VEX_Vpackssdw_xmm_xmm_xmmm128

VPACKSSDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6B /r

AVX

16/32/64-bit

§

VEX_Vpackssdw_ymm_ymm_ymmm256

VPACKSSDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6B /r

AVX2

16/32/64-bit

§

EVEX_Vpackssdw_xmm_k1z_xmm_xmmm128b32

VPACKSSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 6B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackssdw_ymm_k1z_ymm_ymmm256b32

VPACKSSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 6B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackssdw_zmm_k1z_zmm_zmmm512b32

VPACKSSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 6B /r

AVX512BW

16/32/64-bit

§

Punpcklqdq_xmm_xmmm128

PUNPCKLQDQ xmm1, xmm2/m128

66 0F 6C /r

SSE2

16/32/64-bit

§

VEX_Vpunpcklqdq_xmm_xmm_xmmm128

VPUNPCKLQDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6C /r

AVX

16/32/64-bit

§

VEX_Vpunpcklqdq_ymm_ymm_ymmm256

VPUNPCKLQDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6C /r

AVX2

16/32/64-bit

§

EVEX_Vpunpcklqdq_xmm_k1z_xmm_xmmm128b64

VPUNPCKLQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 6C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpcklqdq_ymm_k1z_ymm_ymmm256b64

VPUNPCKLQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 6C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpcklqdq_zmm_k1z_zmm_zmmm512b64

VPUNPCKLQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 6C /r

AVX512F

16/32/64-bit

§

Punpckhqdq_xmm_xmmm128

PUNPCKHQDQ xmm1, xmm2/m128

66 0F 6D /r

SSE2

16/32/64-bit

§

VEX_Vpunpckhqdq_xmm_xmm_xmmm128

VPUNPCKHQDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 6D /r

AVX

16/32/64-bit

§

VEX_Vpunpckhqdq_ymm_ymm_ymmm256

VPUNPCKHQDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 6D /r

AVX2

16/32/64-bit

§

EVEX_Vpunpckhqdq_xmm_k1z_xmm_xmmm128b64

VPUNPCKHQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 6D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhqdq_ymm_k1z_ymm_ymmm256b64

VPUNPCKHQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 6D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpunpckhqdq_zmm_k1z_zmm_zmmm512b64

VPUNPCKHQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 6D /r

AVX512F

16/32/64-bit

§

Movd_mm_rm32

MOVD mm, r/m32

NP 0F 6E /r

MMX

16/32/64-bit

§

Movq_mm_rm64

MOVQ mm, r/m64

NP o64 0F 6E /r

MMX

64-bit

§

Movd_xmm_rm32

MOVD xmm, r/m32

66 0F 6E /r

SSE2

16/32/64-bit

§

Movq_xmm_rm64

MOVQ xmm, r/m64

66 o64 0F 6E /r

SSE2

64-bit

§

VEX_Vmovd_xmm_rm32

VMOVD xmm1, r/m32

VEX.128.66.0F.W0 6E /r

AVX

16/32/64-bit

§

VEX_Vmovq_xmm_rm64

VMOVQ xmm1, r/m64

VEX.128.66.0F.W1 6E /r

AVX

64-bit

§

EVEX_Vmovd_xmm_rm32

VMOVD xmm1, r/m32

EVEX.128.66.0F.W0 6E /r

AVX512F

16/32/64-bit

§

EVEX_Vmovq_xmm_rm64

VMOVQ xmm1, r/m64

EVEX.128.66.0F.W1 6E /r

AVX512F

64-bit

§

Movq_mm_mmm64

MOVQ mm, mm/m64

NP 0F 6F /r

MMX

16/32/64-bit

§

Movdqa_xmm_xmmm128

MOVDQA xmm1, xmm2/m128

66 0F 6F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqa_xmm_xmmm128

VMOVDQA xmm1, xmm2/m128

VEX.128.66.0F.WIG 6F /r

AVX

16/32/64-bit

§

VEX_Vmovdqa_ymm_ymmm256

VMOVDQA ymm1, ymm2/m256

VEX.256.66.0F.WIG 6F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqa32_xmm_k1z_xmmm128

VMOVDQA32 xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_ymm_k1z_ymmm256

VMOVDQA32 ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_zmm_k1z_zmmm512

VMOVDQA32 zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W0 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_xmm_k1z_xmmm128

VMOVDQA64 xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_ymm_k1z_ymmm256

VMOVDQA64 ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_zmm_k1z_zmmm512

VMOVDQA64 zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F.W1 6F /r

AVX512F

16/32/64-bit

§

Movdqu_xmm_xmmm128

MOVDQU xmm1, xmm2/m128

F3 0F 6F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqu_xmm_xmmm128

VMOVDQU xmm1, xmm2/m128

VEX.128.F3.0F.WIG 6F /r

AVX

16/32/64-bit

§

VEX_Vmovdqu_ymm_ymmm256

VMOVDQU ymm1, ymm2/m256

VEX.256.F3.0F.WIG 6F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqu32_xmm_k1z_xmmm128

VMOVDQU32 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_ymm_k1z_ymmm256

VMOVDQU32 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W0 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_zmm_k1z_zmmm512

VMOVDQU32 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W0 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_xmm_k1z_xmmm128

VMOVDQU64 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F3.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_ymm_k1z_ymmm256

VMOVDQU64 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F3.0F.W1 6F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_zmm_k1z_zmmm512

VMOVDQU64 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F3.0F.W1 6F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu8_xmm_k1z_xmmm128

VMOVDQU8 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F2.0F.W0 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_ymm_k1z_ymmm256

VMOVDQU8 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W0 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_zmm_k1z_zmmm512

VMOVDQU8 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W0 6F /r

AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_xmm_k1z_xmmm128

VMOVDQU16 xmm1 {k1}{z}, xmm2/m128

EVEX.128.F2.0F.W1 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_ymm_k1z_ymmm256

VMOVDQU16 ymm1 {k1}{z}, ymm2/m256

EVEX.256.F2.0F.W1 6F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_zmm_k1z_zmmm512

VMOVDQU16 zmm1 {k1}{z}, zmm2/m512

EVEX.512.F2.0F.W1 6F /r

AVX512BW

16/32/64-bit

§

Pshufw_mm_mmm64_imm8

PSHUFW mm1, mm2/m64, imm8

NP 0F 70 /r ib

SSE

16/32/64-bit

§

Pshufd_xmm_xmmm128_imm8

PSHUFD xmm1, xmm2/m128, imm8

66 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshufd_xmm_xmmm128_imm8

VPSHUFD xmm1, xmm2/m128, imm8

VEX.128.66.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshufd_ymm_ymmm256_imm8

VPSHUFD ymm1, ymm2/m256, imm8

VEX.256.66.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshufd_xmm_k1z_xmmm128b32_imm8

VPSHUFD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 70 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpshufd_ymm_k1z_ymmm256b32_imm8

VPSHUFD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 70 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpshufd_zmm_k1z_zmmm512b32_imm8

VPSHUFD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 70 /r ib

AVX512F

16/32/64-bit

§

Pshufhw_xmm_xmmm128_imm8

PSHUFHW xmm1, xmm2/m128, imm8

F3 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshufhw_xmm_xmmm128_imm8

VPSHUFHW xmm1, xmm2/m128, imm8

VEX.128.F3.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshufhw_ymm_ymmm256_imm8

VPSHUFHW ymm1, ymm2/m256, imm8

VEX.256.F3.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshufhw_xmm_k1z_xmmm128_imm8

VPSHUFHW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.F3.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufhw_ymm_k1z_ymmm256_imm8

VPSHUFHW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.F3.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufhw_zmm_k1z_zmmm512_imm8

VPSHUFHW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.F3.0F.WIG 70 /r ib

AVX512BW

16/32/64-bit

§

Pshuflw_xmm_xmmm128_imm8

PSHUFLW xmm1, xmm2/m128, imm8

F2 0F 70 /r ib

SSE2

16/32/64-bit

§

VEX_Vpshuflw_xmm_xmmm128_imm8

VPSHUFLW xmm1, xmm2/m128, imm8

VEX.128.F2.0F.WIG 70 /r ib

AVX

16/32/64-bit

§

VEX_Vpshuflw_ymm_ymmm256_imm8

VPSHUFLW ymm1, ymm2/m256, imm8

VEX.256.F2.0F.WIG 70 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpshuflw_xmm_k1z_xmmm128_imm8

VPSHUFLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.F2.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshuflw_ymm_k1z_ymmm256_imm8

VPSHUFLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.F2.0F.WIG 70 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshuflw_zmm_k1z_zmmm512_imm8

VPSHUFLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.F2.0F.WIG 70 /r ib

AVX512BW

16/32/64-bit

§

Psrlw_mm_imm8

PSRLW mm, imm8

NP 0F 71 /2 ib

MMX

16/32/64-bit

§

Psrlw_xmm_imm8

PSRLW xmm1, imm8

66 0F 71 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrlw_xmm_xmm_imm8

VPSRLW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrlw_ymm_ymm_imm8

VPSRLW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrlw_xmm_k1z_xmmm128_imm8

VPSRLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /2 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_ymm_k1z_ymmm256_imm8

VPSRLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /2 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_zmm_k1z_zmmm512_imm8

VPSRLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /2 ib

AVX512BW

16/32/64-bit

§

Psraw_mm_imm8

PSRAW mm, imm8

NP 0F 71 /4 ib

MMX

16/32/64-bit

§

Psraw_xmm_imm8

PSRAW xmm1, imm8

66 0F 71 /4 ib

SSE2

16/32/64-bit

§

VEX_Vpsraw_xmm_xmm_imm8

VPSRAW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /4 ib

AVX

16/32/64-bit

§

VEX_Vpsraw_ymm_ymm_imm8

VPSRAW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /4 ib

AVX2

16/32/64-bit

§

EVEX_Vpsraw_xmm_k1z_xmmm128_imm8

VPSRAW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /4 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_ymm_k1z_ymmm256_imm8

VPSRAW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /4 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_zmm_k1z_zmmm512_imm8

VPSRAW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /4 ib

AVX512BW

16/32/64-bit

§

Psllw_mm_imm8

PSLLW mm1, imm8

NP 0F 71 /6 ib

MMX

16/32/64-bit

§

Psllw_xmm_imm8

PSLLW xmm1, imm8

66 0F 71 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpsllw_xmm_xmm_imm8

VPSLLW xmm1, xmm2, imm8

VEX.128.66.0F.WIG 71 /6 ib

AVX

16/32/64-bit

§

VEX_Vpsllw_ymm_ymm_imm8

VPSLLW ymm1, ymm2, imm8

VEX.256.66.0F.WIG 71 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpsllw_xmm_k1z_xmmm128_imm8

VPSLLW xmm1 {k1}{z}, xmm2/m128, imm8

EVEX.128.66.0F.WIG 71 /6 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_ymm_k1z_ymmm256_imm8

VPSLLW ymm1 {k1}{z}, ymm2/m256, imm8

EVEX.256.66.0F.WIG 71 /6 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_zmm_k1z_zmmm512_imm8

VPSLLW zmm1 {k1}{z}, zmm2/m512, imm8

EVEX.512.66.0F.WIG 71 /6 ib

AVX512BW

16/32/64-bit

§

EVEX_Vprord_xmm_k1z_xmmm128b32_imm8

VPRORD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprord_ymm_k1z_ymmm256b32_imm8

VPRORD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprord_zmm_k1z_zmmm512b32_imm8

VPRORD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /0 ib

AVX512F

16/32/64-bit

§

EVEX_Vprorq_xmm_k1z_xmmm128b64_imm8

VPRORQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorq_ymm_k1z_ymmm256b64_imm8

VPRORQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /0 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorq_zmm_k1z_zmmm512b64_imm8

VPRORQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /0 ib

AVX512F

16/32/64-bit

§

EVEX_Vprold_xmm_k1z_xmmm128b32_imm8

VPROLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprold_ymm_k1z_ymmm256b32_imm8

VPROLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprold_zmm_k1z_zmmm512b32_imm8

VPROLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /1 ib

AVX512F

16/32/64-bit

§

EVEX_Vprolq_xmm_k1z_xmmm128b64_imm8

VPROLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolq_ymm_k1z_ymmm256b64_imm8

VPROLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /1 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolq_zmm_k1z_zmmm512b64_imm8

VPROLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /1 ib

AVX512F

16/32/64-bit

§

Psrld_mm_imm8

PSRLD mm, imm8

NP 0F 72 /2 ib

MMX

16/32/64-bit

§

Psrld_xmm_imm8

PSRLD xmm1, imm8

66 0F 72 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrld_xmm_xmm_imm8

VPSRLD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrld_ymm_ymm_imm8

VPSRLD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrld_xmm_k1z_xmmm128b32_imm8

VPSRLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_ymm_k1z_ymmm256b32_imm8

VPSRLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_zmm_k1z_zmmm512b32_imm8

VPSRLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /2 ib

AVX512F

16/32/64-bit

§

Psrad_mm_imm8

PSRAD mm, imm8

NP 0F 72 /4 ib

MMX

16/32/64-bit

§

Psrad_xmm_imm8

PSRAD xmm1, imm8

66 0F 72 /4 ib

SSE2

16/32/64-bit

§

VEX_Vpsrad_xmm_xmm_imm8

VPSRAD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /4 ib

AVX

16/32/64-bit

§

VEX_Vpsrad_ymm_ymm_imm8

VPSRAD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /4 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrad_xmm_k1z_xmmm128b32_imm8

VPSRAD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_ymm_k1z_ymmm256b32_imm8

VPSRAD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_zmm_k1z_zmmm512b32_imm8

VPSRAD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /4 ib

AVX512F

16/32/64-bit

§

EVEX_Vpsraq_xmm_k1z_xmmm128b64_imm8

VPSRAQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_ymm_k1z_ymmm256b64_imm8

VPSRAQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 72 /4 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_zmm_k1z_zmmm512b64_imm8

VPSRAQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 72 /4 ib

AVX512F

16/32/64-bit

§

Pslld_mm_imm8

PSLLD mm, imm8

NP 0F 72 /6 ib

MMX

16/32/64-bit

§

Pslld_xmm_imm8

PSLLD xmm1, imm8

66 0F 72 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpslld_xmm_xmm_imm8

VPSLLD xmm1, xmm2, imm8

VEX.128.66.0F.WIG 72 /6 ib

AVX

16/32/64-bit

§

VEX_Vpslld_ymm_ymm_imm8

VPSLLD ymm1, ymm2, imm8

VEX.256.66.0F.WIG 72 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpslld_xmm_k1z_xmmm128b32_imm8

VPSLLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F.W0 72 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_ymm_k1z_ymmm256b32_imm8

VPSLLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F.W0 72 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_zmm_k1z_zmmm512b32_imm8

VPSLLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F.W0 72 /6 ib

AVX512F

16/32/64-bit

§

Psrlq_mm_imm8

PSRLQ mm, imm8

NP 0F 73 /2 ib

MMX

16/32/64-bit

§

Psrlq_xmm_imm8

PSRLQ xmm1, imm8

66 0F 73 /2 ib

SSE2

16/32/64-bit

§

VEX_Vpsrlq_xmm_xmm_imm8

VPSRLQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /2 ib

AVX

16/32/64-bit

§

VEX_Vpsrlq_ymm_ymm_imm8

VPSRLQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /2 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrlq_xmm_k1z_xmmm128b64_imm8

VPSRLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 73 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_ymm_k1z_ymmm256b64_imm8

VPSRLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 73 /2 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_zmm_k1z_zmmm512b64_imm8

VPSRLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 73 /2 ib

AVX512F

16/32/64-bit

§

Psrldq_xmm_imm8

PSRLDQ xmm1, imm8

66 0F 73 /3 ib

SSE2

16/32/64-bit

§

VEX_Vpsrldq_xmm_xmm_imm8

VPSRLDQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /3 ib

AVX

16/32/64-bit

§

VEX_Vpsrldq_ymm_ymm_imm8

VPSRLDQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /3 ib

AVX2

16/32/64-bit

§

EVEX_Vpsrldq_xmm_xmmm128_imm8

VPSRLDQ xmm1, xmm2/m128, imm8

EVEX.128.66.0F.WIG 73 /3 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrldq_ymm_ymmm256_imm8

VPSRLDQ ymm1, ymm2/m256, imm8

EVEX.256.66.0F.WIG 73 /3 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrldq_zmm_zmmm512_imm8

VPSRLDQ zmm1, zmm2/m512, imm8

EVEX.512.66.0F.WIG 73 /3 ib

AVX512BW

16/32/64-bit

§

Psllq_mm_imm8

PSLLQ mm, imm8

NP 0F 73 /6 ib

MMX

16/32/64-bit

§

Psllq_xmm_imm8

PSLLQ xmm1, imm8

66 0F 73 /6 ib

SSE2

16/32/64-bit

§

VEX_Vpsllq_xmm_xmm_imm8

VPSLLQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /6 ib

AVX

16/32/64-bit

§

VEX_Vpsllq_ymm_ymm_imm8

VPSLLQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /6 ib

AVX2

16/32/64-bit

§

EVEX_Vpsllq_xmm_k1z_xmmm128b64_imm8

VPSLLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F.W1 73 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_ymm_k1z_ymmm256b64_imm8

VPSLLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F.W1 73 /6 ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_zmm_k1z_zmmm512b64_imm8

VPSLLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F.W1 73 /6 ib

AVX512F

16/32/64-bit

§

Pslldq_xmm_imm8

PSLLDQ xmm1, imm8

66 0F 73 /7 ib

SSE2

16/32/64-bit

§

VEX_Vpslldq_xmm_xmm_imm8

VPSLLDQ xmm1, xmm2, imm8

VEX.128.66.0F.WIG 73 /7 ib

AVX

16/32/64-bit

§

VEX_Vpslldq_ymm_ymm_imm8

VPSLLDQ ymm1, ymm2, imm8

VEX.256.66.0F.WIG 73 /7 ib

AVX2

16/32/64-bit

§

EVEX_Vpslldq_xmm_xmmm128_imm8

VPSLLDQ xmm1, xmm2/m128, imm8

EVEX.128.66.0F.WIG 73 /7 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpslldq_ymm_ymmm256_imm8

VPSLLDQ ymm1, ymm2/m256, imm8

EVEX.256.66.0F.WIG 73 /7 ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpslldq_zmm_zmmm512_imm8

VPSLLDQ zmm1, zmm2/m512, imm8

EVEX.512.66.0F.WIG 73 /7 ib

AVX512BW

16/32/64-bit

§

Pcmpeqb_mm_mmm64

PCMPEQB mm, mm/m64

NP 0F 74 /r

MMX

16/32/64-bit

§

Pcmpeqb_xmm_xmmm128

PCMPEQB xmm1, xmm2/m128

66 0F 74 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqb_xmm_xmm_xmmm128

VPCMPEQB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 74 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqb_ymm_ymm_ymmm256

VPCMPEQB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 74 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_xmm_xmmm128

VPCMPEQB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 74 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_ymm_ymmm256

VPCMPEQB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 74 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqb_kr_k1_zmm_zmmm512

VPCMPEQB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 74 /r

AVX512BW

16/32/64-bit

§

Pcmpeqw_mm_mmm64

PCMPEQW mm, mm/m64

NP 0F 75 /r

MMX

16/32/64-bit

§

Pcmpeqw_xmm_xmmm128

PCMPEQW xmm1, xmm2/m128

66 0F 75 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqw_xmm_xmm_xmmm128

VPCMPEQW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 75 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqw_ymm_ymm_ymmm256

VPCMPEQW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 75 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_xmm_xmmm128

VPCMPEQW k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_ymm_ymmm256

VPCMPEQW k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpeqw_kr_k1_zmm_zmmm512

VPCMPEQW k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG 75 /r

AVX512BW

16/32/64-bit

§

Pcmpeqd_mm_mmm64

PCMPEQD mm, mm/m64

NP 0F 76 /r

MMX

16/32/64-bit

§

Pcmpeqd_xmm_xmmm128

PCMPEQD xmm1, xmm2/m128

66 0F 76 /r

SSE2

16/32/64-bit

§

VEX_Vpcmpeqd_xmm_xmm_xmmm128

VPCMPEQD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 76 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqd_ymm_ymm_ymmm256

VPCMPEQD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 76 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_xmm_xmmm128b32

VPCMPEQD k1 {k2}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_ymm_ymmm256b32

VPCMPEQD k1 {k2}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqd_kr_k1_zmm_zmmm512b32

VPCMPEQD k1 {k2}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 76 /r

AVX512F

16/32/64-bit

§

Emms

EMMS

NP 0F 77

MMX

16/32/64-bit

§

VEX_Vzeroupper

VZEROUPPER

VEX.128.0F.WIG 77

AVX

16/32/64-bit

§

VEX_Vzeroall

VZEROALL

VEX.256.0F.WIG 77

AVX

16/32/64-bit

§

Vmread_rm32_r32

VMREAD r/m32, r32

NP 0F 78 /r

VMX

16/32-bit

§

Vmread_rm64_r64

VMREAD r/m64, r64

NP 0F 78 /r

VMX

64-bit

§

EVEX_Vcvttps2udq_xmm_k1z_xmmm128b32

VCVTTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2udq_ymm_k1z_ymmm256b32

VCVTTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttps2udq_zmm_k1z_zmmm512b32_sae

VCVTTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_xmm_k1z_xmmm128b64

VCVTTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_xmm_k1z_ymmm256b64

VCVTTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 78 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2udq_ymm_k1z_zmmm512b64_sae

VCVTTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.0F.W1 78 /r

AVX512F

16/32/64-bit

§

Extrq_xmm_imm8_imm8

EXTRQ xmm1, imm8, imm8

66 0F 78 /0 ib ib

SSE4A

16/32/64-bit

§

EVEX_Vcvttps2uqq_xmm_k1z_xmmm64b32

VCVTTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2uqq_ymm_k1z_xmmm128b32

VCVTTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2uqq_zmm_k1z_ymmm256b32_sae

VCVTTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.66.0F.W0 78 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_xmm_k1z_xmmm128b64

VCVTTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_ymm_k1z_ymmm256b64

VCVTTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 78 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2uqq_zmm_k1z_zmmm512b64_sae

VCVTTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 78 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttss2usi_r32_xmmm32_sae

VCVTTSS2USI r32, xmm1/m32{sae}

EVEX.LIG.F3.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttss2usi_r64_xmmm32_sae

VCVTTSS2USI r64, xmm1/m32{sae}

EVEX.LIG.F3.0F.W1 78 /r

AVX512F

64-bit

§

Insertq_xmm_xmm_imm8_imm8

INSERTQ xmm1, xmm2, imm8, imm8

F2 0F 78 /r ib ib

SSE4A

16/32/64-bit

§

EVEX_Vcvttsd2usi_r32_xmmm64_sae

VCVTTSD2USI r32, xmm1/m64{sae}

EVEX.LIG.F2.0F.W0 78 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvttsd2usi_r64_xmmm64_sae

VCVTTSD2USI r64, xmm1/m64{sae}

EVEX.LIG.F2.0F.W1 78 /r

AVX512F

64-bit

§

Vmwrite_r32_rm32

VMWRITE r32, r/m32

NP 0F 79 /r

VMX

16/32-bit

§

Vmwrite_r64_rm64

VMWRITE r64, r/m64

NP 0F 79 /r

VMX

64-bit

§

EVEX_Vcvtps2udq_xmm_k1z_xmmm128b32

VCVTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.0F.W0 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2udq_ymm_k1z_ymmm256b32

VCVTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.0F.W0 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2udq_zmm_k1z_zmmm512b32_er

VCVTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_xmm_k1z_xmmm128b64

VCVTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.0F.W1 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_xmm_k1z_ymmm256b64

VCVTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.0F.W1 79 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2udq_ymm_k1z_zmmm512b64_er

VCVTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.0F.W1 79 /r

AVX512F

16/32/64-bit

§

Extrq_xmm_xmm

EXTRQ xmm1, xmm2

66 0F 79 /r

SSE4A

16/32/64-bit

§

EVEX_Vcvtps2uqq_xmm_k1z_xmmm64b32

VCVTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2uqq_ymm_k1z_xmmm128b32

VCVTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2uqq_zmm_k1z_ymmm256b32_er

VCVTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.66.0F.W0 79 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_xmm_k1z_xmmm128b64

VCVTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_ymm_k1z_ymmm256b64

VCVTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 79 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2uqq_zmm_k1z_zmmm512b64_er

VCVTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 79 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtss2usi_r32_xmmm32_er

VCVTSS2USI r32, xmm1/m32{er}

EVEX.LIG.F3.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtss2usi_r64_xmmm32_er

VCVTSS2USI r64, xmm1/m32{er}

EVEX.LIG.F3.0F.W1 79 /r

AVX512F

64-bit

§

Insertq_xmm_xmm

INSERTQ xmm1, xmm2

F2 0F 79 /r

SSE4A

16/32/64-bit

§

EVEX_Vcvtsd2usi_r32_xmmm64_er

VCVTSD2USI r32, xmm1/m64{er}

EVEX.LIG.F2.0F.W0 79 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtsd2usi_r64_xmmm64_er

VCVTSD2USI r64, xmm1/m64{er}

EVEX.LIG.F2.0F.W1 79 /r

AVX512F

64-bit

§

EVEX_Vcvttps2qq_xmm_k1z_xmmm64b32

VCVTTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2qq_ymm_k1z_xmmm128b32

VCVTTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttps2qq_zmm_k1z_ymmm256b32_sae

VCVTTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}

EVEX.512.66.0F.W0 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_xmm_k1z_xmmm128b64

VCVTTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_ymm_k1z_ymmm256b64

VCVTTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvttpd2qq_zmm_k1z_zmmm512b64_sae

VCVTTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtudq2pd_xmm_k1z_xmmm64b32

VCVTUDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.F3.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2pd_ymm_k1z_xmmm128b32

VCVTUDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.F3.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2pd_zmm_k1z_ymmm256b32_er

VCVTUDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.F3.0F.W0 7A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtuqq2pd_xmm_k1z_xmmm128b64

VCVTUQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F3.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2pd_ymm_k1z_ymmm256b64

VCVTUQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F3.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2pd_zmm_k1z_zmmm512b64_er

VCVTUQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F3.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtudq2ps_xmm_k1z_xmmm128b32

VCVTUDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F2.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2ps_ymm_k1z_ymmm256b32

VCVTUDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F2.0F.W0 7A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtudq2ps_zmm_k1z_zmmm512b32_er

VCVTUDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.F2.0F.W0 7A /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtuqq2ps_xmm_k1z_xmmm128b64

VCVTUQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2ps_xmm_k1z_ymmm256b64

VCVTUQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.0F.W1 7A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtuqq2ps_ymm_k1z_zmmm512b64_er

VCVTUQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.0F.W1 7A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_xmm_k1z_xmmm64b32

VCVTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.66.0F.W0 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_ymm_k1z_xmmm128b32

VCVTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.66.0F.W0 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtps2qq_zmm_k1z_ymmm256b32_er

VCVTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.66.0F.W0 7B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_xmm_k1z_xmmm128b64

VCVTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_ymm_k1z_ymmm256b64

VCVTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 7B /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtpd2qq_zmm_k1z_zmmm512b64_er

VCVTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.0F.W1 7B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vcvtusi2ss_xmm_xmm_rm32_er

VCVTUSI2SS xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.0F.W0 7B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtusi2ss_xmm_xmm_rm64_er

VCVTUSI2SS xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.0F.W1 7B /r

AVX512F

64-bit

§

EVEX_Vcvtusi2sd_xmm_xmm_rm32_er

VCVTUSI2SD xmm1, xmm2, r/m32{er}

EVEX.LIG.F2.0F.W0 7B /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtusi2sd_xmm_xmm_rm64_er

VCVTUSI2SD xmm1, xmm2, r/m64{er}

EVEX.LIG.F2.0F.W1 7B /r

AVX512F

64-bit

§

Haddpd_xmm_xmmm128

HADDPD xmm1, xmm2/m128

66 0F 7C /r

SSE3

16/32/64-bit

§

VEX_Vhaddpd_xmm_xmm_xmmm128

VHADDPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 7C /r

AVX

16/32/64-bit

§

VEX_Vhaddpd_ymm_ymm_ymmm256

VHADDPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 7C /r

AVX

16/32/64-bit

§

Haddps_xmm_xmmm128

HADDPS xmm1, xmm2/m128

F2 0F 7C /r

SSE3

16/32/64-bit

§

VEX_Vhaddps_xmm_xmm_xmmm128

VHADDPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG 7C /r

AVX

16/32/64-bit

§

VEX_Vhaddps_ymm_ymm_ymmm256

VHADDPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG 7C /r

AVX

16/32/64-bit

§

Hsubpd_xmm_xmmm128

HSUBPD xmm1, xmm2/m128

66 0F 7D /r

SSE3

16/32/64-bit

§

VEX_Vhsubpd_xmm_xmm_xmmm128

VHSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG 7D /r

AVX

16/32/64-bit

§

VEX_Vhsubpd_ymm_ymm_ymmm256

VHSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG 7D /r

AVX

16/32/64-bit

§

Hsubps_xmm_xmmm128

HSUBPS xmm1, xmm2/m128

F2 0F 7D /r

SSE3

16/32/64-bit

§

VEX_Vhsubps_xmm_xmm_xmmm128

VHSUBPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG 7D /r

AVX

16/32/64-bit

§

VEX_Vhsubps_ymm_ymm_ymmm256

VHSUBPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG 7D /r

AVX

16/32/64-bit

§

Movd_rm32_mm

MOVD r/m32, mm

NP 0F 7E /r

MMX

16/32/64-bit

§

Movq_rm64_mm

MOVQ r/m64, mm

NP o64 0F 7E /r

MMX

64-bit

§

Movd_rm32_xmm

MOVD r/m32, xmm

66 0F 7E /r

SSE2

16/32/64-bit

§

Movq_rm64_xmm

MOVQ r/m64, xmm

66 o64 0F 7E /r

SSE2

64-bit

§

VEX_Vmovd_rm32_xmm

VMOVD r/m32, xmm1

VEX.128.66.0F.W0 7E /r

AVX

16/32/64-bit

§

VEX_Vmovq_rm64_xmm

VMOVQ r/m64, xmm1

VEX.128.66.0F.W1 7E /r

AVX

64-bit

§

EVEX_Vmovd_rm32_xmm

VMOVD r/m32, xmm1

EVEX.128.66.0F.W0 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vmovq_rm64_xmm

VMOVQ r/m64, xmm1

EVEX.128.66.0F.W1 7E /r

AVX512F

64-bit

§

Movq_xmm_xmmm64

MOVQ xmm1, xmm2/m64

F3 0F 7E /r

SSE2

16/32/64-bit

§

VEX_Vmovq_xmm_xmmm64

VMOVQ xmm1, xmm2/m64

VEX.128.F3.0F.WIG 7E /r

AVX

16/32/64-bit

§

EVEX_Vmovq_xmm_xmmm64

VMOVQ xmm1, xmm2/m64

EVEX.128.F3.0F.W1 7E /r

AVX512F

16/32/64-bit

§

Movq_mmm64_mm

MOVQ mm/m64, mm

NP 0F 7F /r

MMX

16/32/64-bit

§

Movdqa_xmmm128_xmm

MOVDQA xmm2/m128, xmm1

66 0F 7F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqa_xmmm128_xmm

VMOVDQA xmm2/m128, xmm1

VEX.128.66.0F.WIG 7F /r

AVX

16/32/64-bit

§

VEX_Vmovdqa_ymmm256_ymm

VMOVDQA ymm2/m256, ymm1

VEX.256.66.0F.WIG 7F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqa32_xmmm128_k1z_xmm

VMOVDQA32 xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_ymmm256_k1z_ymm

VMOVDQA32 ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa32_zmmm512_k1z_zmm

VMOVDQA32 zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_xmmm128_k1z_xmm

VMOVDQA64 xmm2/m128 {k1}{z}, xmm1

EVEX.128.66.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_ymmm256_k1z_ymm

VMOVDQA64 ymm2/m256 {k1}{z}, ymm1

EVEX.256.66.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqa64_zmmm512_k1z_zmm

VMOVDQA64 zmm2/m512 {k1}{z}, zmm1

EVEX.512.66.0F.W1 7F /r

AVX512F

16/32/64-bit

§

Movdqu_xmmm128_xmm

MOVDQU xmm2/m128, xmm1

F3 0F 7F /r

SSE2

16/32/64-bit

§

VEX_Vmovdqu_xmmm128_xmm

VMOVDQU xmm2/m128, xmm1

VEX.128.F3.0F.WIG 7F /r

AVX

16/32/64-bit

§

VEX_Vmovdqu_ymmm256_ymm

VMOVDQU ymm2/m256, ymm1

VEX.256.F3.0F.WIG 7F /r

AVX

16/32/64-bit

§

EVEX_Vmovdqu32_xmmm128_k1z_xmm

VMOVDQU32 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F3.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_ymmm256_k1z_ymm

VMOVDQU32 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F3.0F.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu32_zmmm512_k1z_zmm

VMOVDQU32 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F3.0F.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_xmmm128_k1z_xmm

VMOVDQU64 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F3.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_ymmm256_k1z_ymm

VMOVDQU64 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F3.0F.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovdqu64_zmmm512_k1z_zmm

VMOVDQU64 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F3.0F.W1 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vmovdqu8_xmmm128_k1z_xmm

VMOVDQU8 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F2.0F.W0 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_ymmm256_k1z_ymm

VMOVDQU8 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F2.0F.W0 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu8_zmmm512_k1z_zmm

VMOVDQU8 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F2.0F.W0 7F /r

AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_xmmm128_k1z_xmm

VMOVDQU16 xmm2/m128 {k1}{z}, xmm1

EVEX.128.F2.0F.W1 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_ymmm256_k1z_ymm

VMOVDQU16 ymm2/m256 {k1}{z}, ymm1

EVEX.256.F2.0F.W1 7F /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vmovdqu16_zmmm512_k1z_zmm

VMOVDQU16 zmm2/m512 {k1}{z}, zmm1

EVEX.512.F2.0F.W1 7F /r

AVX512BW

16/32/64-bit

§

Jo_rel16

JO rel16

o16 0F 80 cw

386+

16/32/64-bit

§

Jo_rel32_32

JO rel32

o32 0F 80 cd

386+

16/32-bit

§

Jo_rel32_64

JO rel32

o64 0F 80 cd

X64

64-bit

§

Jno_rel16

JNO rel16

o16 0F 81 cw

386+

16/32/64-bit

§

Jno_rel32_32

JNO rel32

o32 0F 81 cd

386+

16/32-bit

§

Jno_rel32_64

JNO rel32

o64 0F 81 cd

X64

64-bit

§

Jb_rel16

JB rel16

o16 0F 82 cw

386+

16/32/64-bit

§

Jb_rel32_32

JB rel32

o32 0F 82 cd

386+

16/32-bit

§

Jb_rel32_64

JB rel32

o64 0F 82 cd

X64

64-bit

§

Jae_rel16

JAE rel16

o16 0F 83 cw

386+

16/32/64-bit

§

Jae_rel32_32

JAE rel32

o32 0F 83 cd

386+

16/32-bit

§

Jae_rel32_64

JAE rel32

o64 0F 83 cd

X64

64-bit

§

Je_rel16

JE rel16

o16 0F 84 cw

386+

16/32/64-bit

§

Je_rel32_32

JE rel32

o32 0F 84 cd

386+

16/32-bit

§

Je_rel32_64

JE rel32

o64 0F 84 cd

X64

64-bit

§

Jne_rel16

JNE rel16

o16 0F 85 cw

386+

16/32/64-bit

§

Jne_rel32_32

JNE rel32

o32 0F 85 cd

386+

16/32-bit

§

Jne_rel32_64

JNE rel32

o64 0F 85 cd

X64

64-bit

§

Jbe_rel16

JBE rel16

o16 0F 86 cw

386+

16/32/64-bit

§

Jbe_rel32_32

JBE rel32

o32 0F 86 cd

386+

16/32-bit

§

Jbe_rel32_64

JBE rel32

o64 0F 86 cd

X64

64-bit

§

Ja_rel16

JA rel16

o16 0F 87 cw

386+

16/32/64-bit

§

Ja_rel32_32

JA rel32

o32 0F 87 cd

386+

16/32-bit

§

Ja_rel32_64

JA rel32

o64 0F 87 cd

X64

64-bit

§

Js_rel16

JS rel16

o16 0F 88 cw

386+

16/32/64-bit

§

Js_rel32_32

JS rel32

o32 0F 88 cd

386+

16/32-bit

§

Js_rel32_64

JS rel32

o64 0F 88 cd

X64

64-bit

§

Jns_rel16

JNS rel16

o16 0F 89 cw

386+

16/32/64-bit

§

Jns_rel32_32

JNS rel32

o32 0F 89 cd

386+

16/32-bit

§

Jns_rel32_64

JNS rel32

o64 0F 89 cd

X64

64-bit

§

Jp_rel16

JP rel16

o16 0F 8A cw

386+

16/32/64-bit

§

Jp_rel32_32

JP rel32

o32 0F 8A cd

386+

16/32-bit

§

Jp_rel32_64

JP rel32

o64 0F 8A cd

X64

64-bit

§

Jnp_rel16

JNP rel16

o16 0F 8B cw

386+

16/32/64-bit

§

Jnp_rel32_32

JNP rel32

o32 0F 8B cd

386+

16/32-bit

§

Jnp_rel32_64

JNP rel32

o64 0F 8B cd

X64

64-bit

§

Jl_rel16

JL rel16

o16 0F 8C cw

386+

16/32/64-bit

§

Jl_rel32_32

JL rel32

o32 0F 8C cd

386+

16/32-bit

§

Jl_rel32_64

JL rel32

o64 0F 8C cd

X64

64-bit

§

Jge_rel16

JGE rel16

o16 0F 8D cw

386+

16/32/64-bit

§

Jge_rel32_32

JGE rel32

o32 0F 8D cd

386+

16/32-bit

§

Jge_rel32_64

JGE rel32

o64 0F 8D cd

X64

64-bit

§

Jle_rel16

JLE rel16

o16 0F 8E cw

386+

16/32/64-bit

§

Jle_rel32_32

JLE rel32

o32 0F 8E cd

386+

16/32-bit

§

Jle_rel32_64

JLE rel32

o64 0F 8E cd

X64

64-bit

§

Jg_rel16

JG rel16

o16 0F 8F cw

386+

16/32/64-bit

§

Jg_rel32_32

JG rel32

o32 0F 8F cd

386+

16/32-bit

§

Jg_rel32_64

JG rel32

o64 0F 8F cd

X64

64-bit

§

Seto_rm8

SETO r/m8

0F 90 /r

386+

16/32/64-bit

§

Setno_rm8

SETNO r/m8

0F 91 /r

386+

16/32/64-bit

§

Setb_rm8

SETB r/m8

0F 92 /r

386+

16/32/64-bit

§

Setae_rm8

SETAE r/m8

0F 93 /r

386+

16/32/64-bit

§

Sete_rm8

SETE r/m8

0F 94 /r

386+

16/32/64-bit

§

Setne_rm8

SETNE r/m8

0F 95 /r

386+

16/32/64-bit

§

Setbe_rm8

SETBE r/m8

0F 96 /r

386+

16/32/64-bit

§

Seta_rm8

SETA r/m8

0F 97 /r

386+

16/32/64-bit

§

Sets_rm8

SETS r/m8

0F 98 /r

386+

16/32/64-bit

§

Setns_rm8

SETNS r/m8

0F 99 /r

386+

16/32/64-bit

§

Setp_rm8

SETP r/m8

0F 9A /r

386+

16/32/64-bit

§

Setnp_rm8

SETNP r/m8

0F 9B /r

386+

16/32/64-bit

§

Setl_rm8

SETL r/m8

0F 9C /r

386+

16/32/64-bit

§

Setge_rm8

SETGE r/m8

0F 9D /r

386+

16/32/64-bit

§

Setle_rm8

SETLE r/m8

0F 9E /r

386+

16/32/64-bit

§

Setg_rm8

SETG r/m8

0F 9F /r

386+

16/32/64-bit

§

VEX_Kmovw_kr_km16

KMOVW k1, k2/m16

VEX.L0.0F.W0 90 /r

AVX512F

16/32/64-bit

§

VEX_Kmovq_kr_km64

KMOVQ k1, k2/m64

VEX.L0.0F.W1 90 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovb_kr_km8

KMOVB k1, k2/m8

VEX.L0.66.0F.W0 90 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_kr_km32

KMOVD k1, k2/m32

VEX.L0.66.0F.W1 90 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovw_m16_kr

KMOVW m16, k1

VEX.L0.0F.W0 91 /r

AVX512F

16/32/64-bit

§

VEX_Kmovq_m64_kr

KMOVQ m64, k1

VEX.L0.0F.W1 91 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovb_m8_kr

KMOVB m8, k1

VEX.L0.66.0F.W0 91 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_m32_kr

KMOVD m32, k1

VEX.L0.66.0F.W1 91 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovw_kr_r32

KMOVW k1, r32

VEX.L0.0F.W0 92 /r

AVX512F

16/32/64-bit

§

VEX_Kmovb_kr_r32

KMOVB k1, r32

VEX.L0.66.0F.W0 92 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_kr_r32

KMOVD k1, r32

VEX.L0.F2.0F.W0 92 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovq_kr_r64

KMOVQ k1, r64

VEX.L0.F2.0F.W1 92 /r

AVX512BW

64-bit

§

VEX_Kmovw_r32_kr

KMOVW r32, k1

VEX.L0.0F.W0 93 /r

AVX512F

16/32/64-bit

§

VEX_Kmovb_r32_kr

KMOVB r32, k1

VEX.L0.66.0F.W0 93 /r

AVX512DQ

16/32/64-bit

§

VEX_Kmovd_r32_kr

KMOVD r32, k1

VEX.L0.F2.0F.W0 93 /r

AVX512BW

16/32/64-bit

§

VEX_Kmovq_r64_kr

KMOVQ r64, k1

VEX.L0.F2.0F.W1 93 /r

AVX512BW

64-bit

§

VEX_Kortestw_kr_kr

KORTESTW k1, k2

VEX.L0.0F.W0 98 /r

AVX512F

16/32/64-bit

§

VEX_Kortestq_kr_kr

KORTESTQ k1, k2

VEX.L0.0F.W1 98 /r

AVX512BW

16/32/64-bit

§

VEX_Kortestb_kr_kr

KORTESTB k1, k2

VEX.L0.66.0F.W0 98 /r

AVX512DQ

16/32/64-bit

§

VEX_Kortestd_kr_kr

KORTESTD k1, k2

VEX.L0.66.0F.W1 98 /r

AVX512BW

16/32/64-bit

§

VEX_Ktestw_kr_kr

KTESTW k1, k2

VEX.L0.0F.W0 99 /r

AVX512DQ

16/32/64-bit

§

VEX_Ktestq_kr_kr

KTESTQ k1, k2

VEX.L0.0F.W1 99 /r

AVX512BW

16/32/64-bit

§

VEX_Ktestb_kr_kr

KTESTB k1, k2

VEX.L0.66.0F.W0 99 /r

AVX512DQ

16/32/64-bit

§

VEX_Ktestd_kr_kr

KTESTD k1, k2

VEX.L0.66.0F.W1 99 /r

AVX512BW

16/32/64-bit

§

Pushw_FS

PUSH FS

o16 0F A0

386+

16/32/64-bit

§

Pushd_FS

PUSH FS

o32 0F A0

386+

16/32-bit

§

Pushq_FS

PUSH FS

o64 0F A0

X64

64-bit

§

Popw_FS

POP FS

o16 0F A1

386+

16/32/64-bit

§

Popd_FS

POP FS

o32 0F A1

386+

16/32-bit

§

Popq_FS

POP FS

o64 0F A1

X64

64-bit

§

Cpuid

CPUID

0F A2

CPUID

16/32/64-bit

§

Bt_rm16_r16

BT r/m16, r16

o16 0F A3 /r

386+

16/32/64-bit

§

Bt_rm32_r32

BT r/m32, r32

o32 0F A3 /r

386+

16/32/64-bit

§

Bt_rm64_r64

BT r/m64, r64

o64 0F A3 /r

X64

64-bit

§

Shld_rm16_r16_imm8

SHLD r/m16, r16, imm8

o16 0F A4 /r ib

386+

16/32/64-bit

§

Shld_rm32_r32_imm8

SHLD r/m32, r32, imm8

o32 0F A4 /r ib

386+

16/32/64-bit

§

Shld_rm64_r64_imm8

SHLD r/m64, r64, imm8

o64 0F A4 /r ib

X64

64-bit

§

Shld_rm16_r16_CL

SHLD r/m16, r16, CL

o16 0F A5 /r

386+

16/32/64-bit

§

Shld_rm32_r32_CL

SHLD r/m32, r32, CL

o32 0F A5 /r

386+

16/32/64-bit

§

Shld_rm64_r64_CL

SHLD r/m64, r64, CL

o64 0F A5 /r

X64

64-bit

§

Montmul_16

MONTMUL

a16 F3 0F A6 C0

PADLOCK_PMM

16/32-bit

§

Montmul_32

MONTMUL

a32 F3 0F A6 C0

PADLOCK_PMM

16/32/64-bit

§

Montmul_64

MONTMUL

a64 F3 0F A6 C0

PADLOCK_PMM

64-bit

§

Xsha1_16

XSHA1

a16 F3 0F A6 C8

PADLOCK_PHE

16/32-bit

§

Xsha1_32

XSHA1

a32 F3 0F A6 C8

PADLOCK_PHE

16/32/64-bit

§

Xsha1_64

XSHA1

a64 F3 0F A6 C8

PADLOCK_PHE

64-bit

§

Xsha256_16

XSHA256

a16 F3 0F A6 D0

PADLOCK_PHE

16/32-bit

§

Xsha256_32

XSHA256

a32 F3 0F A6 D0

PADLOCK_PHE

16/32/64-bit

§

Xsha256_64

XSHA256

a64 F3 0F A6 D0

PADLOCK_PHE

64-bit

§

Xbts_r16_rm16

XBTS r16, r/m16

o16 0F A6 /r

386 A0

16/32-bit

§

Xbts_r32_rm32

XBTS r32, r/m32

o32 0F A6 /r

386 A0

16/32-bit

§

Xstore_16

XSTORE

a16 0F A7 C0

PADLOCK_RNG

16/32-bit

§

Xstore_32

XSTORE

a32 0F A7 C0

PADLOCK_RNG

16/32/64-bit

§

Xstore_64

XSTORE

a64 0F A7 C0

PADLOCK_RNG

64-bit

§

Xcryptecb_16

XCRYPTECB

a16 F3 0F A7 C8

PADLOCK_ACE

16/32-bit

§

Xcryptecb_32

XCRYPTECB

a32 F3 0F A7 C8

PADLOCK_ACE

16/32/64-bit

§

Xcryptecb_64

XCRYPTECB

a64 F3 0F A7 C8

PADLOCK_ACE

64-bit

§

Xcryptcbc_16

XCRYPTCBC

a16 F3 0F A7 D0

PADLOCK_ACE

16/32-bit

§

Xcryptcbc_32

XCRYPTCBC

a32 F3 0F A7 D0

PADLOCK_ACE

16/32/64-bit

§

Xcryptcbc_64

XCRYPTCBC

a64 F3 0F A7 D0

PADLOCK_ACE

64-bit

§

Xcryptctr_16

XCRYPTCTR

a16 F3 0F A7 D8

PADLOCK_ACE

16/32-bit

§

Xcryptctr_32

XCRYPTCTR

a32 F3 0F A7 D8

PADLOCK_ACE

16/32/64-bit

§

Xcryptctr_64

XCRYPTCTR

a64 F3 0F A7 D8

PADLOCK_ACE

64-bit

§

Xcryptcfb_16

XCRYPTCFB

a16 F3 0F A7 E0

PADLOCK_ACE

16/32-bit

§

Xcryptcfb_32

XCRYPTCFB

a32 F3 0F A7 E0

PADLOCK_ACE

16/32/64-bit

§

Xcryptcfb_64

XCRYPTCFB

a64 F3 0F A7 E0

PADLOCK_ACE

64-bit

§

Xcryptofb_16

XCRYPTOFB

a16 F3 0F A7 E8

PADLOCK_ACE

16/32-bit

§

Xcryptofb_32

XCRYPTOFB

a32 F3 0F A7 E8

PADLOCK_ACE

16/32/64-bit

§

Xcryptofb_64

XCRYPTOFB

a64 F3 0F A7 E8

PADLOCK_ACE

64-bit

§

Ibts_rm16_r16

IBTS r/m16, r16

o16 0F A7 /r

386 A0

16/32-bit

§

Ibts_rm32_r32

IBTS r/m32, r32

o32 0F A7 /r

386 A0

16/32-bit

§

Cmpxchg486_rm8_r8

CMPXCHG r/m8, r8

0F A6 /r

486 A

16/32-bit

§

Cmpxchg486_rm16_r16

CMPXCHG r/m16, r16

o16 0F A7 /r

486 A

16/32-bit

§

Cmpxchg486_rm32_r32

CMPXCHG r/m32, r32

o32 0F A7 /r

486 A

16/32-bit

§

Pushw_GS

PUSH GS

o16 0F A8

386+

16/32/64-bit

§

Pushd_GS

PUSH GS

o32 0F A8

386+

16/32-bit

§

Pushq_GS

PUSH GS

o64 0F A8

X64

64-bit

§

Popw_GS

POP GS

o16 0F A9

386+

16/32/64-bit

§

Popd_GS

POP GS

o32 0F A9

386+

16/32-bit

§

Popq_GS

POP GS

o64 0F A9

X64

64-bit

§

Rsm

RSM

0F AA

386+

16/32/64-bit

§

Bts_rm16_r16

BTS r/m16, r16

o16 0F AB /r

386+

16/32/64-bit

§

Bts_rm32_r32

BTS r/m32, r32

o32 0F AB /r

386+

16/32/64-bit

§

Bts_rm64_r64

BTS r/m64, r64

o64 0F AB /r

X64

64-bit

§

Shrd_rm16_r16_imm8

SHRD r/m16, r16, imm8

o16 0F AC /r ib

386+

16/32/64-bit

§

Shrd_rm32_r32_imm8

SHRD r/m32, r32, imm8

o32 0F AC /r ib

386+

16/32/64-bit

§

Shrd_rm64_r64_imm8

SHRD r/m64, r64, imm8

o64 0F AC /r ib

X64

64-bit

§

Shrd_rm16_r16_CL

SHRD r/m16, r16, CL

o16 0F AD /r

386+

16/32/64-bit

§

Shrd_rm32_r32_CL

SHRD r/m32, r32, CL

o32 0F AD /r

386+

16/32/64-bit

§

Shrd_rm64_r64_CL

SHRD r/m64, r64, CL

o64 0F AD /r

X64

64-bit

§

Fxsave_m512byte

FXSAVE m512byte

NP 0F AE /0

FXSR

16/32/64-bit

§

Fxsave64_m512byte

FXSAVE64 m512byte

NP o64 0F AE /0

FXSR

64-bit

§

Rdfsbase_r32

RDFSBASE r32

F3 0F AE /0

FSGSBASE

64-bit

§

Rdfsbase_r64

RDFSBASE r64

F3 o64 0F AE /0

FSGSBASE

64-bit

§

Fxrstor_m512byte

FXRSTOR m512byte

NP 0F AE /1

FXSR

16/32/64-bit

§

Fxrstor64_m512byte

FXRSTOR64 m512byte

NP o64 0F AE /1

FXSR

64-bit

§

Rdgsbase_r32

RDGSBASE r32

F3 0F AE /1

FSGSBASE

64-bit

§

Rdgsbase_r64

RDGSBASE r64

F3 o64 0F AE /1

FSGSBASE

64-bit

§

Ldmxcsr_m32

LDMXCSR m32

NP 0F AE /2

SSE

16/32/64-bit

§

Wrfsbase_r32

WRFSBASE r32

F3 0F AE /2

FSGSBASE

64-bit

§

Wrfsbase_r64

WRFSBASE r64

F3 o64 0F AE /2

FSGSBASE

64-bit

§

VEX_Vldmxcsr_m32

VLDMXCSR m32

VEX.LZ.0F.WIG AE /2

AVX

16/32/64-bit

§

Stmxcsr_m32

STMXCSR m32

NP 0F AE /3

SSE

16/32/64-bit

§

Wrgsbase_r32

WRGSBASE r32

F3 0F AE /3

FSGSBASE

64-bit

§

Wrgsbase_r64

WRGSBASE r64

F3 o64 0F AE /3

FSGSBASE

64-bit

§

VEX_Vstmxcsr_m32

VSTMXCSR m32

VEX.LZ.0F.WIG AE /3

AVX

16/32/64-bit

§

Xsave_mem

XSAVE mem

NP 0F AE /4

XSAVE

16/32/64-bit

§

Xsave64_mem

XSAVE64 mem

NP o64 0F AE /4

XSAVE

64-bit

§

Ptwrite_rm32

PTWRITE r/m32

F3 0F AE /4

PTWRITE

16/32/64-bit

§

Ptwrite_rm64

PTWRITE r/m64

F3 o64 0F AE /4

PTWRITE

64-bit

§

Xrstor_mem

XRSTOR mem

NP 0F AE /5

XSAVE

16/32/64-bit

§

Xrstor64_mem

XRSTOR64 mem

NP o64 0F AE /5

XSAVE

64-bit

§

Incsspd_r32

INCSSPD r32

F3 0F AE /5

CET_SS

16/32/64-bit

§

Incsspq_r64

INCSSPQ r64

F3 o64 0F AE /5

CET_SS

64-bit

§

Xsaveopt_mem

XSAVEOPT mem

NP 0F AE /6

XSAVEOPT

16/32/64-bit

§

Xsaveopt64_mem

XSAVEOPT64 mem

NP o64 0F AE /6

XSAVEOPT

64-bit

§

Clwb_m8

CLWB m8

66 0F AE /6

CLWB

16/32/64-bit

§

Tpause_r32

TPAUSE r32, <edx>, <eax>

66 0F AE /6

WAITPKG

16/32/64-bit

§

Tpause_r64

TPAUSE r64, <edx>, <eax>

66 o64 0F AE /6

WAITPKG

64-bit

§

Clrssbsy_m64

CLRSSBSY m64

F3 0F AE /6

CET_SS

16/32/64-bit

§

Umonitor_r16

UMONITOR r16

a16 F3 0F AE /6

WAITPKG

16/32-bit

§

Umonitor_r32

UMONITOR r32

a32 F3 0F AE /6

WAITPKG

16/32/64-bit

§

Umonitor_r64

UMONITOR r64

a64 F3 0F AE /6

WAITPKG

64-bit

§

Umwait_r32

UMWAIT r32, <edx>, <eax>

F2 0F AE /6

WAITPKG

16/32/64-bit

§

Umwait_r64

UMWAIT r64, <edx>, <eax>

F2 o64 0F AE /6

WAITPKG

64-bit

§

Clflush_m8

CLFLUSH m8

NP 0F AE /7

CLFSH

16/32/64-bit

§

Clflushopt_m8

CLFLUSHOPT m8

66 0F AE /7

CLFLUSHOPT

16/32/64-bit

§

Lfence

LFENCE

NP 0F AE E8

SSE2

16/32/64-bit

§

Lfence_E9

LFENCE

NP 0F AE E9

SSE2

16/32/64-bit

§

Lfence_EA

LFENCE

NP 0F AE EA

SSE2

16/32/64-bit

§

Lfence_EB

LFENCE

NP 0F AE EB

SSE2

16/32/64-bit

§

Lfence_EC

LFENCE

NP 0F AE EC

SSE2

16/32/64-bit

§

Lfence_ED

LFENCE

NP 0F AE ED

SSE2

16/32/64-bit

§

Lfence_EE

LFENCE

NP 0F AE EE

SSE2

16/32/64-bit

§

Lfence_EF

LFENCE

NP 0F AE EF

SSE2

16/32/64-bit

§

Mfence

MFENCE

NP 0F AE F0

SSE2

16/32/64-bit

§

Mfence_F1

MFENCE

NP 0F AE F1

SSE2

16/32/64-bit

§

Mfence_F2

MFENCE

NP 0F AE F2

SSE2

16/32/64-bit

§

Mfence_F3

MFENCE

NP 0F AE F3

SSE2

16/32/64-bit

§

Mfence_F4

MFENCE

NP 0F AE F4

SSE2

16/32/64-bit

§

Mfence_F5

MFENCE

NP 0F AE F5

SSE2

16/32/64-bit

§

Mfence_F6

MFENCE

NP 0F AE F6

SSE2

16/32/64-bit

§

Mfence_F7

MFENCE

NP 0F AE F7

SSE2

16/32/64-bit

§

Sfence

SFENCE

NP 0F AE F8

SSE

16/32/64-bit

§

Sfence_F9

SFENCE

NP 0F AE F9

SSE

16/32/64-bit

§

Sfence_FA

SFENCE

NP 0F AE FA

SSE

16/32/64-bit

§

Sfence_FB

SFENCE

NP 0F AE FB

SSE

16/32/64-bit

§

Sfence_FC

SFENCE

NP 0F AE FC

SSE

16/32/64-bit

§

Sfence_FD

SFENCE

NP 0F AE FD

SSE

16/32/64-bit

§

Sfence_FE

SFENCE

NP 0F AE FE

SSE

16/32/64-bit

§

Sfence_FF

SFENCE

NP 0F AE FF

SSE

16/32/64-bit

§

Pcommit

PCOMMIT

66 0F AE F8

PCOMMIT

16/32/64-bit

§

Imul_r16_rm16

IMUL r16, r/m16

o16 0F AF /r

386+

16/32/64-bit

§

Imul_r32_rm32

IMUL r32, r/m32

o32 0F AF /r

386+

16/32/64-bit

§

Imul_r64_rm64

IMUL r64, r/m64

o64 0F AF /r

X64

64-bit

§

Cmpxchg_rm8_r8

CMPXCHG r/m8, r8

0F B0 /r

486+

16/32/64-bit

§

Cmpxchg_rm16_r16

CMPXCHG r/m16, r16

o16 0F B1 /r

486+

16/32/64-bit

§

Cmpxchg_rm32_r32

CMPXCHG r/m32, r32

o32 0F B1 /r

486+

16/32/64-bit

§

Cmpxchg_rm64_r64

CMPXCHG r/m64, r64

o64 0F B1 /r

X64

64-bit

§

Lss_r16_m1616

LSS r16, m16:16

o16 0F B2 /r

386+

16/32/64-bit

§

Lss_r32_m1632

LSS r32, m16:32

o32 0F B2 /r

386+

16/32/64-bit

§

Lss_r64_m1664

LSS r64, m16:64

o64 0F B2 /r

X64

64-bit

§

Btr_rm16_r16

BTR r/m16, r16

o16 0F B3 /r

386+

16/32/64-bit

§

Btr_rm32_r32

BTR r/m32, r32

o32 0F B3 /r

386+

16/32/64-bit

§

Btr_rm64_r64

BTR r/m64, r64

o64 0F B3 /r

X64

64-bit

§

Lfs_r16_m1616

LFS r16, m16:16

o16 0F B4 /r

386+

16/32/64-bit

§

Lfs_r32_m1632

LFS r32, m16:32

o32 0F B4 /r

386+

16/32/64-bit

§

Lfs_r64_m1664

LFS r64, m16:64

o64 0F B4 /r

X64

64-bit

§

Lgs_r16_m1616

LGS r16, m16:16

o16 0F B5 /r

386+

16/32/64-bit

§

Lgs_r32_m1632

LGS r32, m16:32

o32 0F B5 /r

386+

16/32/64-bit

§

Lgs_r64_m1664

LGS r64, m16:64

o64 0F B5 /r

X64

64-bit

§

Movzx_r16_rm8

MOVZX r16, r/m8

o16 0F B6 /r

386+

16/32/64-bit

§

Movzx_r32_rm8

MOVZX r32, r/m8

o32 0F B6 /r

386+

16/32/64-bit

§

Movzx_r64_rm8

MOVZX r64, r/m8

o64 0F B6 /r

X64

64-bit

§

Movzx_r16_rm16

MOVZX r16, r/m16

o16 0F B7 /r

386+

16/32/64-bit

§

Movzx_r32_rm16

MOVZX r32, r/m16

o32 0F B7 /r

386+

16/32/64-bit

§

Movzx_r64_rm16

MOVZX r64, r/m16

o64 0F B7 /r

X64

64-bit

§

Jmpe_disp16

JMPE disp16

o16 0F B8 cw

IA-64

16/32-bit

§

Jmpe_disp32

JMPE disp32

o32 0F B8 cd

IA-64

16/32-bit

§

Popcnt_r16_rm16

POPCNT r16, r/m16

o16 F3 0F B8 /r

POPCNT

16/32/64-bit

§

Popcnt_r32_rm32

POPCNT r32, r/m32

o32 F3 0F B8 /r

POPCNT

16/32/64-bit

§

Popcnt_r64_rm64

POPCNT r64, r/m64

F3 o64 0F B8 /r

POPCNT

64-bit

§

Ud1_r16_rm16

UD1 r16, r/m16

o16 0F B9 /r

286+

16/32/64-bit

§

Ud1_r32_rm32

UD1 r32, r/m32

o32 0F B9 /r

386+

16/32/64-bit

§

Ud1_r64_rm64

UD1 r64, r/m64

o64 0F B9 /r

X64

64-bit

§

Bt_rm16_imm8

BT r/m16, imm8

o16 0F BA /4 ib

386+

16/32/64-bit

§

Bt_rm32_imm8

BT r/m32, imm8

o32 0F BA /4 ib

386+

16/32/64-bit

§

Bt_rm64_imm8

BT r/m64, imm8

o64 0F BA /4 ib

X64

64-bit

§

Bts_rm16_imm8

BTS r/m16, imm8

o16 0F BA /5 ib

386+

16/32/64-bit

§

Bts_rm32_imm8

BTS r/m32, imm8

o32 0F BA /5 ib

386+

16/32/64-bit

§

Bts_rm64_imm8

BTS r/m64, imm8

o64 0F BA /5 ib

X64

64-bit

§

Btr_rm16_imm8

BTR r/m16, imm8

o16 0F BA /6 ib

386+

16/32/64-bit

§

Btr_rm32_imm8

BTR r/m32, imm8

o32 0F BA /6 ib

386+

16/32/64-bit

§

Btr_rm64_imm8

BTR r/m64, imm8

o64 0F BA /6 ib

X64

64-bit

§

Btc_rm16_imm8

BTC r/m16, imm8

o16 0F BA /7 ib

386+

16/32/64-bit

§

Btc_rm32_imm8

BTC r/m32, imm8

o32 0F BA /7 ib

386+

16/32/64-bit

§

Btc_rm64_imm8

BTC r/m64, imm8

o64 0F BA /7 ib

X64

64-bit

§

Btc_rm16_r16

BTC r/m16, r16

o16 0F BB /r

386+

16/32/64-bit

§

Btc_rm32_r32

BTC r/m32, r32

o32 0F BB /r

386+

16/32/64-bit

§

Btc_rm64_r64

BTC r/m64, r64

o64 0F BB /r

X64

64-bit

§

Bsf_r16_rm16

BSF r16, r/m16

o16 0F BC /r

386+

16/32/64-bit

§

Bsf_r32_rm32

BSF r32, r/m32

o32 0F BC /r

386+

16/32/64-bit

§

Bsf_r64_rm64

BSF r64, r/m64

o64 0F BC /r

X64

64-bit

§

Tzcnt_r16_rm16

TZCNT r16, r/m16

o16 F3 0F BC /r

BMI1

16/32/64-bit

§

Tzcnt_r32_rm32

TZCNT r32, r/m32

o32 F3 0F BC /r

BMI1

16/32/64-bit

§

Tzcnt_r64_rm64

TZCNT r64, r/m64

F3 o64 0F BC /r

BMI1

64-bit

§

Bsr_r16_rm16

BSR r16, r/m16

o16 0F BD /r

386+

16/32/64-bit

§

Bsr_r32_rm32

BSR r32, r/m32

o32 0F BD /r

386+

16/32/64-bit

§

Bsr_r64_rm64

BSR r64, r/m64

o64 0F BD /r

X64

64-bit

§

Lzcnt_r16_rm16

LZCNT r16, r/m16

o16 F3 0F BD /r

LZCNT

16/32/64-bit

§

Lzcnt_r32_rm32

LZCNT r32, r/m32

o32 F3 0F BD /r

LZCNT

16/32/64-bit

§

Lzcnt_r64_rm64

LZCNT r64, r/m64

F3 o64 0F BD /r

LZCNT

64-bit

§

Movsx_r16_rm8

MOVSX r16, r/m8

o16 0F BE /r

386+

16/32/64-bit

§

Movsx_r32_rm8

MOVSX r32, r/m8

o32 0F BE /r

386+

16/32/64-bit

§

Movsx_r64_rm8

MOVSX r64, r/m8

o64 0F BE /r

X64

64-bit

§

Movsx_r16_rm16

MOVSX r16, r/m16

o16 0F BF /r

386+

16/32/64-bit

§

Movsx_r32_rm16

MOVSX r32, r/m16

o32 0F BF /r

386+

16/32/64-bit

§

Movsx_r64_rm16

MOVSX r64, r/m16

o64 0F BF /r

X64

64-bit

§

Xadd_rm8_r8

XADD r/m8, r8

0F C0 /r

486+

16/32/64-bit

§

Xadd_rm16_r16

XADD r/m16, r16

o16 0F C1 /r

486+

16/32/64-bit

§

Xadd_rm32_r32

XADD r/m32, r32

o32 0F C1 /r

486+

16/32/64-bit

§

Xadd_rm64_r64

XADD r/m64, r64

o64 0F C1 /r

X64

64-bit

§

Cmpps_xmm_xmmm128_imm8

CMPPS xmm1, xmm2/m128, imm8

NP 0F C2 /r ib

SSE

16/32/64-bit

§

VEX_Vcmpps_xmm_xmm_xmmm128_imm8

VCMPPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

VEX_Vcmpps_ymm_ymm_ymmm256_imm8

VCMPPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_xmm_xmmm128b32_imm8

VCMPPS k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.0F.W0 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_ymm_ymmm256b32_imm8

VCMPPS k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.0F.W0 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmpps_kr_k1_zmm_zmmm512b32_imm8_sae

VCMPPS k1 {k2}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.0F.W0 C2 /r ib

AVX512F

16/32/64-bit

§

Cmppd_xmm_xmmm128_imm8

CMPPD xmm1, xmm2/m128, imm8

66 0F C2 /r ib

SSE2

16/32/64-bit

§

VEX_Vcmppd_xmm_xmm_xmmm128_imm8

VCMPPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

VEX_Vcmppd_ymm_ymm_ymmm256_imm8

VCMPPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_xmm_xmmm128b64_imm8

VCMPPD k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F.W1 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_ymm_ymmm256b64_imm8

VCMPPD k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F.W1 C2 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcmppd_kr_k1_zmm_zmmm512b64_imm8_sae

VCMPPD k1 {k2}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F.W1 C2 /r ib

AVX512F

16/32/64-bit

§

Cmpss_xmm_xmmm32_imm8

CMPSS xmm1, xmm2/m32, imm8

F3 0F C2 /r ib

SSE

16/32/64-bit

§

VEX_Vcmpss_xmm_xmm_xmmm32_imm8

VCMPSS xmm1, xmm2, xmm3/m32, imm8

VEX.LIG.F3.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpss_kr_k1_xmm_xmmm32_imm8_sae

VCMPSS k1 {k2}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.F3.0F.W0 C2 /r ib

AVX512F

16/32/64-bit

§

Cmpsd_xmm_xmmm64_imm8

CMPSD xmm1, xmm2/m64, imm8

F2 0F C2 /r ib

SSE2

16/32/64-bit

§

VEX_Vcmpsd_xmm_xmm_xmmm64_imm8

VCMPSD xmm1, xmm2, xmm3/m64, imm8

VEX.LIG.F2.0F.WIG C2 /r ib

AVX

16/32/64-bit

§

EVEX_Vcmpsd_kr_k1_xmm_xmmm64_imm8_sae

VCMPSD k1 {k2}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.F2.0F.W1 C2 /r ib

AVX512F

16/32/64-bit

§

Movnti_m32_r32

MOVNTI m32, r32

NP 0F C3 /r

SSE2

16/32/64-bit

§

Movnti_m64_r64

MOVNTI m64, r64

NP o64 0F C3 /r

SSE2

64-bit

§

Pinsrw_mm_r32m16_imm8

PINSRW mm, r32/m16, imm8

NP 0F C4 /r ib

SSE

16/32/64-bit

§

Pinsrw_mm_r64m16_imm8

PINSRW mm, r64/m16, imm8

NP o64 0F C4 /r ib

SSE

64-bit

§

Pinsrw_xmm_r32m16_imm8

PINSRW xmm, r32/m16, imm8

66 0F C4 /r ib

SSE2

16/32/64-bit

§

Pinsrw_xmm_r64m16_imm8

PINSRW xmm, r64/m16, imm8

66 o64 0F C4 /r ib

SSE2

64-bit

§

VEX_Vpinsrw_xmm_xmm_r32m16_imm8

VPINSRW xmm1, xmm2, r32/m16, imm8

VEX.128.66.0F.W0 C4 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrw_xmm_xmm_r64m16_imm8

VPINSRW xmm1, xmm2, r64/m16, imm8

VEX.128.66.0F.W1 C4 /r ib

AVX

64-bit

§

EVEX_Vpinsrw_xmm_xmm_r32m16_imm8

VPINSRW xmm1, xmm2, r32/m16, imm8

EVEX.128.66.0F.W0 C4 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpinsrw_xmm_xmm_r64m16_imm8

VPINSRW xmm1, xmm2, r64/m16, imm8

EVEX.128.66.0F.W1 C4 /r ib

AVX512BW

64-bit

§

Pextrw_r32_mm_imm8

PEXTRW r32, mm, imm8

NP 0F C5 /r ib

SSE

16/32/64-bit

§

Pextrw_r64_mm_imm8

PEXTRW r64, mm, imm8

NP o64 0F C5 /r ib

SSE

64-bit

§

Pextrw_r32_xmm_imm8

PEXTRW r32, xmm, imm8

66 0F C5 /r ib

SSE2

16/32/64-bit

§

Pextrw_r64_xmm_imm8

PEXTRW r64, xmm, imm8

66 o64 0F C5 /r ib

SSE2

64-bit

§

VEX_Vpextrw_r32_xmm_imm8

VPEXTRW r32, xmm1, imm8

VEX.128.66.0F.W0 C5 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrw_r64_xmm_imm8

VPEXTRW r64, xmm1, imm8

VEX.128.66.0F.W1 C5 /r ib

AVX

64-bit

§

EVEX_Vpextrw_r32_xmm_imm8

VPEXTRW r32, xmm1, imm8

EVEX.128.66.0F.W0 C5 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrw_r64_xmm_imm8

VPEXTRW r64, xmm1, imm8

EVEX.128.66.0F.W1 C5 /r ib

AVX512BW

64-bit

§

Shufps_xmm_xmmm128_imm8

SHUFPS xmm1, xmm2/m128, imm8

NP 0F C6 /r ib

SSE

16/32/64-bit

§

VEX_Vshufps_xmm_xmm_xmmm128_imm8

VSHUFPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

VEX_Vshufps_ymm_ymm_ymmm256_imm8

VSHUFPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

EVEX_Vshufps_xmm_k1z_xmm_xmmm128b32_imm8

VSHUFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.0F.W0 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufps_ymm_k1z_ymm_ymmm256b32_imm8

VSHUFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.0F.W0 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufps_zmm_k1z_zmm_zmmm512b32_imm8

VSHUFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.0F.W0 C6 /r ib

AVX512F

16/32/64-bit

§

Shufpd_xmm_xmmm128_imm8

SHUFPD xmm1, xmm2/m128, imm8

66 0F C6 /r ib

SSE2

16/32/64-bit

§

VEX_Vshufpd_xmm_xmm_xmmm128_imm8

VSHUFPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

VEX_Vshufpd_ymm_ymm_ymmm256_imm8

VSHUFPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F.WIG C6 /r ib

AVX

16/32/64-bit

§

EVEX_Vshufpd_xmm_k1z_xmm_xmmm128b64_imm8

VSHUFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F.W1 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufpd_ymm_k1z_ymm_ymmm256b64_imm8

VSHUFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F.W1 C6 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufpd_zmm_k1z_zmm_zmmm512b64_imm8

VSHUFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F.W1 C6 /r ib

AVX512F

16/32/64-bit

§

Cmpxchg8b_m64

CMPXCHG8B m64

0F C7 /1

CX8

16/32/64-bit

§

Cmpxchg16b_m128

CMPXCHG16B m128

o64 0F C7 /1

CMPXCHG16B

64-bit

§

Xrstors_mem

XRSTORS mem

NP 0F C7 /3

XSAVES

16/32/64-bit

§

Xrstors64_mem

XRSTORS64 mem

NP o64 0F C7 /3

XSAVES

64-bit

§

Xsavec_mem

XSAVEC mem

NP 0F C7 /4

XSAVEC

16/32/64-bit

§

Xsavec64_mem

XSAVEC64 mem

NP o64 0F C7 /4

XSAVEC

64-bit

§

Xsaves_mem

XSAVES mem

NP 0F C7 /5

XSAVES

16/32/64-bit

§

Xsaves64_mem

XSAVES64 mem

NP o64 0F C7 /5

XSAVES

64-bit

§

Vmptrld_m64

VMPTRLD m64

NP 0F C7 /6

VMX

16/32/64-bit

§

Vmclear_m64

VMCLEAR m64

66 0F C7 /6

VMX

16/32/64-bit

§

Vmxon_m64

VMXON m64

F3 0F C7 /6

VMX

16/32/64-bit

§

Rdrand_r16

RDRAND r16

o16 0F C7 /6

RDRAND

16/32/64-bit

§

Rdrand_r32

RDRAND r32

o32 0F C7 /6

RDRAND

16/32/64-bit

§

Rdrand_r64

RDRAND r64

o64 0F C7 /6

RDRAND

64-bit

§

Vmptrst_m64

VMPTRST m64

NP 0F C7 /7

VMX

16/32/64-bit

§

Rdseed_r16

RDSEED r16

o16 0F C7 /7

RDSEED

16/32/64-bit

§

Rdseed_r32

RDSEED r32

o32 0F C7 /7

RDSEED

16/32/64-bit

§

Rdseed_r64

RDSEED r64

o64 0F C7 /7

RDSEED

64-bit

§

Rdpid_r32

RDPID r32

F3 0F C7 /7

RDPID

16/32-bit

§

Rdpid_r64

RDPID r64

F3 0F C7 /7

RDPID

64-bit

§

Bswap_r16

BSWAP r16

o16 0F C8+rw

486+

16/32/64-bit

§

Bswap_r32

BSWAP r32

o32 0F C8+rd

486+

16/32/64-bit

§

Bswap_r64

BSWAP r64

o64 0F C8+ro

X64

64-bit

§

Addsubpd_xmm_xmmm128

ADDSUBPD xmm1, xmm2/m128

66 0F D0 /r

SSE3

16/32/64-bit

§

VEX_Vaddsubpd_xmm_xmm_xmmm128

VADDSUBPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D0 /r

AVX

16/32/64-bit

§

VEX_Vaddsubpd_ymm_ymm_ymmm256

VADDSUBPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D0 /r

AVX

16/32/64-bit

§

Addsubps_xmm_xmmm128

ADDSUBPS xmm1, xmm2/m128

F2 0F D0 /r

SSE3

16/32/64-bit

§

VEX_Vaddsubps_xmm_xmm_xmmm128

VADDSUBPS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F.WIG D0 /r

AVX

16/32/64-bit

§

VEX_Vaddsubps_ymm_ymm_ymmm256

VADDSUBPS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F.WIG D0 /r

AVX

16/32/64-bit

§

Psrlw_mm_mmm64

PSRLW mm, mm/m64

NP 0F D1 /r

MMX

16/32/64-bit

§

Psrlw_xmm_xmmm128

PSRLW xmm1, xmm2/m128

66 0F D1 /r

SSE2

16/32/64-bit

§

VEX_Vpsrlw_xmm_xmm_xmmm128

VPSRLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D1 /r

AVX

16/32/64-bit

§

VEX_Vpsrlw_ymm_ymm_xmmm128

VPSRLW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlw_xmm_k1z_xmm_xmmm128

VPSRLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_ymm_k1z_ymm_xmmm128

VPSRLW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG D1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlw_zmm_k1z_zmm_xmmm128

VPSRLW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG D1 /r

AVX512BW

16/32/64-bit

§

Psrld_mm_mmm64

PSRLD mm, mm/m64

NP 0F D2 /r

MMX

16/32/64-bit

§

Psrld_xmm_xmmm128

PSRLD xmm1, xmm2/m128

66 0F D2 /r

SSE2

16/32/64-bit

§

VEX_Vpsrld_xmm_xmm_xmmm128

VPSRLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D2 /r

AVX

16/32/64-bit

§

VEX_Vpsrld_ymm_ymm_xmmm128

VPSRLD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D2 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrld_xmm_k1z_xmm_xmmm128

VPSRLD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 D2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_ymm_k1z_ymm_xmmm128

VPSRLD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 D2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrld_zmm_k1z_zmm_xmmm128

VPSRLD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 D2 /r

AVX512F

16/32/64-bit

§

Psrlq_mm_mmm64

PSRLQ mm, mm/m64

NP 0F D3 /r

MMX

16/32/64-bit

§

Psrlq_xmm_xmmm128

PSRLQ xmm1, xmm2/m128

66 0F D3 /r

SSE2

16/32/64-bit

§

VEX_Vpsrlq_xmm_xmm_xmmm128

VPSRLQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D3 /r

AVX

16/32/64-bit

§

VEX_Vpsrlq_ymm_ymm_xmmm128

VPSRLQ ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG D3 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlq_xmm_k1z_xmm_xmmm128

VPSRLQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 D3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_ymm_k1z_ymm_xmmm128

VPSRLQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 D3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlq_zmm_k1z_zmm_xmmm128

VPSRLQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 D3 /r

AVX512F

16/32/64-bit

§

Paddq_mm_mmm64

PADDQ mm, mm/m64

NP 0F D4 /r

MMX

16/32/64-bit

§

Paddq_xmm_xmmm128

PADDQ xmm1, xmm2/m128

66 0F D4 /r

SSE2

16/32/64-bit

§

VEX_Vpaddq_xmm_xmm_xmmm128

VPADDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D4 /r

AVX

16/32/64-bit

§

VEX_Vpaddq_ymm_ymm_ymmm256

VPADDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D4 /r

AVX2

16/32/64-bit

§

EVEX_Vpaddq_xmm_k1z_xmm_xmmm128b64

VPADDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 D4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddq_ymm_k1z_ymm_ymmm256b64

VPADDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 D4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddq_zmm_k1z_zmm_zmmm512b64

VPADDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 D4 /r

AVX512F

16/32/64-bit

§

Pmullw_mm_mmm64

PMULLW mm, mm/m64

NP 0F D5 /r

MMX

16/32/64-bit

§

Pmullw_xmm_xmmm128

PMULLW xmm1, xmm2/m128

66 0F D5 /r

SSE2

16/32/64-bit

§

VEX_Vpmullw_xmm_xmm_xmmm128

VPMULLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D5 /r

AVX

16/32/64-bit

§

VEX_Vpmullw_ymm_ymm_ymmm256

VPMULLW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmullw_xmm_k1z_xmm_xmmm128

VPMULLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmullw_ymm_k1z_ymm_ymmm256

VPMULLW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmullw_zmm_k1z_zmm_zmmm512

VPMULLW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D5 /r

AVX512BW

16/32/64-bit

§

Movq_xmmm64_xmm

MOVQ xmm2/m64, xmm1

66 0F D6 /r

SSE2

16/32/64-bit

§

VEX_Vmovq_xmmm64_xmm

VMOVQ xmm1/m64, xmm2

VEX.128.66.0F.WIG D6 /r

AVX

16/32/64-bit

§

EVEX_Vmovq_xmmm64_xmm

VMOVQ xmm1/m64, xmm2

EVEX.128.66.0F.W1 D6 /r

AVX512F

16/32/64-bit

§

Movq2dq_xmm_mm

MOVQ2DQ xmm, mm

F3 0F D6 /r

SSE2

16/32/64-bit

§

Movdq2q_mm_xmm

MOVDQ2Q mm, xmm

F2 0F D6 /r

SSE2

16/32/64-bit

§

Pmovmskb_r32_mm

PMOVMSKB r32, mm

NP 0F D7 /r

SSE

16/32/64-bit

§

Pmovmskb_r64_mm

PMOVMSKB r64, mm

NP o64 0F D7 /r

SSE

64-bit

§

Pmovmskb_r32_xmm

PMOVMSKB r32, xmm

66 0F D7 /r

SSE2

16/32/64-bit

§

Pmovmskb_r64_xmm

PMOVMSKB r64, xmm

66 o64 0F D7 /r

SSE2

64-bit

§

VEX_Vpmovmskb_r32_xmm

VPMOVMSKB r32, xmm1

VEX.128.66.0F.W0 D7 /r

AVX

16/32/64-bit

§

VEX_Vpmovmskb_r64_xmm

VPMOVMSKB r64, xmm1

VEX.128.66.0F.W1 D7 /r

AVX

64-bit

§

VEX_Vpmovmskb_r32_ymm

VPMOVMSKB r32, ymm1

VEX.256.66.0F.W0 D7 /r

AVX2

16/32/64-bit

§

VEX_Vpmovmskb_r64_ymm

VPMOVMSKB r64, ymm1

VEX.256.66.0F.W1 D7 /r

AVX2

64-bit

§

Psubusb_mm_mmm64

PSUBUSB mm, mm/m64

NP 0F D8 /r

MMX

16/32/64-bit

§

Psubusb_xmm_xmmm128

PSUBUSB xmm1, xmm2/m128

66 0F D8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubusb_xmm_xmm_xmmm128

VPSUBUSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D8 /r

AVX

16/32/64-bit

§

VEX_Vpsubusb_ymm_ymm_ymmm256

VPSUBUSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubusb_xmm_k1z_xmm_xmmm128

VPSUBUSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusb_ymm_k1z_ymm_ymmm256

VPSUBUSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusb_zmm_k1z_zmm_zmmm512

VPSUBUSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D8 /r

AVX512BW

16/32/64-bit

§

Psubusw_mm_mmm64

PSUBUSW mm, mm/m64

NP 0F D9 /r

MMX

16/32/64-bit

§

Psubusw_xmm_xmmm128

PSUBUSW xmm1, xmm2/m128

66 0F D9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubusw_xmm_xmm_xmmm128

VPSUBUSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG D9 /r

AVX

16/32/64-bit

§

VEX_Vpsubusw_ymm_ymm_ymmm256

VPSUBUSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG D9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubusw_xmm_k1z_xmm_xmmm128

VPSUBUSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG D9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusw_ymm_k1z_ymm_ymmm256

VPSUBUSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG D9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubusw_zmm_k1z_zmm_zmmm512

VPSUBUSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG D9 /r

AVX512BW

16/32/64-bit

§

Pminub_mm_mmm64

PMINUB mm1, mm2/m64

NP 0F DA /r

SSE

16/32/64-bit

§

Pminub_xmm_xmmm128

PMINUB xmm1, xmm2/m128

66 0F DA /r

SSE2

16/32/64-bit

§

VEX_Vpminub_xmm_xmm_xmmm128

VPMINUB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DA /r

AVX

16/32/64-bit

§

VEX_Vpminub_ymm_ymm_ymmm256

VPMINUB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DA /r

AVX2

16/32/64-bit

§

EVEX_Vpminub_xmm_k1z_xmm_xmmm128

VPMINUB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminub_ymm_k1z_ymm_ymmm256

VPMINUB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminub_zmm_k1z_zmm_zmmm512

VPMINUB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DA /r

AVX512BW

16/32/64-bit

§

Pand_mm_mmm64

PAND mm, mm/m64

NP 0F DB /r

MMX

16/32/64-bit

§

Pand_xmm_xmmm128

PAND xmm1, xmm2/m128

66 0F DB /r

SSE2

16/32/64-bit

§

VEX_Vpand_xmm_xmm_xmmm128

VPAND xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DB /r

AVX

16/32/64-bit

§

VEX_Vpand_ymm_ymm_ymmm256

VPAND ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DB /r

AVX2

16/32/64-bit

§

EVEX_Vpandd_xmm_k1z_xmm_xmmm128b32

VPANDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandd_ymm_k1z_ymm_ymmm256b32

VPANDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandd_zmm_k1z_zmm_zmmm512b32

VPANDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 DB /r

AVX512F

16/32/64-bit

§

EVEX_Vpandq_xmm_k1z_xmm_xmmm128b64

VPANDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandq_ymm_k1z_ymm_ymmm256b64

VPANDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 DB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandq_zmm_k1z_zmm_zmmm512b64

VPANDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 DB /r

AVX512F

16/32/64-bit

§

Paddusb_mm_mmm64

PADDUSB mm, mm/m64

NP 0F DC /r

MMX

16/32/64-bit

§

Paddusb_xmm_xmmm128

PADDUSB xmm1, xmm2/m128

66 0F DC /r

SSE2

16/32/64-bit

§

VEX_Vpaddusb_xmm_xmm_xmmm128

VPADDUSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DC /r

AVX

16/32/64-bit

§

VEX_Vpaddusb_ymm_ymm_ymmm256

VPADDUSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddusb_xmm_k1z_xmm_xmmm128

VPADDUSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusb_ymm_k1z_ymm_ymmm256

VPADDUSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusb_zmm_k1z_zmm_zmmm512

VPADDUSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DC /r

AVX512BW

16/32/64-bit

§

Paddusw_mm_mmm64

PADDUSW mm, mm/m64

NP 0F DD /r

MMX

16/32/64-bit

§

Paddusw_xmm_xmmm128

PADDUSW xmm1, xmm2/m128

66 0F DD /r

SSE2

16/32/64-bit

§

VEX_Vpaddusw_xmm_xmm_xmmm128

VPADDUSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DD /r

AVX

16/32/64-bit

§

VEX_Vpaddusw_ymm_ymm_ymmm256

VPADDUSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DD /r

AVX2

16/32/64-bit

§

EVEX_Vpaddusw_xmm_k1z_xmm_xmmm128

VPADDUSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusw_ymm_k1z_ymm_ymmm256

VPADDUSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddusw_zmm_k1z_zmm_zmmm512

VPADDUSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DD /r

AVX512BW

16/32/64-bit

§

Pmaxub_mm_mmm64

PMAXUB mm1, mm2/m64

NP 0F DE /r

SSE

16/32/64-bit

§

Pmaxub_xmm_xmmm128

PMAXUB xmm1, xmm2/m128

66 0F DE /r

SSE2

16/32/64-bit

§

VEX_Vpmaxub_xmm_xmm_xmmm128

VPMAXUB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DE /r

AVX

16/32/64-bit

§

VEX_Vpmaxub_ymm_ymm_ymmm256

VPMAXUB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DE /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxub_xmm_k1z_xmm_xmmm128

VPMAXUB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG DE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxub_ymm_k1z_ymm_ymmm256

VPMAXUB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG DE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxub_zmm_k1z_zmm_zmmm512

VPMAXUB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG DE /r

AVX512BW

16/32/64-bit

§

Pandn_mm_mmm64

PANDN mm, mm/m64

NP 0F DF /r

MMX

16/32/64-bit

§

Pandn_xmm_xmmm128

PANDN xmm1, xmm2/m128

66 0F DF /r

SSE2

16/32/64-bit

§

VEX_Vpandn_xmm_xmm_xmmm128

VPANDN xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG DF /r

AVX

16/32/64-bit

§

VEX_Vpandn_ymm_ymm_ymmm256

VPANDN ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG DF /r

AVX2

16/32/64-bit

§

EVEX_Vpandnd_xmm_k1z_xmm_xmmm128b32

VPANDND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnd_ymm_k1z_ymm_ymmm256b32

VPANDND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnd_zmm_k1z_zmm_zmmm512b32

VPANDND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 DF /r

AVX512F

16/32/64-bit

§

EVEX_Vpandnq_xmm_k1z_xmm_xmmm128b64

VPANDNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnq_ymm_k1z_ymm_ymmm256b64

VPANDNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 DF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpandnq_zmm_k1z_zmm_zmmm512b64

VPANDNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 DF /r

AVX512F

16/32/64-bit

§

Pavgb_mm_mmm64

PAVGB mm1, mm2/m64

NP 0F E0 /r

SSE

16/32/64-bit

§

Pavgb_xmm_xmmm128

PAVGB xmm1, xmm2/m128

66 0F E0 /r

SSE2

16/32/64-bit

§

VEX_Vpavgb_xmm_xmm_xmmm128

VPAVGB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E0 /r

AVX

16/32/64-bit

§

VEX_Vpavgb_ymm_ymm_ymmm256

VPAVGB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E0 /r

AVX2

16/32/64-bit

§

EVEX_Vpavgb_xmm_k1z_xmm_xmmm128

VPAVGB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E0 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgb_ymm_k1z_ymm_ymmm256

VPAVGB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E0 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgb_zmm_k1z_zmm_zmmm512

VPAVGB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E0 /r

AVX512BW

16/32/64-bit

§

Psraw_mm_mmm64

PSRAW mm, mm/m64

NP 0F E1 /r

MMX

16/32/64-bit

§

Psraw_xmm_xmmm128

PSRAW xmm1, xmm2/m128

66 0F E1 /r

SSE2

16/32/64-bit

§

VEX_Vpsraw_xmm_xmm_xmmm128

VPSRAW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E1 /r

AVX

16/32/64-bit

§

VEX_Vpsraw_ymm_ymm_xmmm128

VPSRAW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG E1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsraw_xmm_k1z_xmm_xmmm128

VPSRAW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_ymm_k1z_ymm_xmmm128

VPSRAW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG E1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsraw_zmm_k1z_zmm_xmmm128

VPSRAW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG E1 /r

AVX512BW

16/32/64-bit

§

Psrad_mm_mmm64

PSRAD mm, mm/m64

NP 0F E2 /r

MMX

16/32/64-bit

§

Psrad_xmm_xmmm128

PSRAD xmm1, xmm2/m128

66 0F E2 /r

SSE2

16/32/64-bit

§

VEX_Vpsrad_xmm_xmm_xmmm128

VPSRAD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E2 /r

AVX

16/32/64-bit

§

VEX_Vpsrad_ymm_ymm_xmmm128

VPSRAD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG E2 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrad_xmm_k1z_xmm_xmmm128

VPSRAD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_ymm_k1z_ymm_xmmm128

VPSRAD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrad_zmm_k1z_zmm_xmmm128

VPSRAD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 E2 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsraq_xmm_k1z_xmm_xmmm128

VPSRAQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_ymm_k1z_ymm_xmmm128

VPSRAQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 E2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsraq_zmm_k1z_zmm_xmmm128

VPSRAQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 E2 /r

AVX512F

16/32/64-bit

§

Pavgw_mm_mmm64

PAVGW mm1, mm2/m64

NP 0F E3 /r

SSE

16/32/64-bit

§

Pavgw_xmm_xmmm128

PAVGW xmm1, xmm2/m128

66 0F E3 /r

SSE2

16/32/64-bit

§

VEX_Vpavgw_xmm_xmm_xmmm128

VPAVGW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E3 /r

AVX

16/32/64-bit

§

VEX_Vpavgw_ymm_ymm_ymmm256

VPAVGW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E3 /r

AVX2

16/32/64-bit

§

EVEX_Vpavgw_xmm_k1z_xmm_xmmm128

VPAVGW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E3 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgw_ymm_k1z_ymm_ymmm256

VPAVGW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E3 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpavgw_zmm_k1z_zmm_zmmm512

VPAVGW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E3 /r

AVX512BW

16/32/64-bit

§

Pmulhuw_mm_mmm64

PMULHUW mm1, mm2/m64

NP 0F E4 /r

SSE

16/32/64-bit

§

Pmulhuw_xmm_xmmm128

PMULHUW xmm1, xmm2/m128

66 0F E4 /r

SSE2

16/32/64-bit

§

VEX_Vpmulhuw_xmm_xmm_xmmm128

VPMULHUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E4 /r

AVX

16/32/64-bit

§

VEX_Vpmulhuw_ymm_ymm_ymmm256

VPMULHUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E4 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhuw_xmm_k1z_xmm_xmmm128

VPMULHUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E4 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhuw_ymm_k1z_ymm_ymmm256

VPMULHUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E4 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhuw_zmm_k1z_zmm_zmmm512

VPMULHUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E4 /r

AVX512BW

16/32/64-bit

§

Pmulhw_mm_mmm64

PMULHW mm, mm/m64

NP 0F E5 /r

MMX

16/32/64-bit

§

Pmulhw_xmm_xmmm128

PMULHW xmm1, xmm2/m128

66 0F E5 /r

SSE2

16/32/64-bit

§

VEX_Vpmulhw_xmm_xmm_xmmm128

VPMULHW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E5 /r

AVX

16/32/64-bit

§

VEX_Vpmulhw_ymm_ymm_ymmm256

VPMULHW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhw_xmm_k1z_xmm_xmmm128

VPMULHW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhw_ymm_k1z_ymm_ymmm256

VPMULHW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhw_zmm_k1z_zmm_zmmm512

VPMULHW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E5 /r

AVX512BW

16/32/64-bit

§

Cvttpd2dq_xmm_xmmm128

CVTTPD2DQ xmm1, xmm2/m128

66 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvttpd2dq_xmm_xmmm128

VCVTTPD2DQ xmm1, xmm2/m128

VEX.128.66.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvttpd2dq_xmm_ymmm256

VCVTTPD2DQ xmm1, ymm2/m256

VEX.256.66.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvttpd2dq_xmm_k1z_xmmm128b64

VCVTTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2dq_xmm_k1z_ymmm256b64

VCVTTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvttpd2dq_ymm_k1z_zmmm512b64_sae

VCVTTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F.W1 E6 /r

AVX512F

16/32/64-bit

§

Cvtdq2pd_xmm_xmmm64

CVTDQ2PD xmm1, xmm2/m64

F3 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvtdq2pd_xmm_xmmm64

VCVTDQ2PD xmm1, xmm2/m64

VEX.128.F3.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvtdq2pd_ymm_xmmm128

VCVTDQ2PD ymm1, xmm2/m128

VEX.256.F3.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvtdq2pd_xmm_k1z_xmmm64b32

VCVTDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst

EVEX.128.F3.0F.W0 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2pd_ymm_k1z_xmmm128b32

VCVTDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.256.F3.0F.W0 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtdq2pd_zmm_k1z_ymmm256b32_er

VCVTDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}

EVEX.512.F3.0F.W0 E6 /r

AVX512F

16/32/64-bit

§

EVEX_Vcvtqq2pd_xmm_k1z_xmmm128b64

VCVTQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F3.0F.W1 E6 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2pd_ymm_k1z_ymmm256b64

VCVTQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F3.0F.W1 E6 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vcvtqq2pd_zmm_k1z_zmmm512b64_er

VCVTQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F3.0F.W1 E6 /r

AVX512DQ

16/32/64-bit

§

Cvtpd2dq_xmm_xmmm128

CVTPD2DQ xmm1, xmm2/m128

F2 0F E6 /r

SSE2

16/32/64-bit

§

VEX_Vcvtpd2dq_xmm_xmmm128

VCVTPD2DQ xmm1, xmm2/m128

VEX.128.F2.0F.WIG E6 /r

AVX

16/32/64-bit

§

VEX_Vcvtpd2dq_xmm_ymmm256

VCVTPD2DQ xmm1, ymm2/m256

VEX.256.F2.0F.WIG E6 /r

AVX

16/32/64-bit

§

EVEX_Vcvtpd2dq_xmm_k1z_xmmm128b64

VCVTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2dq_xmm_k1z_ymmm256b64

VCVTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.0F.W1 E6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtpd2dq_ymm_k1z_zmmm512b64_er

VCVTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.0F.W1 E6 /r

AVX512F

16/32/64-bit

§

Movntq_m64_mm

MOVNTQ m64, mm

NP 0F E7 /r

SSE

16/32/64-bit

§

Movntdq_m128_xmm

MOVNTDQ m128, xmm1

66 0F E7 /r

SSE2

16/32/64-bit

§

VEX_Vmovntdq_m128_xmm

VMOVNTDQ m128, xmm1

VEX.128.66.0F.WIG E7 /r

AVX

16/32/64-bit

§

VEX_Vmovntdq_m256_ymm

VMOVNTDQ m256, ymm1

VEX.256.66.0F.WIG E7 /r

AVX

16/32/64-bit

§

EVEX_Vmovntdq_m128_xmm

VMOVNTDQ m128, xmm1

EVEX.128.66.0F.W0 E7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdq_m256_ymm

VMOVNTDQ m256, ymm1

EVEX.256.66.0F.W0 E7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdq_m512_zmm

VMOVNTDQ m512, zmm1

EVEX.512.66.0F.W0 E7 /r

AVX512F

16/32/64-bit

§

Psubsb_mm_mmm64

PSUBSB mm, mm/m64

NP 0F E8 /r

MMX

16/32/64-bit

§

Psubsb_xmm_xmmm128

PSUBSB xmm1, xmm2/m128

66 0F E8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubsb_xmm_xmm_xmmm128

VPSUBSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E8 /r

AVX

16/32/64-bit

§

VEX_Vpsubsb_ymm_ymm_ymmm256

VPSUBSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubsb_xmm_k1z_xmm_xmmm128

VPSUBSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsb_ymm_k1z_ymm_ymmm256

VPSUBSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsb_zmm_k1z_zmm_zmmm512

VPSUBSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E8 /r

AVX512BW

16/32/64-bit

§

Psubsw_mm_mmm64

PSUBSW mm, mm/m64

NP 0F E9 /r

MMX

16/32/64-bit

§

Psubsw_xmm_xmmm128

PSUBSW xmm1, xmm2/m128

66 0F E9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubsw_xmm_xmm_xmmm128

VPSUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG E9 /r

AVX

16/32/64-bit

§

VEX_Vpsubsw_ymm_ymm_ymmm256

VPSUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG E9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubsw_xmm_k1z_xmm_xmmm128

VPSUBSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG E9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsw_ymm_k1z_ymm_ymmm256

VPSUBSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG E9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubsw_zmm_k1z_zmm_zmmm512

VPSUBSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG E9 /r

AVX512BW

16/32/64-bit

§

Pminsw_mm_mmm64

PMINSW mm1, mm2/m64

NP 0F EA /r

SSE

16/32/64-bit

§

Pminsw_xmm_xmmm128

PMINSW xmm1, xmm2/m128

66 0F EA /r

SSE2

16/32/64-bit

§

VEX_Vpminsw_xmm_xmm_xmmm128

VPMINSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EA /r

AVX

16/32/64-bit

§

VEX_Vpminsw_ymm_ymm_ymmm256

VPMINSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EA /r

AVX2

16/32/64-bit

§

EVEX_Vpminsw_xmm_k1z_xmm_xmmm128

VPMINSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsw_ymm_k1z_ymm_ymmm256

VPMINSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EA /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsw_zmm_k1z_zmm_zmmm512

VPMINSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EA /r

AVX512BW

16/32/64-bit

§

Por_mm_mmm64

POR mm, mm/m64

NP 0F EB /r

MMX

16/32/64-bit

§

Por_xmm_xmmm128

POR xmm1, xmm2/m128

66 0F EB /r

SSE2

16/32/64-bit

§

VEX_Vpor_xmm_xmm_xmmm128

VPOR xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EB /r

AVX

16/32/64-bit

§

VEX_Vpor_ymm_ymm_ymmm256

VPOR ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EB /r

AVX2

16/32/64-bit

§

EVEX_Vpord_xmm_k1z_xmm_xmmm128b32

VPORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpord_ymm_k1z_ymm_ymmm256b32

VPORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpord_zmm_k1z_zmm_zmmm512b32

VPORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 EB /r

AVX512F

16/32/64-bit

§

EVEX_Vporq_xmm_k1z_xmm_xmmm128b64

VPORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vporq_ymm_k1z_ymm_ymmm256b64

VPORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 EB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vporq_zmm_k1z_zmm_zmmm512b64

VPORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 EB /r

AVX512F

16/32/64-bit

§

Paddsb_mm_mmm64

PADDSB mm, mm/m64

NP 0F EC /r

MMX

16/32/64-bit

§

Paddsb_xmm_xmmm128

PADDSB xmm1, xmm2/m128

66 0F EC /r

SSE2

16/32/64-bit

§

VEX_Vpaddsb_xmm_xmm_xmmm128

VPADDSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EC /r

AVX

16/32/64-bit

§

VEX_Vpaddsb_ymm_ymm_ymmm256

VPADDSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddsb_xmm_k1z_xmm_xmmm128

VPADDSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsb_ymm_k1z_ymm_ymmm256

VPADDSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsb_zmm_k1z_zmm_zmmm512

VPADDSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EC /r

AVX512BW

16/32/64-bit

§

Paddsw_mm_mmm64

PADDSW mm, mm/m64

NP 0F ED /r

MMX

16/32/64-bit

§

Paddsw_xmm_xmmm128

PADDSW xmm1, xmm2/m128

66 0F ED /r

SSE2

16/32/64-bit

§

VEX_Vpaddsw_xmm_xmm_xmmm128

VPADDSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG ED /r

AVX

16/32/64-bit

§

VEX_Vpaddsw_ymm_ymm_ymmm256

VPADDSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG ED /r

AVX2

16/32/64-bit

§

EVEX_Vpaddsw_xmm_k1z_xmm_xmmm128

VPADDSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG ED /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsw_ymm_k1z_ymm_ymmm256

VPADDSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG ED /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddsw_zmm_k1z_zmm_zmmm512

VPADDSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG ED /r

AVX512BW

16/32/64-bit

§

Pmaxsw_mm_mmm64

PMAXSW mm1, mm2/m64

NP 0F EE /r

SSE

16/32/64-bit

§

Pmaxsw_xmm_xmmm128

PMAXSW xmm1, xmm2/m128

66 0F EE /r

SSE2

16/32/64-bit

§

VEX_Vpmaxsw_xmm_xmm_xmmm128

VPMAXSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EE /r

AVX

16/32/64-bit

§

VEX_Vpmaxsw_ymm_ymm_ymmm256

VPMAXSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EE /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsw_xmm_k1z_xmm_xmmm128

VPMAXSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG EE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsw_ymm_k1z_ymm_ymmm256

VPMAXSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG EE /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsw_zmm_k1z_zmm_zmmm512

VPMAXSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG EE /r

AVX512BW

16/32/64-bit

§

Pxor_mm_mmm64

PXOR mm, mm/m64

NP 0F EF /r

MMX

16/32/64-bit

§

Pxor_xmm_xmmm128

PXOR xmm1, xmm2/m128

66 0F EF /r

SSE2

16/32/64-bit

§

VEX_Vpxor_xmm_xmm_xmmm128

VPXOR xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG EF /r

AVX

16/32/64-bit

§

VEX_Vpxor_ymm_ymm_ymmm256

VPXOR ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG EF /r

AVX2

16/32/64-bit

§

EVEX_Vpxord_xmm_k1z_xmm_xmmm128b32

VPXORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxord_ymm_k1z_ymm_ymmm256b32

VPXORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxord_zmm_k1z_zmm_zmmm512b32

VPXORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 EF /r

AVX512F

16/32/64-bit

§

EVEX_Vpxorq_xmm_k1z_xmm_xmmm128b64

VPXORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxorq_ymm_k1z_ymm_ymmm256b64

VPXORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 EF /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpxorq_zmm_k1z_zmm_zmmm512b64

VPXORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 EF /r

AVX512F

16/32/64-bit

§

Lddqu_xmm_m128

LDDQU xmm1, m128

F2 0F F0 /r

SSE3

16/32/64-bit

§

VEX_Vlddqu_xmm_m128

VLDDQU xmm1, m128

VEX.128.F2.0F.WIG F0 /r

AVX

16/32/64-bit

§

VEX_Vlddqu_ymm_m256

VLDDQU ymm1, m256

VEX.256.F2.0F.WIG F0 /r

AVX

16/32/64-bit

§

Psllw_mm_mmm64

PSLLW mm, mm/m64

NP 0F F1 /r

MMX

16/32/64-bit

§

Psllw_xmm_xmmm128

PSLLW xmm1, xmm2/m128

66 0F F1 /r

SSE2

16/32/64-bit

§

VEX_Vpsllw_xmm_xmm_xmmm128

VPSLLW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F1 /r

AVX

16/32/64-bit

§

VEX_Vpsllw_ymm_ymm_xmmm128

VPSLLW ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F1 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllw_xmm_k1z_xmm_xmmm128

VPSLLW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_ymm_k1z_ymm_xmmm128

VPSLLW ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.WIG F1 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllw_zmm_k1z_zmm_xmmm128

VPSLLW zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.WIG F1 /r

AVX512BW

16/32/64-bit

§

Pslld_mm_mmm64

PSLLD mm, mm/m64

NP 0F F2 /r

MMX

16/32/64-bit

§

Pslld_xmm_xmmm128

PSLLD xmm1, xmm2/m128

66 0F F2 /r

SSE2

16/32/64-bit

§

VEX_Vpslld_xmm_xmm_xmmm128

VPSLLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F2 /r

AVX

16/32/64-bit

§

VEX_Vpslld_ymm_ymm_xmmm128

VPSLLD ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F2 /r

AVX2

16/32/64-bit

§

EVEX_Vpslld_xmm_k1z_xmm_xmmm128

VPSLLD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W0 F2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_ymm_k1z_ymm_xmmm128

VPSLLD ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W0 F2 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpslld_zmm_k1z_zmm_xmmm128

VPSLLD zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W0 F2 /r

AVX512F

16/32/64-bit

§

Psllq_mm_mmm64

PSLLQ mm, mm/m64

NP 0F F3 /r

MMX

16/32/64-bit

§

Psllq_xmm_xmmm128

PSLLQ xmm1, xmm2/m128

66 0F F3 /r

SSE2

16/32/64-bit

§

VEX_Vpsllq_xmm_xmm_xmmm128

VPSLLQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F3 /r

AVX

16/32/64-bit

§

VEX_Vpsllq_ymm_ymm_xmmm128

VPSLLQ ymm1, ymm2, xmm3/m128

VEX.256.66.0F.WIG F3 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllq_xmm_k1z_xmm_xmmm128

VPSLLQ xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.W1 F3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_ymm_k1z_ymm_xmmm128

VPSLLQ ymm1 {k1}{z}, ymm2, xmm3/m128

EVEX.256.66.0F.W1 F3 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllq_zmm_k1z_zmm_xmmm128

VPSLLQ zmm1 {k1}{z}, zmm2, xmm3/m128

EVEX.512.66.0F.W1 F3 /r

AVX512F

16/32/64-bit

§

Pmuludq_mm_mmm64

PMULUDQ mm1, mm2/m64

NP 0F F4 /r

SSE2

16/32/64-bit

§

Pmuludq_xmm_xmmm128

PMULUDQ xmm1, xmm2/m128

66 0F F4 /r

SSE2

16/32/64-bit

§

VEX_Vpmuludq_xmm_xmm_xmmm128

VPMULUDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F4 /r

AVX

16/32/64-bit

§

VEX_Vpmuludq_ymm_ymm_ymmm256

VPMULUDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F4 /r

AVX2

16/32/64-bit

§

EVEX_Vpmuludq_xmm_k1z_xmm_xmmm128b64

VPMULUDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 F4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuludq_ymm_k1z_ymm_ymmm256b64

VPMULUDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 F4 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuludq_zmm_k1z_zmm_zmmm512b64

VPMULUDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 F4 /r

AVX512F

16/32/64-bit

§

Pmaddwd_mm_mmm64

PMADDWD mm, mm/m64

NP 0F F5 /r

MMX

16/32/64-bit

§

Pmaddwd_xmm_xmmm128

PMADDWD xmm1, xmm2/m128

66 0F F5 /r

SSE2

16/32/64-bit

§

VEX_Vpmaddwd_xmm_xmm_xmmm128

VPMADDWD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F5 /r

AVX

16/32/64-bit

§

VEX_Vpmaddwd_ymm_ymm_ymmm256

VPMADDWD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F5 /r

AVX2

16/32/64-bit

§

EVEX_Vpmaddwd_xmm_k1z_xmm_xmmm128

VPMADDWD xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddwd_ymm_k1z_ymm_ymmm256

VPMADDWD ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F5 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddwd_zmm_k1z_zmm_zmmm512

VPMADDWD zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F5 /r

AVX512BW

16/32/64-bit

§

Psadbw_mm_mmm64

PSADBW mm1, mm2/m64

NP 0F F6 /r

SSE

16/32/64-bit

§

Psadbw_xmm_xmmm128

PSADBW xmm1, xmm2/m128

66 0F F6 /r

SSE2

16/32/64-bit

§

VEX_Vpsadbw_xmm_xmm_xmmm128

VPSADBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F6 /r

AVX

16/32/64-bit

§

VEX_Vpsadbw_ymm_ymm_ymmm256

VPSADBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F6 /r

AVX2

16/32/64-bit

§

EVEX_Vpsadbw_xmm_xmm_xmmm128

VPSADBW xmm1, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F6 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsadbw_ymm_ymm_ymmm256

VPSADBW ymm1, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F6 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsadbw_zmm_zmm_zmmm512

VPSADBW zmm1, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F6 /r

AVX512BW

16/32/64-bit

§

Maskmovq_rDI_mm_mm

MASKMOVQ mm1, mm2

NP 0F F7 /r

SSE

16/32/64-bit

§

Maskmovdqu_rDI_xmm_xmm

MASKMOVDQU xmm1, xmm2

66 0F F7 /r

SSE2

16/32/64-bit

§

VEX_Vmaskmovdqu_rDI_xmm_xmm

VMASKMOVDQU xmm1, xmm2

VEX.128.66.0F.WIG F7 /r

AVX

16/32/64-bit

§

Psubb_mm_mmm64

PSUBB mm, mm/m64

NP 0F F8 /r

MMX

16/32/64-bit

§

Psubb_xmm_xmmm128

PSUBB xmm1, xmm2/m128

66 0F F8 /r

SSE2

16/32/64-bit

§

VEX_Vpsubb_xmm_xmm_xmmm128

VPSUBB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F8 /r

AVX

16/32/64-bit

§

VEX_Vpsubb_ymm_ymm_ymmm256

VPSUBB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F8 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubb_xmm_k1z_xmm_xmmm128

VPSUBB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubb_ymm_k1z_ymm_ymmm256

VPSUBB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F8 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubb_zmm_k1z_zmm_zmmm512

VPSUBB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F8 /r

AVX512BW

16/32/64-bit

§

Psubw_mm_mmm64

PSUBW mm, mm/m64

NP 0F F9 /r

MMX

16/32/64-bit

§

Psubw_xmm_xmmm128

PSUBW xmm1, xmm2/m128

66 0F F9 /r

SSE2

16/32/64-bit

§

VEX_Vpsubw_xmm_xmm_xmmm128

VPSUBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG F9 /r

AVX

16/32/64-bit

§

VEX_Vpsubw_ymm_ymm_ymmm256

VPSUBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG F9 /r

AVX2

16/32/64-bit

§

EVEX_Vpsubw_xmm_k1z_xmm_xmmm128

VPSUBW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG F9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubw_ymm_k1z_ymm_ymmm256

VPSUBW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG F9 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsubw_zmm_k1z_zmm_zmmm512

VPSUBW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG F9 /r

AVX512BW

16/32/64-bit

§

Psubd_mm_mmm64

PSUBD mm, mm/m64

NP 0F FA /r

MMX

16/32/64-bit

§

Psubd_xmm_xmmm128

PSUBD xmm1, xmm2/m128

66 0F FA /r

SSE2

16/32/64-bit

§

VEX_Vpsubd_xmm_xmm_xmmm128

VPSUBD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FA /r

AVX

16/32/64-bit

§

VEX_Vpsubd_ymm_ymm_ymmm256

VPSUBD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FA /r

AVX2

16/32/64-bit

§

EVEX_Vpsubd_xmm_k1z_xmm_xmmm128b32

VPSUBD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 FA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubd_ymm_k1z_ymm_ymmm256b32

VPSUBD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 FA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubd_zmm_k1z_zmm_zmmm512b32

VPSUBD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 FA /r

AVX512F

16/32/64-bit

§

Psubq_mm_mmm64

PSUBQ mm1, mm2/m64

NP 0F FB /r

SSE2

16/32/64-bit

§

Psubq_xmm_xmmm128

PSUBQ xmm1, xmm2/m128

66 0F FB /r

SSE2

16/32/64-bit

§

VEX_Vpsubq_xmm_xmm_xmmm128

VPSUBQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FB /r

AVX

16/32/64-bit

§

VEX_Vpsubq_ymm_ymm_ymmm256

VPSUBQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FB /r

AVX2

16/32/64-bit

§

EVEX_Vpsubq_xmm_k1z_xmm_xmmm128b64

VPSUBQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F.W1 FB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubq_ymm_k1z_ymm_ymmm256b64

VPSUBQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F.W1 FB /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsubq_zmm_k1z_zmm_zmmm512b64

VPSUBQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F.W1 FB /r

AVX512F

16/32/64-bit

§

Paddb_mm_mmm64

PADDB mm, mm/m64

NP 0F FC /r

MMX

16/32/64-bit

§

Paddb_xmm_xmmm128

PADDB xmm1, xmm2/m128

66 0F FC /r

SSE2

16/32/64-bit

§

VEX_Vpaddb_xmm_xmm_xmmm128

VPADDB xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FC /r

AVX

16/32/64-bit

§

VEX_Vpaddb_ymm_ymm_ymmm256

VPADDB ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FC /r

AVX2

16/32/64-bit

§

EVEX_Vpaddb_xmm_k1z_xmm_xmmm128

VPADDB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG FC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddb_ymm_k1z_ymm_ymmm256

VPADDB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG FC /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddb_zmm_k1z_zmm_zmmm512

VPADDB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG FC /r

AVX512BW

16/32/64-bit

§

Paddw_mm_mmm64

PADDW mm, mm/m64

NP 0F FD /r

MMX

16/32/64-bit

§

Paddw_xmm_xmmm128

PADDW xmm1, xmm2/m128

66 0F FD /r

SSE2

16/32/64-bit

§

VEX_Vpaddw_xmm_xmm_xmmm128

VPADDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FD /r

AVX

16/32/64-bit

§

VEX_Vpaddw_ymm_ymm_ymmm256

VPADDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FD /r

AVX2

16/32/64-bit

§

EVEX_Vpaddw_xmm_k1z_xmm_xmmm128

VPADDW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F.WIG FD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddw_ymm_k1z_ymm_ymmm256

VPADDW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F.WIG FD /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpaddw_zmm_k1z_zmm_zmmm512

VPADDW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F.WIG FD /r

AVX512BW

16/32/64-bit

§

Paddd_mm_mmm64

PADDD mm, mm/m64

NP 0F FE /r

MMX

16/32/64-bit

§

Paddd_xmm_xmmm128

PADDD xmm1, xmm2/m128

66 0F FE /r

SSE2

16/32/64-bit

§

VEX_Vpaddd_xmm_xmm_xmmm128

VPADDD xmm1, xmm2, xmm3/m128

VEX.128.66.0F.WIG FE /r

AVX

16/32/64-bit

§

VEX_Vpaddd_ymm_ymm_ymmm256

VPADDD ymm1, ymm2, ymm3/m256

VEX.256.66.0F.WIG FE /r

AVX2

16/32/64-bit

§

EVEX_Vpaddd_xmm_k1z_xmm_xmmm128b32

VPADDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F.W0 FE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddd_ymm_k1z_ymm_ymmm256b32

VPADDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F.W0 FE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpaddd_zmm_k1z_zmm_zmmm512b32

VPADDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F.W0 FE /r

AVX512F

16/32/64-bit

§

Ud0_r16_rm16

UD0 r16, r/m16

o16 0F FF /r

286+

16/32/64-bit

§

Ud0_r32_rm32

UD0 r32, r/m32

o32 0F FF /r

386+

16/32/64-bit

§

Ud0_r64_rm64

UD0 r64, r/m64

o64 0F FF /r

X64

64-bit

§

Pshufb_mm_mmm64

PSHUFB mm1, mm2/m64

NP 0F 38 00 /r

SSSE3

16/32/64-bit

§

Pshufb_xmm_xmmm128

PSHUFB xmm1, xmm2/m128

66 0F 38 00 /r

SSSE3

16/32/64-bit

§

VEX_Vpshufb_xmm_xmm_xmmm128

VPSHUFB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 00 /r

AVX

16/32/64-bit

§

VEX_Vpshufb_ymm_ymm_ymmm256

VPSHUFB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 00 /r

AVX2

16/32/64-bit

§

EVEX_Vpshufb_xmm_k1z_xmm_xmmm128

VPSHUFB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 00 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufb_ymm_k1z_ymm_ymmm256

VPSHUFB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 00 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpshufb_zmm_k1z_zmm_zmmm512

VPSHUFB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 00 /r

AVX512BW

16/32/64-bit

§

Phaddw_mm_mmm64

PHADDW mm1, mm2/m64

NP 0F 38 01 /r

SSSE3

16/32/64-bit

§

Phaddw_xmm_xmmm128

PHADDW xmm1, xmm2/m128

66 0F 38 01 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddw_xmm_xmm_xmmm128

VPHADDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 01 /r

AVX

16/32/64-bit

§

VEX_Vphaddw_ymm_ymm_ymmm256

VPHADDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 01 /r

AVX2

16/32/64-bit

§

Phaddd_mm_mmm64

PHADDD mm1, mm2/m64

NP 0F 38 02 /r

SSSE3

16/32/64-bit

§

Phaddd_xmm_xmmm128

PHADDD xmm1, xmm2/m128

66 0F 38 02 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddd_xmm_xmm_xmmm128

VPHADDD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 02 /r

AVX

16/32/64-bit

§

VEX_Vphaddd_ymm_ymm_ymmm256

VPHADDD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 02 /r

AVX2

16/32/64-bit

§

Phaddsw_mm_mmm64

PHADDSW mm1, mm2/m64

NP 0F 38 03 /r

SSSE3

16/32/64-bit

§

Phaddsw_xmm_xmmm128

PHADDSW xmm1, xmm2/m128

66 0F 38 03 /r

SSSE3

16/32/64-bit

§

VEX_Vphaddsw_xmm_xmm_xmmm128

VPHADDSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 03 /r

AVX

16/32/64-bit

§

VEX_Vphaddsw_ymm_ymm_ymmm256

VPHADDSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 03 /r

AVX2

16/32/64-bit

§

Pmaddubsw_mm_mmm64

PMADDUBSW mm1, mm2/m64

NP 0F 38 04 /r

SSSE3

16/32/64-bit

§

Pmaddubsw_xmm_xmmm128

PMADDUBSW xmm1, xmm2/m128

66 0F 38 04 /r

SSSE3

16/32/64-bit

§

VEX_Vpmaddubsw_xmm_xmm_xmmm128

VPMADDUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 04 /r

AVX

16/32/64-bit

§

VEX_Vpmaddubsw_ymm_ymm_ymmm256

VPMADDUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 04 /r

AVX2

16/32/64-bit

§

EVEX_Vpmaddubsw_xmm_k1z_xmm_xmmm128

VPMADDUBSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 04 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddubsw_ymm_k1z_ymm_ymmm256

VPMADDUBSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 04 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaddubsw_zmm_k1z_zmm_zmmm512

VPMADDUBSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 04 /r

AVX512BW

16/32/64-bit

§

Phsubw_mm_mmm64

PHSUBW mm1, mm2/m64

NP 0F 38 05 /r

SSSE3

16/32/64-bit

§

Phsubw_xmm_xmmm128

PHSUBW xmm1, xmm2/m128

66 0F 38 05 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubw_xmm_xmm_xmmm128

VPHSUBW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 05 /r

AVX

16/32/64-bit

§

VEX_Vphsubw_ymm_ymm_ymmm256

VPHSUBW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 05 /r

AVX2

16/32/64-bit

§

Phsubd_mm_mmm64

PHSUBD mm1, mm2/m64

NP 0F 38 06 /r

SSSE3

16/32/64-bit

§

Phsubd_xmm_xmmm128

PHSUBD xmm1, xmm2/m128

66 0F 38 06 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubd_xmm_xmm_xmmm128

VPHSUBD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 06 /r

AVX

16/32/64-bit

§

VEX_Vphsubd_ymm_ymm_ymmm256

VPHSUBD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 06 /r

AVX2

16/32/64-bit

§

Phsubsw_mm_mmm64

PHSUBSW mm1, mm2/m64

NP 0F 38 07 /r

SSSE3

16/32/64-bit

§

Phsubsw_xmm_xmmm128

PHSUBSW xmm1, xmm2/m128

66 0F 38 07 /r

SSSE3

16/32/64-bit

§

VEX_Vphsubsw_xmm_xmm_xmmm128

VPHSUBSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 07 /r

AVX

16/32/64-bit

§

VEX_Vphsubsw_ymm_ymm_ymmm256

VPHSUBSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 07 /r

AVX2

16/32/64-bit

§

Psignb_mm_mmm64

PSIGNB mm1, mm2/m64

NP 0F 38 08 /r

SSSE3

16/32/64-bit

§

Psignb_xmm_xmmm128

PSIGNB xmm1, xmm2/m128

66 0F 38 08 /r

SSSE3

16/32/64-bit

§

VEX_Vpsignb_xmm_xmm_xmmm128

VPSIGNB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 08 /r

AVX

16/32/64-bit

§

VEX_Vpsignb_ymm_ymm_ymmm256

VPSIGNB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 08 /r

AVX2

16/32/64-bit

§

Psignw_mm_mmm64

PSIGNW mm1, mm2/m64

NP 0F 38 09 /r

SSSE3

16/32/64-bit

§

Psignw_xmm_xmmm128

PSIGNW xmm1, xmm2/m128

66 0F 38 09 /r

SSSE3

16/32/64-bit

§

VEX_Vpsignw_xmm_xmm_xmmm128

VPSIGNW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 09 /r

AVX

16/32/64-bit

§

VEX_Vpsignw_ymm_ymm_ymmm256

VPSIGNW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 09 /r

AVX2

16/32/64-bit

§

Psignd_mm_mmm64

PSIGND mm1, mm2/m64

NP 0F 38 0A /r

SSSE3

16/32/64-bit

§

Psignd_xmm_xmmm128

PSIGND xmm1, xmm2/m128

66 0F 38 0A /r

SSSE3

16/32/64-bit

§

VEX_Vpsignd_xmm_xmm_xmmm128

VPSIGND xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 0A /r

AVX

16/32/64-bit

§

VEX_Vpsignd_ymm_ymm_ymmm256

VPSIGND ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 0A /r

AVX2

16/32/64-bit

§

Pmulhrsw_mm_mmm64

PMULHRSW mm1, mm2/m64

NP 0F 38 0B /r

SSSE3

16/32/64-bit

§

Pmulhrsw_xmm_xmmm128

PMULHRSW xmm1, xmm2/m128

66 0F 38 0B /r

SSSE3

16/32/64-bit

§

VEX_Vpmulhrsw_xmm_xmm_xmmm128

VPMULHRSW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 0B /r

AVX

16/32/64-bit

§

VEX_Vpmulhrsw_ymm_ymm_ymmm256

VPMULHRSW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 0B /r

AVX2

16/32/64-bit

§

EVEX_Vpmulhrsw_xmm_k1z_xmm_xmmm128

VPMULHRSW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 0B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhrsw_ymm_k1z_ymm_ymmm256

VPMULHRSW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 0B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmulhrsw_zmm_k1z_zmm_zmmm512

VPMULHRSW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 0B /r

AVX512BW

16/32/64-bit

§

VEX_Vpermilps_xmm_xmm_xmmm128

VPERMILPS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 0C /r

AVX

16/32/64-bit

§

VEX_Vpermilps_ymm_ymm_ymmm256

VPERMILPS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 0C /r

AVX

16/32/64-bit

§

EVEX_Vpermilps_xmm_k1z_xmm_xmmm128b32

VPERMILPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 0C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_ymm_k1z_ymm_ymmm256b32

VPERMILPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 0C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_zmm_k1z_zmm_zmmm512b32

VPERMILPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 0C /r

AVX512F

16/32/64-bit

§

VEX_Vpermilpd_xmm_xmm_xmmm128

VPERMILPD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 0D /r

AVX

16/32/64-bit

§

VEX_Vpermilpd_ymm_ymm_ymmm256

VPERMILPD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 0D /r

AVX

16/32/64-bit

§

EVEX_Vpermilpd_xmm_k1z_xmm_xmmm128b64

VPERMILPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 0D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_ymm_k1z_ymm_ymmm256b64

VPERMILPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 0D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_zmm_k1z_zmm_zmmm512b64

VPERMILPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 0D /r

AVX512F

16/32/64-bit

§

VEX_Vtestps_xmm_xmmm128

VTESTPS xmm1, xmm2/m128

VEX.128.66.0F38.W0 0E /r

AVX

16/32/64-bit

§

VEX_Vtestps_ymm_ymmm256

VTESTPS ymm1, ymm2/m256

VEX.256.66.0F38.W0 0E /r

AVX

16/32/64-bit

§

VEX_Vtestpd_xmm_xmmm128

VTESTPD xmm1, xmm2/m128

VEX.128.66.0F38.W0 0F /r

AVX

16/32/64-bit

§

VEX_Vtestpd_ymm_ymmm256

VTESTPD ymm1, ymm2/m256

VEX.256.66.0F38.W0 0F /r

AVX

16/32/64-bit

§

Pblendvb_xmm_xmmm128

PBLENDVB xmm1, xmm2/m128, <XMM0>

66 0F 38 10 /r

SSE4.1

16/32/64-bit

§

EVEX_Vpsrlvw_xmm_k1z_xmm_xmmm128

VPSRLVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlvw_ymm_k1z_ymm_ymmm256

VPSRLVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsrlvw_zmm_k1z_zmm_zmmm512

VPSRLVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 10 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_xmmm64_k1z_xmm

VPMOVUSWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_xmmm128_k1z_ymm

VPMOVUSWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 10 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovuswb_ymmm256_k1z_zmm

VPMOVUSWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 10 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_xmm_k1z_xmm_xmmm128

VPSRAVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 11 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_ymm_k1z_ymm_ymmm256

VPSRAVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 11 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsravw_zmm_k1z_zmm_zmmm512

VPSRAVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 11 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm32_k1z_xmm

VPMOVUSDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm64_k1z_ymm

VPMOVUSDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 11 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdb_xmmm128_k1z_zmm

VPMOVUSDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 11 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsllvw_xmm_k1z_xmm_xmmm128

VPSLLVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 12 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllvw_ymm_k1z_ymm_ymmm256

VPSLLVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 12 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpsllvw_zmm_k1z_zmm_zmmm512

VPSLLVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 12 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm16_k1z_xmm

VPMOVUSQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm32_k1z_ymm

VPMOVUSQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 12 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqb_xmmm64_k1z_zmm

VPMOVUSQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 12 /r

AVX512F

16/32/64-bit

§

VEX_Vcvtph2ps_xmm_xmmm64

VCVTPH2PS xmm1, xmm2/m64

VEX.128.66.0F38.W0 13 /r

F16C

16/32/64-bit

§

VEX_Vcvtph2ps_ymm_xmmm128

VCVTPH2PS ymm1, xmm2/m128

VEX.256.66.0F38.W0 13 /r

F16C

16/32/64-bit

§

EVEX_Vcvtph2ps_xmm_k1z_xmmm64

VCVTPH2PS xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtph2ps_ymm_k1z_xmmm128

VCVTPH2PS ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtph2ps_zmm_k1z_ymmm256_sae

VCVTPH2PS zmm1 {k1}{z}, ymm2/m256{sae}

EVEX.512.66.0F38.W0 13 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_xmmm64_k1z_xmm

VPMOVUSDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_xmmm128_k1z_ymm

VPMOVUSDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 13 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusdw_ymmm256_k1z_zmm

VPMOVUSDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 13 /r

AVX512F

16/32/64-bit

§

Blendvps_xmm_xmmm128

BLENDVPS xmm1, xmm2/m128, <XMM0>

66 0F 38 14 /r

SSE4.1

16/32/64-bit

§

EVEX_Vprorvd_xmm_k1z_xmm_xmmm128b32

VPRORVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvd_ymm_k1z_ymm_ymmm256b32

VPRORVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvd_zmm_k1z_zmm_zmmm512b32

VPRORVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 14 /r

AVX512F

16/32/64-bit

§

EVEX_Vprorvq_xmm_k1z_xmm_xmmm128b64

VPRORVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvq_ymm_k1z_ymm_ymmm256b64

VPRORVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprorvq_zmm_k1z_zmm_zmmm512b64

VPRORVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 14 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm32_k1z_xmm

VPMOVUSQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm64_k1z_ymm

VPMOVUSQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 14 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqw_xmmm128_k1z_zmm

VPMOVUSQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 14 /r

AVX512F

16/32/64-bit

§

Blendvpd_xmm_xmmm128

BLENDVPD xmm1, xmm2/m128, <XMM0>

66 0F 38 15 /r

SSE4.1

16/32/64-bit

§

EVEX_Vprolvd_xmm_k1z_xmm_xmmm128b32

VPROLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvd_ymm_k1z_ymm_ymmm256b32

VPROLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvd_zmm_k1z_zmm_zmmm512b32

VPROLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 15 /r

AVX512F

16/32/64-bit

§

EVEX_Vprolvq_xmm_k1z_xmm_xmmm128b64

VPROLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvq_ymm_k1z_ymm_ymmm256b64

VPROLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vprolvq_zmm_k1z_zmm_zmmm512b64

VPROLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 15 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_xmmm64_k1z_xmm

VPMOVUSQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_xmmm128_k1z_ymm

VPMOVUSQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 15 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovusqd_ymmm256_k1z_zmm

VPMOVUSQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 15 /r

AVX512F

16/32/64-bit

§

VEX_Vpermps_ymm_ymm_ymmm256

VPERMPS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 16 /r

AVX2

16/32/64-bit

§

EVEX_Vpermps_ymm_k1z_ymm_ymmm256b32

VPERMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermps_zmm_k1z_zmm_zmmm512b32

VPERMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 16 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermpd_ymm_k1z_ymm_ymmm256b64

VPERMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 16 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermpd_zmm_k1z_zmm_zmmm512b64

VPERMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 16 /r

AVX512F

16/32/64-bit

§

Ptest_xmm_xmmm128

PTEST xmm1, xmm2/m128

66 0F 38 17 /r

SSE4.1

16/32/64-bit

§

VEX_Vptest_xmm_xmmm128

VPTEST xmm1, xmm2/m128

VEX.128.66.0F38.WIG 17 /r

AVX

16/32/64-bit

§

VEX_Vptest_ymm_ymmm256

VPTEST ymm1, ymm2/m256

VEX.256.66.0F38.WIG 17 /r

AVX

16/32/64-bit

§

VEX_Vbroadcastss_xmm_m32

VBROADCASTSS xmm1, m32

VEX.128.66.0F38.W0 18 /r

AVX

16/32/64-bit

§

VEX_Vbroadcastss_ymm_m32

VBROADCASTSS ymm1, m32

VEX.256.66.0F38.W0 18 /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastss_xmm_k1z_xmmm32

VBROADCASTSS xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.W0 18 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastss_ymm_k1z_xmmm32

VBROADCASTSS ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.W0 18 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastss_zmm_k1z_xmmm32

VBROADCASTSS zmm1 {k1}{z}, xmm2/m32

EVEX.512.66.0F38.W0 18 /r

AVX512F

16/32/64-bit

§

VEX_Vbroadcastsd_ymm_m64

VBROADCASTSD ymm1, m64

VEX.256.66.0F38.W0 19 /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastf32x2_ymm_k1z_xmmm64

VBROADCASTF32X2 ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W0 19 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf32x2_zmm_k1z_xmmm64

VBROADCASTF32X2 zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W0 19 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastsd_ymm_k1z_xmmm64

VBROADCASTSD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W1 19 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastsd_zmm_k1z_xmmm64

VBROADCASTSD zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W1 19 /r

AVX512F

16/32/64-bit

§

VEX_Vbroadcastf128_ymm_m128

VBROADCASTF128 ymm1, m128

VEX.256.66.0F38.W0 1A /r

AVX

16/32/64-bit

§

EVEX_Vbroadcastf32x4_ymm_k1z_m128

VBROADCASTF32X4 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W0 1A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcastf32x4_zmm_k1z_m128

VBROADCASTF32X4 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W0 1A /r

AVX512F

16/32/64-bit

§

EVEX_Vbroadcastf64x2_ymm_k1z_m128

VBROADCASTF64X2 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W1 1A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf64x2_zmm_k1z_m128

VBROADCASTF64X2 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W1 1A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf32x8_zmm_k1z_m256

VBROADCASTF32X8 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W0 1B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcastf64x4_zmm_k1z_m256

VBROADCASTF64X4 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W1 1B /r

AVX512F

16/32/64-bit

§

Pabsb_mm_mmm64

PABSB mm1, mm2/m64

NP 0F 38 1C /r

SSSE3

16/32/64-bit

§

Pabsb_xmm_xmmm128

PABSB xmm1, xmm2/m128

66 0F 38 1C /r

SSSE3

16/32/64-bit

§

VEX_Vpabsb_xmm_xmmm128

VPABSB xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1C /r

AVX

16/32/64-bit

§

VEX_Vpabsb_ymm_ymmm256

VPABSB ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1C /r

AVX2

16/32/64-bit

§

EVEX_Vpabsb_xmm_k1z_xmmm128

VPABSB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.WIG 1C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsb_ymm_k1z_ymmm256

VPABSB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.WIG 1C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsb_zmm_k1z_zmmm512

VPABSB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.WIG 1C /r

AVX512BW

16/32/64-bit

§

Pabsw_mm_mmm64

PABSW mm1, mm2/m64

NP 0F 38 1D /r

SSSE3

16/32/64-bit

§

Pabsw_xmm_xmmm128

PABSW xmm1, xmm2/m128

66 0F 38 1D /r

SSSE3

16/32/64-bit

§

VEX_Vpabsw_xmm_xmmm128

VPABSW xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1D /r

AVX

16/32/64-bit

§

VEX_Vpabsw_ymm_ymmm256

VPABSW ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1D /r

AVX2

16/32/64-bit

§

EVEX_Vpabsw_xmm_k1z_xmmm128

VPABSW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.WIG 1D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsw_ymm_k1z_ymmm256

VPABSW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.WIG 1D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpabsw_zmm_k1z_zmmm512

VPABSW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.WIG 1D /r

AVX512BW

16/32/64-bit

§

Pabsd_mm_mmm64

PABSD mm1, mm2/m64

NP 0F 38 1E /r

SSSE3

16/32/64-bit

§

Pabsd_xmm_xmmm128

PABSD xmm1, xmm2/m128

66 0F 38 1E /r

SSSE3

16/32/64-bit

§

VEX_Vpabsd_xmm_xmmm128

VPABSD xmm1, xmm2/m128

VEX.128.66.0F38.WIG 1E /r

AVX

16/32/64-bit

§

VEX_Vpabsd_ymm_ymmm256

VPABSD ymm1, ymm2/m256

VEX.256.66.0F38.WIG 1E /r

AVX2

16/32/64-bit

§

EVEX_Vpabsd_xmm_k1z_xmmm128b32

VPABSD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 1E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsd_ymm_k1z_ymmm256b32

VPABSD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 1E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsd_zmm_k1z_zmmm512b32

VPABSD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 1E /r

AVX512F

16/32/64-bit

§

EVEX_Vpabsq_xmm_k1z_xmmm128b64

VPABSQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 1F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsq_ymm_k1z_ymmm256b64

VPABSQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 1F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpabsq_zmm_k1z_zmmm512b64

VPABSQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 1F /r

AVX512F

16/32/64-bit

§

Pmovsxbw_xmm_xmmm64

PMOVSXBW xmm1, xmm2/m64

66 0F 38 20 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbw_xmm_xmmm64

VPMOVSXBW xmm1, xmm2/m64

VEX.128.66.0F38.WIG 20 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbw_ymm_xmmm128

VPMOVSXBW ymm1, xmm2/m128

VEX.256.66.0F38.WIG 20 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbw_xmm_k1z_xmmm64

VPMOVSXBW xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovsxbw_ymm_k1z_xmmm128

VPMOVSXBW ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovsxbw_zmm_k1z_ymmm256

VPMOVSXBW zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 20 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_xmmm64_k1z_xmm

VPMOVSWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_xmmm128_k1z_ymm

VPMOVSWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 20 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovswb_ymmm256_k1z_zmm

VPMOVSWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 20 /r

AVX512BW

16/32/64-bit

§

Pmovsxbd_xmm_xmmm32

PMOVSXBD xmm1, xmm2/m32

66 0F 38 21 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbd_xmm_xmmm32

VPMOVSXBD xmm1, xmm2/m32

VEX.128.66.0F38.WIG 21 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbd_ymm_xmmm64

VPMOVSXBD ymm1, xmm2/m64

VEX.256.66.0F38.WIG 21 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbd_xmm_k1z_xmmm32

VPMOVSXBD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbd_ymm_k1z_xmmm64

VPMOVSXBD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbd_zmm_k1z_xmmm128

VPMOVSXBD zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 21 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm32_k1z_xmm

VPMOVSDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm64_k1z_ymm

VPMOVSDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 21 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdb_xmmm128_k1z_zmm

VPMOVSDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 21 /r

AVX512F

16/32/64-bit

§

Pmovsxbq_xmm_xmmm16

PMOVSXBQ xmm1, xmm2/m16

66 0F 38 22 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxbq_xmm_xmmm16

VPMOVSXBQ xmm1, xmm2/m16

VEX.128.66.0F38.WIG 22 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxbq_ymm_xmmm32

VPMOVSXBQ ymm1, xmm2/m32

VEX.256.66.0F38.WIG 22 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxbq_xmm_k1z_xmmm16

VPMOVSXBQ xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.WIG 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbq_ymm_k1z_xmmm32

VPMOVSXBQ ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.WIG 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxbq_zmm_k1z_xmmm64

VPMOVSXBQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.WIG 22 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm16_k1z_xmm

VPMOVSQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm32_k1z_ymm

VPMOVSQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 22 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqb_xmmm64_k1z_zmm

VPMOVSQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 22 /r

AVX512F

16/32/64-bit

§

Pmovsxwd_xmm_xmmm64

PMOVSXWD xmm1, xmm2/m64

66 0F 38 23 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxwd_xmm_xmmm64

VPMOVSXWD xmm1, xmm2/m64

VEX.128.66.0F38.WIG 23 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxwd_ymm_xmmm128

VPMOVSXWD ymm1, xmm2/m128

VEX.256.66.0F38.WIG 23 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxwd_xmm_k1z_xmmm64

VPMOVSXWD xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwd_ymm_k1z_xmmm128

VPMOVSXWD ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwd_zmm_k1z_ymmm256

VPMOVSXWD zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 23 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_xmmm64_k1z_xmm

VPMOVSDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_xmmm128_k1z_ymm

VPMOVSDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 23 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsdw_ymmm256_k1z_zmm

VPMOVSDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 23 /r

AVX512F

16/32/64-bit

§

Pmovsxwq_xmm_xmmm32

PMOVSXWQ xmm1, xmm2/m32

66 0F 38 24 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxwq_xmm_xmmm32

VPMOVSXWQ xmm1, xmm2/m32

VEX.128.66.0F38.WIG 24 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxwq_ymm_xmmm64

VPMOVSXWQ ymm1, xmm2/m64

VEX.256.66.0F38.WIG 24 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxwq_xmm_k1z_xmmm32

VPMOVSXWQ xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwq_ymm_k1z_xmmm64

VPMOVSXWQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxwq_zmm_k1z_xmmm128

VPMOVSXWQ zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 24 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm32_k1z_xmm

VPMOVSQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm64_k1z_ymm

VPMOVSQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 24 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqw_xmmm128_k1z_zmm

VPMOVSQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 24 /r

AVX512F

16/32/64-bit

§

Pmovsxdq_xmm_xmmm64

PMOVSXDQ xmm1, xmm2/m64

66 0F 38 25 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovsxdq_xmm_xmmm64

VPMOVSXDQ xmm1, xmm2/m64

VEX.128.66.0F38.WIG 25 /r

AVX

16/32/64-bit

§

VEX_Vpmovsxdq_ymm_xmmm128

VPMOVSXDQ ymm1, xmm2/m128

VEX.256.66.0F38.WIG 25 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovsxdq_xmm_k1z_xmmm64

VPMOVSXDQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxdq_ymm_k1z_xmmm128

VPMOVSXDQ ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsxdq_zmm_k1z_ymmm256

VPMOVSXDQ zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.W0 25 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_xmmm64_k1z_xmm

VPMOVSQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_xmmm128_k1z_ymm

VPMOVSQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 25 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovsqd_ymmm256_k1z_zmm

VPMOVSQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 25 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_xmm_xmmm128

VPTESTMB k2 {k1}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_ymm_ymmm256

VPTESTMB k2 {k1}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmb_kr_k1_zmm_zmmm512

VPTESTMB k2 {k1}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_xmm_xmmm128

VPTESTMW k2 {k1}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_ymm_ymmm256

VPTESTMW k2 {k1}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestmw_kr_k1_zmm_zmmm512

VPTESTMW k2 {k1}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_xmm_xmmm128

VPTESTNMB k2 {k1}, xmm2, xmm3/m128

EVEX.128.F3.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_ymm_ymmm256

VPTESTNMB k2 {k1}, ymm2, ymm3/m256

EVEX.256.F3.0F38.W0 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmb_kr_k1_zmm_zmmm512

VPTESTNMB k2 {k1}, zmm2, zmm3/m512

EVEX.512.F3.0F38.W0 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_xmm_xmmm128

VPTESTNMW k2 {k1}, xmm2, xmm3/m128

EVEX.128.F3.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_ymm_ymmm256

VPTESTNMW k2 {k1}, ymm2, ymm3/m256

EVEX.256.F3.0F38.W1 26 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vptestnmw_kr_k1_zmm_zmmm512

VPTESTNMW k2 {k1}, zmm2, zmm3/m512

EVEX.512.F3.0F38.W1 26 /r

AVX512BW

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_xmm_xmmm128b32

VPTESTMD k2 {k1}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_ymm_ymmm256b32

VPTESTMD k2 {k1}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmd_kr_k1_zmm_zmmm512b32

VPTESTMD k2 {k1}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_xmm_xmmm128b64

VPTESTMQ k2 {k1}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_ymm_ymmm256b64

VPTESTMQ k2 {k1}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestmq_kr_k1_zmm_zmmm512b64

VPTESTMQ k2 {k1}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_xmm_xmmm128b32

VPTESTNMD k2 {k1}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_ymm_ymmm256b32

VPTESTNMD k2 {k1}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.0F38.W0 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmd_kr_k1_zmm_zmmm512b32

VPTESTNMD k2 {k1}, zmm2, zmm3/m512/m32bcst

EVEX.512.F3.0F38.W0 27 /r

AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_xmm_xmmm128b64

VPTESTNMQ k2 {k1}, xmm2, xmm3/m128/m64bcst

EVEX.128.F3.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_ymm_ymmm256b64

VPTESTNMQ k2 {k1}, ymm2, ymm3/m256/m64bcst

EVEX.256.F3.0F38.W1 27 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vptestnmq_kr_k1_zmm_zmmm512b64

VPTESTNMQ k2 {k1}, zmm2, zmm3/m512/m64bcst

EVEX.512.F3.0F38.W1 27 /r

AVX512F

16/32/64-bit

§

Pmuldq_xmm_xmmm128

PMULDQ xmm1, xmm2/m128

66 0F 38 28 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmuldq_xmm_xmm_xmmm128

VPMULDQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 28 /r

AVX

16/32/64-bit

§

VEX_Vpmuldq_ymm_ymm_ymmm256

VPMULDQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 28 /r

AVX2

16/32/64-bit

§

EVEX_Vpmuldq_xmm_k1z_xmm_xmmm128b64

VPMULDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuldq_ymm_k1z_ymm_ymmm256b64

VPMULDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 28 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmuldq_zmm_k1z_zmm_zmmm512b64

VPMULDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 28 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovm2b_xmm_kr

VPMOVM2B xmm1, k1

EVEX.128.F3.0F38.W0 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2b_ymm_kr

VPMOVM2B ymm1, k1

EVEX.256.F3.0F38.W0 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2b_zmm_kr

VPMOVM2B zmm1, k1

EVEX.512.F3.0F38.W0 28 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_xmm_kr

VPMOVM2W xmm1, k1

EVEX.128.F3.0F38.W1 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_ymm_kr

VPMOVM2W ymm1, k1

EVEX.256.F3.0F38.W1 28 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2w_zmm_kr

VPMOVM2W zmm1, k1

EVEX.512.F3.0F38.W1 28 /r

AVX512BW

16/32/64-bit

§

Pcmpeqq_xmm_xmmm128

PCMPEQQ xmm1, xmm2/m128

66 0F 38 29 /r

SSE4.1

16/32/64-bit

§

VEX_Vpcmpeqq_xmm_xmm_xmmm128

VPCMPEQQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 29 /r

AVX

16/32/64-bit

§

VEX_Vpcmpeqq_ymm_ymm_ymmm256

VPCMPEQQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 29 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_xmm_xmmm128b64

VPCMPEQQ k1 {k2}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_ymm_ymmm256b64

VPCMPEQQ k1 {k2}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 29 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpeqq_kr_k1_zmm_zmmm512b64

VPCMPEQQ k1 {k2}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 29 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovb2m_kr_xmm

VPMOVB2M k1, xmm1

EVEX.128.F3.0F38.W0 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovb2m_kr_ymm

VPMOVB2M k1, ymm1

EVEX.256.F3.0F38.W0 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovb2m_kr_zmm

VPMOVB2M k1, zmm1

EVEX.512.F3.0F38.W0 29 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_xmm

VPMOVW2M k1, xmm1

EVEX.128.F3.0F38.W1 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_ymm

VPMOVW2M k1, ymm1

EVEX.256.F3.0F38.W1 29 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovw2m_kr_zmm

VPMOVW2M k1, zmm1

EVEX.512.F3.0F38.W1 29 /r

AVX512BW

16/32/64-bit

§

Movntdqa_xmm_m128

MOVNTDQA xmm1, m128

66 0F 38 2A /r

SSE4.1

16/32/64-bit

§

VEX_Vmovntdqa_xmm_m128

VMOVNTDQA xmm1, m128

VEX.128.66.0F38.WIG 2A /r

AVX

16/32/64-bit

§

VEX_Vmovntdqa_ymm_m256

VMOVNTDQA ymm1, m256

VEX.256.66.0F38.WIG 2A /r

AVX2

16/32/64-bit

§

EVEX_Vmovntdqa_xmm_m128

VMOVNTDQA xmm1, m128

EVEX.128.66.0F38.W0 2A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdqa_ymm_m256

VMOVNTDQA ymm1, m256

EVEX.256.66.0F38.W0 2A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vmovntdqa_zmm_m512

VMOVNTDQA zmm1, m512

EVEX.512.66.0F38.W0 2A /r

AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_xmm_kr

VPBROADCASTMB2Q xmm1, k1

EVEX.128.F3.0F38.W1 2A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_ymm_kr

VPBROADCASTMB2Q ymm1, k1

EVEX.256.F3.0F38.W1 2A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmb2q_zmm_kr

VPBROADCASTMB2Q zmm1, k1

EVEX.512.F3.0F38.W1 2A /r

AVX512CD

16/32/64-bit

§

Packusdw_xmm_xmmm128

PACKUSDW xmm1, xmm2/m128

66 0F 38 2B /r

SSE4.1

16/32/64-bit

§

VEX_Vpackusdw_xmm_xmm_xmmm128

VPACKUSDW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 2B /r

AVX

16/32/64-bit

§

VEX_Vpackusdw_ymm_ymm_ymmm256

VPACKUSDW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 2B /r

AVX2

16/32/64-bit

§

EVEX_Vpackusdw_xmm_k1z_xmm_xmmm128b32

VPACKUSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 2B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackusdw_ymm_k1z_ymm_ymmm256b32

VPACKUSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 2B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpackusdw_zmm_k1z_zmm_zmmm512b32

VPACKUSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 2B /r

AVX512BW

16/32/64-bit

§

VEX_Vmaskmovps_xmm_xmm_m128

VMASKMOVPS xmm1, xmm2, m128

VEX.128.66.0F38.W0 2C /r

AVX

16/32/64-bit

§

VEX_Vmaskmovps_ymm_ymm_m256

VMASKMOVPS ymm1, ymm2, m256

VEX.256.66.0F38.W0 2C /r

AVX

16/32/64-bit

§

EVEX_Vscalefps_xmm_k1z_xmm_xmmm128b32

VSCALEFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefps_ymm_k1z_ymm_ymmm256b32

VSCALEFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefps_zmm_k1z_zmm_zmmm512b32_er

VSCALEFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 2C /r

AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_xmm_k1z_xmm_xmmm128b64

VSCALEFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_ymm_k1z_ymm_ymmm256b64

VSCALEFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 2C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscalefpd_zmm_k1z_zmm_zmmm512b64_er

VSCALEFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 2C /r

AVX512F

16/32/64-bit

§

VEX_Vmaskmovpd_xmm_xmm_m128

VMASKMOVPD xmm1, xmm2, m128

VEX.128.66.0F38.W0 2D /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_ymm_ymm_m256

VMASKMOVPD ymm1, ymm2, m256

VEX.256.66.0F38.W0 2D /r

AVX

16/32/64-bit

§

EVEX_Vscalefss_xmm_k1z_xmm_xmmm32_er

VSCALEFSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 2D /r

AVX512F

16/32/64-bit

§

EVEX_Vscalefsd_xmm_k1z_xmm_xmmm64_er

VSCALEFSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 2D /r

AVX512F

16/32/64-bit

§

VEX_Vmaskmovps_m128_xmm_xmm

VMASKMOVPS m128, xmm1, xmm2

VEX.128.66.0F38.W0 2E /r

AVX

16/32/64-bit

§

VEX_Vmaskmovps_m256_ymm_ymm

VMASKMOVPS m256, ymm1, ymm2

VEX.256.66.0F38.W0 2E /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_m128_xmm_xmm

VMASKMOVPD m128, xmm1, xmm2

VEX.128.66.0F38.W0 2F /r

AVX

16/32/64-bit

§

VEX_Vmaskmovpd_m256_ymm_ymm

VMASKMOVPD m256, ymm1, ymm2

VEX.256.66.0F38.W0 2F /r

AVX

16/32/64-bit

§

Pmovzxbw_xmm_xmmm64

PMOVZXBW xmm1, xmm2/m64

66 0F 38 30 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbw_xmm_xmmm64

VPMOVZXBW xmm1, xmm2/m64

VEX.128.66.0F38.WIG 30 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbw_ymm_xmmm128

VPMOVZXBW ymm1, xmm2/m128

VEX.256.66.0F38.WIG 30 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbw_xmm_k1z_xmmm64

VPMOVZXBW xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovzxbw_ymm_k1z_xmmm128

VPMOVZXBW ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovzxbw_zmm_k1z_ymmm256

VPMOVZXBW zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 30 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_xmmm64_k1z_xmm

VPMOVWB xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_xmmm128_k1z_ymm

VPMOVWB xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 30 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmovwb_ymmm256_k1z_zmm

VPMOVWB ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 30 /r

AVX512BW

16/32/64-bit

§

Pmovzxbd_xmm_xmmm32

PMOVZXBD xmm1, xmm2/m32

66 0F 38 31 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbd_xmm_xmmm32

VPMOVZXBD xmm1, xmm2/m32

VEX.128.66.0F38.WIG 31 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbd_ymm_xmmm64

VPMOVZXBD ymm1, xmm2/m64

VEX.256.66.0F38.WIG 31 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbd_xmm_k1z_xmmm32

VPMOVZXBD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbd_ymm_k1z_xmmm64

VPMOVZXBD ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbd_zmm_k1z_xmmm128

VPMOVZXBD zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 31 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm32_k1z_xmm

VPMOVDB xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm64_k1z_ymm

VPMOVDB xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 31 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdb_xmmm128_k1z_zmm

VPMOVDB xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 31 /r

AVX512F

16/32/64-bit

§

Pmovzxbq_xmm_xmmm16

PMOVZXBQ xmm1, xmm2/m16

66 0F 38 32 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxbq_xmm_xmmm16

VPMOVZXBQ xmm1, xmm2/m16

VEX.128.66.0F38.WIG 32 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxbq_ymm_xmmm32

VPMOVZXBQ ymm1, xmm2/m32

VEX.256.66.0F38.WIG 32 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxbq_xmm_k1z_xmmm16

VPMOVZXBQ xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.WIG 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbq_ymm_k1z_xmmm32

VPMOVZXBQ ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.WIG 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxbq_zmm_k1z_xmmm64

VPMOVZXBQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.WIG 32 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm16_k1z_xmm

VPMOVQB xmm1/m16 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm32_k1z_ymm

VPMOVQB xmm1/m32 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 32 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqb_xmmm64_k1z_zmm

VPMOVQB xmm1/m64 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 32 /r

AVX512F

16/32/64-bit

§

Pmovzxwd_xmm_xmmm64

PMOVZXWD xmm1, xmm2/m64

66 0F 38 33 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxwd_xmm_xmmm64

VPMOVZXWD xmm1, xmm2/m64

VEX.128.66.0F38.WIG 33 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxwd_ymm_xmmm128

VPMOVZXWD ymm1, xmm2/m128

VEX.256.66.0F38.WIG 33 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxwd_xmm_k1z_xmmm64

VPMOVZXWD xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.WIG 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwd_ymm_k1z_xmmm128

VPMOVZXWD ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.WIG 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwd_zmm_k1z_ymmm256

VPMOVZXWD zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.WIG 33 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_xmmm64_k1z_xmm

VPMOVDW xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_xmmm128_k1z_ymm

VPMOVDW xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 33 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovdw_ymmm256_k1z_zmm

VPMOVDW ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 33 /r

AVX512F

16/32/64-bit

§

Pmovzxwq_xmm_xmmm32

PMOVZXWQ xmm1, xmm2/m32

66 0F 38 34 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxwq_xmm_xmmm32

VPMOVZXWQ xmm1, xmm2/m32

VEX.128.66.0F38.WIG 34 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxwq_ymm_xmmm64

VPMOVZXWQ ymm1, xmm2/m64

VEX.256.66.0F38.WIG 34 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxwq_xmm_k1z_xmmm32

VPMOVZXWQ xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.WIG 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwq_ymm_k1z_xmmm64

VPMOVZXWQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.WIG 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxwq_zmm_k1z_xmmm128

VPMOVZXWQ zmm1 {k1}{z}, xmm2/m128

EVEX.512.66.0F38.WIG 34 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm32_k1z_xmm

VPMOVQW xmm1/m32 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm64_k1z_ymm

VPMOVQW xmm1/m64 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 34 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqw_xmmm128_k1z_zmm

VPMOVQW xmm1/m128 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 34 /r

AVX512F

16/32/64-bit

§

Pmovzxdq_xmm_xmmm64

PMOVZXDQ xmm1, xmm2/m64

66 0F 38 35 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmovzxdq_xmm_xmmm64

VPMOVZXDQ xmm1, xmm2/m64

VEX.128.66.0F38.WIG 35 /r

AVX

16/32/64-bit

§

VEX_Vpmovzxdq_ymm_xmmm128

VPMOVZXDQ ymm1, xmm2/m128

VEX.256.66.0F38.WIG 35 /r

AVX2

16/32/64-bit

§

EVEX_Vpmovzxdq_xmm_k1z_xmmm64

VPMOVZXDQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxdq_ymm_k1z_xmmm128

VPMOVZXDQ ymm1 {k1}{z}, xmm2/m128

EVEX.256.66.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovzxdq_zmm_k1z_ymmm256

VPMOVZXDQ zmm1 {k1}{z}, ymm2/m256

EVEX.512.66.0F38.W0 35 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_xmmm64_k1z_xmm

VPMOVQD xmm1/m64 {k1}{z}, xmm2

EVEX.128.F3.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_xmmm128_k1z_ymm

VPMOVQD xmm1/m128 {k1}{z}, ymm2

EVEX.256.F3.0F38.W0 35 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmovqd_ymmm256_k1z_zmm

VPMOVQD ymm1/m256 {k1}{z}, zmm2

EVEX.512.F3.0F38.W0 35 /r

AVX512F

16/32/64-bit

§

VEX_Vpermd_ymm_ymm_ymmm256

VPERMD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 36 /r

AVX2

16/32/64-bit

§

EVEX_Vpermd_ymm_k1z_ymm_ymmm256b32

VPERMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 36 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermd_zmm_k1z_zmm_zmmm512b32

VPERMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 36 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermq_ymm_k1z_ymm_ymmm256b64

VPERMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 36 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermq_zmm_k1z_zmm_zmmm512b64

VPERMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 36 /r

AVX512F

16/32/64-bit

§

Pcmpgtq_xmm_xmmm128

PCMPGTQ xmm1, xmm2/m128

66 0F 38 37 /r

SSE4.2

16/32/64-bit

§

VEX_Vpcmpgtq_xmm_xmm_xmmm128

VPCMPGTQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 37 /r

AVX

16/32/64-bit

§

VEX_Vpcmpgtq_ymm_ymm_ymmm256

VPCMPGTQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 37 /r

AVX2

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_xmm_xmmm128b64

VPCMPGTQ k1 {k2}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 37 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_ymm_ymmm256b64

VPCMPGTQ k1 {k2}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 37 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpgtq_kr_k1_zmm_zmmm512b64

VPCMPGTQ k1 {k2}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 37 /r

AVX512F

16/32/64-bit

§

Pminsb_xmm_xmmm128

PMINSB xmm1, xmm2/m128

66 0F 38 38 /r

SSE4.1

16/32/64-bit

§

VEX_Vpminsb_xmm_xmm_xmmm128

VPMINSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 38 /r

AVX

16/32/64-bit

§

VEX_Vpminsb_ymm_ymm_ymmm256

VPMINSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 38 /r

AVX2

16/32/64-bit

§

EVEX_Vpminsb_xmm_k1z_xmm_xmmm128

VPMINSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 38 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsb_ymm_k1z_ymm_ymmm256

VPMINSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 38 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminsb_zmm_k1z_zmm_zmmm512

VPMINSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 38 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpmovm2d_xmm_kr

VPMOVM2D xmm1, k1

EVEX.128.F3.0F38.W0 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2d_ymm_kr

VPMOVM2D ymm1, k1

EVEX.256.F3.0F38.W0 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2d_zmm_kr

VPMOVM2D zmm1, k1

EVEX.512.F3.0F38.W0 38 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_xmm_kr

VPMOVM2Q xmm1, k1

EVEX.128.F3.0F38.W1 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_ymm_kr

VPMOVM2Q ymm1, k1

EVEX.256.F3.0F38.W1 38 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovm2q_zmm_kr

VPMOVM2Q zmm1, k1

EVEX.512.F3.0F38.W1 38 /r

AVX512DQ

16/32/64-bit

§

Pminsd_xmm_xmmm128

PMINSD xmm1, xmm2/m128

66 0F 38 39 /r

SSE4.1

16/32/64-bit

§

VEX_Vpminsd_xmm_xmm_xmmm128

VPMINSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 39 /r

AVX

16/32/64-bit

§

VEX_Vpminsd_ymm_ymm_ymmm256

VPMINSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 39 /r

AVX2

16/32/64-bit

§

EVEX_Vpminsd_xmm_k1z_xmm_xmmm128b32

VPMINSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsd_ymm_k1z_ymm_ymmm256b32

VPMINSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsd_zmm_k1z_zmm_zmmm512b32

VPMINSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 39 /r

AVX512F

16/32/64-bit

§

EVEX_Vpminsq_xmm_k1z_xmm_xmmm128b64

VPMINSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsq_ymm_k1z_ymm_ymmm256b64

VPMINSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 39 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminsq_zmm_k1z_zmm_zmmm512b64

VPMINSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 39 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmovd2m_kr_xmm

VPMOVD2M k1, xmm1

EVEX.128.F3.0F38.W0 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovd2m_kr_ymm

VPMOVD2M k1, ymm1

EVEX.256.F3.0F38.W0 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovd2m_kr_zmm

VPMOVD2M k1, zmm1

EVEX.512.F3.0F38.W0 39 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_xmm

VPMOVQ2M k1, xmm1

EVEX.128.F3.0F38.W1 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_ymm

VPMOVQ2M k1, ymm1

EVEX.256.F3.0F38.W1 39 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmovq2m_kr_zmm

VPMOVQ2M k1, zmm1

EVEX.512.F3.0F38.W1 39 /r

AVX512DQ

16/32/64-bit

§

Pminuw_xmm_xmmm128

PMINUW xmm1, xmm2/m128

66 0F 38 3A /r

SSE4.1

16/32/64-bit

§

VEX_Vpminuw_xmm_xmm_xmmm128

VPMINUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3A /r

AVX

16/32/64-bit

§

VEX_Vpminuw_ymm_ymm_ymmm256

VPMINUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3A /r

AVX2

16/32/64-bit

§

EVEX_Vpminuw_xmm_k1z_xmm_xmmm128

VPMINUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminuw_ymm_k1z_ymm_ymmm256

VPMINUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpminuw_zmm_k1z_zmm_zmmm512

VPMINUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3A /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_xmm_kr

VPBROADCASTMW2D xmm1, k1

EVEX.128.F3.0F38.W0 3A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_ymm_kr

VPBROADCASTMW2D ymm1, k1

EVEX.256.F3.0F38.W0 3A /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpbroadcastmw2d_zmm_kr

VPBROADCASTMW2D zmm1, k1

EVEX.512.F3.0F38.W0 3A /r

AVX512CD

16/32/64-bit

§

Pminud_xmm_xmmm128

PMINUD xmm1, xmm2/m128

66 0F 38 3B /r

SSE4.1

16/32/64-bit

§

VEX_Vpminud_xmm_xmm_xmmm128

VPMINUD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3B /r

AVX

16/32/64-bit

§

VEX_Vpminud_ymm_ymm_ymmm256

VPMINUD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3B /r

AVX2

16/32/64-bit

§

EVEX_Vpminud_xmm_k1z_xmm_xmmm128b32

VPMINUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminud_ymm_k1z_ymm_ymmm256b32

VPMINUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminud_zmm_k1z_zmm_zmmm512b32

VPMINUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3B /r

AVX512F

16/32/64-bit

§

EVEX_Vpminuq_xmm_k1z_xmm_xmmm128b64

VPMINUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminuq_ymm_k1z_ymm_ymmm256b64

VPMINUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpminuq_zmm_k1z_zmm_zmmm512b64

VPMINUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3B /r

AVX512F

16/32/64-bit

§

Pmaxsb_xmm_xmmm128

PMAXSB xmm1, xmm2/m128

66 0F 38 3C /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxsb_xmm_xmm_xmmm128

VPMAXSB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3C /r

AVX

16/32/64-bit

§

VEX_Vpmaxsb_ymm_ymm_ymmm256

VPMAXSB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3C /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsb_xmm_k1z_xmm_xmmm128

VPMAXSB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsb_ymm_k1z_ymm_ymmm256

VPMAXSB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3C /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxsb_zmm_k1z_zmm_zmmm512

VPMAXSB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3C /r

AVX512BW

16/32/64-bit

§

Pmaxsd_xmm_xmmm128

PMAXSD xmm1, xmm2/m128

66 0F 38 3D /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxsd_xmm_xmm_xmmm128

VPMAXSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3D /r

AVX

16/32/64-bit

§

VEX_Vpmaxsd_ymm_ymm_ymmm256

VPMAXSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3D /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxsd_xmm_k1z_xmm_xmmm128b32

VPMAXSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsd_ymm_k1z_ymm_ymmm256b32

VPMAXSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsd_zmm_k1z_zmm_zmmm512b32

VPMAXSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3D /r

AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_xmm_k1z_xmm_xmmm128b64

VPMAXSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_ymm_k1z_ymm_ymmm256b64

VPMAXSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3D /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxsq_zmm_k1z_zmm_zmmm512b64

VPMAXSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3D /r

AVX512F

16/32/64-bit

§

Pmaxuw_xmm_xmmm128

PMAXUW xmm1, xmm2/m128

66 0F 38 3E /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxuw_xmm_xmm_xmmm128

VPMAXUW xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3E /r

AVX

16/32/64-bit

§

VEX_Vpmaxuw_ymm_ymm_ymmm256

VPMAXUW ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3E /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxuw_xmm_k1z_xmm_xmmm128

VPMAXUW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG 3E /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxuw_ymm_k1z_ymm_ymmm256

VPMAXUW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG 3E /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpmaxuw_zmm_k1z_zmm_zmmm512

VPMAXUW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG 3E /r

AVX512BW

16/32/64-bit

§

Pmaxud_xmm_xmmm128

PMAXUD xmm1, xmm2/m128

66 0F 38 3F /r

SSE4.1

16/32/64-bit

§

VEX_Vpmaxud_xmm_xmm_xmmm128

VPMAXUD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 3F /r

AVX

16/32/64-bit

§

VEX_Vpmaxud_ymm_ymm_ymmm256

VPMAXUD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 3F /r

AVX2

16/32/64-bit

§

EVEX_Vpmaxud_xmm_k1z_xmm_xmmm128b32

VPMAXUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxud_ymm_k1z_ymm_ymmm256b32

VPMAXUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxud_zmm_k1z_zmm_zmmm512b32

VPMAXUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 3F /r

AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_xmm_k1z_xmm_xmmm128b64

VPMAXUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_ymm_k1z_ymm_ymmm256b64

VPMAXUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 3F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmaxuq_zmm_k1z_zmm_zmmm512b64

VPMAXUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 3F /r

AVX512F

16/32/64-bit

§

Pmulld_xmm_xmmm128

PMULLD xmm1, xmm2/m128

66 0F 38 40 /r

SSE4.1

16/32/64-bit

§

VEX_Vpmulld_xmm_xmm_xmmm128

VPMULLD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG 40 /r

AVX

16/32/64-bit

§

VEX_Vpmulld_ymm_ymm_ymmm256

VPMULLD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG 40 /r

AVX2

16/32/64-bit

§

EVEX_Vpmulld_xmm_k1z_xmm_xmmm128b32

VPMULLD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 40 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmulld_ymm_k1z_ymm_ymmm256b32

VPMULLD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 40 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpmulld_zmm_k1z_zmm_zmmm512b32

VPMULLD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 40 /r

AVX512F

16/32/64-bit

§

EVEX_Vpmullq_xmm_k1z_xmm_xmmm128b64

VPMULLQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 40 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmullq_ymm_k1z_ymm_ymmm256b64

VPMULLQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 40 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vpmullq_zmm_k1z_zmm_zmmm512b64

VPMULLQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 40 /r

AVX512DQ

16/32/64-bit

§

Phminposuw_xmm_xmmm128

PHMINPOSUW xmm1, xmm2/m128

66 0F 38 41 /r

SSE4.1

16/32/64-bit

§

VEX_Vphminposuw_xmm_xmmm128

VPHMINPOSUW xmm1, xmm2/m128

VEX.128.66.0F38.WIG 41 /r

AVX

16/32/64-bit

§

EVEX_Vgetexpps_xmm_k1z_xmmm128b32

VGETEXPPS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexpps_ymm_k1z_ymmm256b32

VGETEXPPS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexpps_zmm_k1z_zmmm512b32_sae

VGETEXPPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 42 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_xmm_k1z_xmmm128b64

VGETEXPPD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_ymm_k1z_ymmm256b64

VGETEXPPD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 42 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetexppd_zmm_k1z_zmmm512b64_sae

VGETEXPPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 42 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexpss_xmm_k1z_xmm_xmmm32_sae

VGETEXPSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 43 /r

AVX512F

16/32/64-bit

§

EVEX_Vgetexpsd_xmm_k1z_xmm_xmmm64_sae

VGETEXPSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 43 /r

AVX512F

16/32/64-bit

§

EVEX_Vplzcntd_xmm_k1z_xmmm128b32

VPLZCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntd_ymm_k1z_ymmm256b32

VPLZCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntd_zmm_k1z_zmmm512b32

VPLZCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 44 /r

AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_xmm_k1z_xmmm128b64

VPLZCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_ymm_k1z_ymmm256b64

VPLZCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 44 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vplzcntq_zmm_k1z_zmmm512b64

VPLZCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 44 /r

AVX512CD

16/32/64-bit

§

VEX_Vpsrlvd_xmm_xmm_xmmm128

VPSRLVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvd_ymm_ymm_ymmm256

VPSRLVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvq_xmm_xmm_xmmm128

VPSRLVQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 45 /r

AVX2

16/32/64-bit

§

VEX_Vpsrlvq_ymm_ymm_ymmm256

VPSRLVQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 45 /r

AVX2

16/32/64-bit

§

EVEX_Vpsrlvd_xmm_k1z_xmm_xmmm128b32

VPSRLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvd_ymm_k1z_ymm_ymmm256b32

VPSRLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvd_zmm_k1z_zmm_zmmm512b32

VPSRLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 45 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_xmm_k1z_xmm_xmmm128b64

VPSRLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_ymm_k1z_ymm_ymmm256b64

VPSRLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 45 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsrlvq_zmm_k1z_zmm_zmmm512b64

VPSRLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 45 /r

AVX512F

16/32/64-bit

§

VEX_Vpsravd_xmm_xmm_xmmm128

VPSRAVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 46 /r

AVX2

16/32/64-bit

§

VEX_Vpsravd_ymm_ymm_ymmm256

VPSRAVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 46 /r

AVX2

16/32/64-bit

§

EVEX_Vpsravd_xmm_k1z_xmm_xmmm128b32

VPSRAVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravd_ymm_k1z_ymm_ymmm256b32

VPSRAVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravd_zmm_k1z_zmm_zmmm512b32

VPSRAVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 46 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsravq_xmm_k1z_xmm_xmmm128b64

VPSRAVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravq_ymm_k1z_ymm_ymmm256b64

VPSRAVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 46 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsravq_zmm_k1z_zmm_zmmm512b64

VPSRAVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 46 /r

AVX512F

16/32/64-bit

§

VEX_Vpsllvd_xmm_xmm_xmmm128

VPSLLVD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvd_ymm_ymm_ymmm256

VPSLLVD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvq_xmm_xmm_xmmm128

VPSLLVQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 47 /r

AVX2

16/32/64-bit

§

VEX_Vpsllvq_ymm_ymm_ymmm256

VPSLLVQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 47 /r

AVX2

16/32/64-bit

§

EVEX_Vpsllvd_xmm_k1z_xmm_xmmm128b32

VPSLLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvd_ymm_k1z_ymm_ymmm256b32

VPSLLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvd_zmm_k1z_zmm_zmmm512b32

VPSLLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 47 /r

AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_xmm_k1z_xmm_xmmm128b64

VPSLLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_ymm_k1z_ymm_ymmm256b64

VPSLLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 47 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpsllvq_zmm_k1z_zmm_zmmm512b64

VPSLLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 47 /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_xmm_k1z_xmmm128b32

VRCP14PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_ymm_k1z_ymmm256b32

VRCP14PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14ps_zmm_k1z_zmmm512b32

VRCP14PS zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 4C /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_xmm_k1z_xmmm128b64

VRCP14PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_ymm_k1z_ymmm256b64

VRCP14PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 4C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrcp14pd_zmm_k1z_zmmm512b64

VRCP14PD zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 4C /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14ss_xmm_k1z_xmm_xmmm32

VRCP14SS xmm1 {k1}{z}, xmm2, xmm3/m32

EVEX.LIG.66.0F38.W0 4D /r

AVX512F

16/32/64-bit

§

EVEX_Vrcp14sd_xmm_k1z_xmm_xmmm64

VRCP14SD xmm1 {k1}{z}, xmm2, xmm3/m64

EVEX.LIG.66.0F38.W1 4D /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_xmm_k1z_xmmm128b32

VRSQRT14PS xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_ymm_k1z_ymmm256b32

VRSQRT14PS ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ps_zmm_k1z_zmmm512b32

VRSQRT14PS zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 4E /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_xmm_k1z_xmmm128b64

VRSQRT14PD xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_ymm_k1z_ymmm256b64

VRSQRT14PD ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 4E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14pd_zmm_k1z_zmmm512b64

VRSQRT14PD zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 4E /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14ss_xmm_k1z_xmm_xmmm32

VRSQRT14SS xmm1 {k1}{z}, xmm2, xmm3/m32

EVEX.LIG.66.0F38.W0 4F /r

AVX512F

16/32/64-bit

§

EVEX_Vrsqrt14sd_xmm_k1z_xmm_xmmm64

VRSQRT14SD xmm1 {k1}{z}, xmm2, xmm3/m64

EVEX.LIG.66.0F38.W1 4F /r

AVX512F

16/32/64-bit

§

EVEX_Vpdpbusd_xmm_k1z_xmm_xmmm128b32

VPDPBUSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 50 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusd_ymm_k1z_ymm_ymmm256b32

VPDPBUSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 50 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusd_zmm_k1z_zmm_zmmm512b32

VPDPBUSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 50 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_xmm_k1z_xmm_xmmm128b32

VPDPBUSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 51 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_ymm_k1z_ymm_ymmm256b32

VPDPBUSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 51 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpbusds_zmm_k1z_zmm_zmmm512b32

VPDPBUSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 51 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_xmm_k1z_xmm_xmmm128b32

VPDPWSSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 52 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_ymm_k1z_ymm_ymmm256b32

VPDPWSSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 52 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssd_zmm_k1z_zmm_zmmm512b32

VPDPWSSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 52 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vdpbf16ps_xmm_k1z_xmm_xmmm128b32

VDPBF16PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.0F38.W0 52 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vdpbf16ps_ymm_k1z_ymm_ymmm256b32

VDPBF16PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.0F38.W0 52 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vdpbf16ps_zmm_k1z_zmm_zmmm512b32

VDPBF16PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.F3.0F38.W0 52 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vp4dpwssd_zmm_k1z_zmmp3_m128

VP4DPWSSD zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 52 /r

AVX512_4VNNIW

16/32/64-bit

§

EVEX_Vpdpwssds_xmm_k1z_xmm_xmmm128b32

VPDPWSSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 53 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssds_ymm_k1z_ymm_ymmm256b32

VPDPWSSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 53 /r

AVX512VL and AVX512_VNNI

16/32/64-bit

§

EVEX_Vpdpwssds_zmm_k1z_zmm_zmmm512b32

VPDPWSSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 53 /r

AVX512_VNNI

16/32/64-bit

§

EVEX_Vp4dpwssds_zmm_k1z_zmmp3_m128

VP4DPWSSDS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 53 /r

AVX512_4VNNIW

16/32/64-bit

§

EVEX_Vpopcntb_xmm_k1z_xmmm128

VPOPCNTB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntb_ymm_k1z_ymmm256

VPOPCNTB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntb_zmm_k1z_zmmm512

VPOPCNTB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 54 /r

AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_xmm_k1z_xmmm128

VPOPCNTW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_ymm_k1z_ymmm256

VPOPCNTW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 54 /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntw_zmm_k1z_zmmm512

VPOPCNTW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 54 /r

AVX512_BITALG

16/32/64-bit

§

EVEX_Vpopcntd_xmm_k1z_xmmm128b32

VPOPCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntd_ymm_k1z_ymmm256b32

VPOPCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntd_zmm_k1z_zmmm512b32

VPOPCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 55 /r

AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_xmm_k1z_xmmm128b64

VPOPCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_ymm_k1z_ymmm256b64

VPOPCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 55 /r

AVX512VL and AVX512_VPOPCNTDQ

16/32/64-bit

§

EVEX_Vpopcntq_zmm_k1z_zmmm512b64

VPOPCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 55 /r

AVX512_VPOPCNTDQ

16/32/64-bit

§

VEX_Vpbroadcastd_xmm_xmmm32

VPBROADCASTD xmm1, xmm2/m32

VEX.128.66.0F38.W0 58 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastd_ymm_xmmm32

VPBROADCASTD ymm1, xmm2/m32

VEX.256.66.0F38.W0 58 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastd_xmm_k1z_xmmm32

VPBROADCASTD xmm1 {k1}{z}, xmm2/m32

EVEX.128.66.0F38.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_ymm_k1z_xmmm32

VPBROADCASTD ymm1 {k1}{z}, xmm2/m32

EVEX.256.66.0F38.W0 58 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_zmm_k1z_xmmm32

VPBROADCASTD zmm1 {k1}{z}, xmm2/m32

EVEX.512.66.0F38.W0 58 /r

AVX512F

16/32/64-bit

§

VEX_Vpbroadcastq_xmm_xmmm64

VPBROADCASTQ xmm1, xmm2/m64

VEX.128.66.0F38.W0 59 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastq_ymm_xmmm64

VPBROADCASTQ ymm1, xmm2/m64

VEX.256.66.0F38.W0 59 /r

AVX2

16/32/64-bit

§

EVEX_Vbroadcasti32x2_xmm_k1z_xmmm64

VBROADCASTI32X2 xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W0 59 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x2_ymm_k1z_xmmm64

VBROADCASTI32X2 ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W0 59 /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x2_zmm_k1z_xmmm64

VBROADCASTI32X2 zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W0 59 /r

AVX512DQ

16/32/64-bit

§

EVEX_Vpbroadcastq_xmm_k1z_xmmm64

VPBROADCASTQ xmm1 {k1}{z}, xmm2/m64

EVEX.128.66.0F38.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_ymm_k1z_xmmm64

VPBROADCASTQ ymm1 {k1}{z}, xmm2/m64

EVEX.256.66.0F38.W1 59 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_zmm_k1z_xmmm64

VPBROADCASTQ zmm1 {k1}{z}, xmm2/m64

EVEX.512.66.0F38.W1 59 /r

AVX512F

16/32/64-bit

§

VEX_Vbroadcasti128_ymm_m128

VBROADCASTI128 ymm1, m128

VEX.256.66.0F38.W0 5A /r

AVX2

16/32/64-bit

§

EVEX_Vbroadcasti32x4_ymm_k1z_m128

VBROADCASTI32X4 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W0 5A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vbroadcasti32x4_zmm_k1z_m128

VBROADCASTI32X4 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W0 5A /r

AVX512F

16/32/64-bit

§

EVEX_Vbroadcasti64x2_ymm_k1z_m128

VBROADCASTI64X2 ymm1 {k1}{z}, m128

EVEX.256.66.0F38.W1 5A /r

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti64x2_zmm_k1z_m128

VBROADCASTI64X2 zmm1 {k1}{z}, m128

EVEX.512.66.0F38.W1 5A /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti32x8_zmm_k1z_m256

VBROADCASTI32X8 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W0 5B /r

AVX512DQ

16/32/64-bit

§

EVEX_Vbroadcasti64x4_zmm_k1z_m256

VBROADCASTI64X4 zmm1 {k1}{z}, m256

EVEX.512.66.0F38.W1 5B /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandb_xmm_k1z_xmmm128

VPEXPANDB xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandb_ymm_k1z_ymmm256

VPEXPANDB ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandb_zmm_k1z_zmmm512

VPEXPANDB zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 62 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_xmm_k1z_xmmm128

VPEXPANDW xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_ymm_k1z_ymmm256

VPEXPANDW ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 62 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpexpandw_zmm_k1z_zmmm512

VPEXPANDW zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 62 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_xmmm128_k1z_xmm

VPCOMPRESSB xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_ymmm256_k1z_ymm

VPCOMPRESSB ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressb_zmmm512_k1z_zmm

VPCOMPRESSB zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 63 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_xmmm128_k1z_xmm

VPCOMPRESSW xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_ymmm256_k1z_ymm

VPCOMPRESSW ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 63 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpcompressw_zmmm512_k1z_zmm

VPCOMPRESSW zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 63 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpblendmd_xmm_k1z_xmm_xmmm128b32

VPBLENDMD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmd_ymm_k1z_ymm_ymmm256b32

VPBLENDMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmd_zmm_k1z_zmm_zmmm512b32

VPBLENDMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 64 /r

AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_xmm_k1z_xmm_xmmm128b64

VPBLENDMQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_ymm_k1z_ymm_ymmm256b64

VPBLENDMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 64 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpblendmq_zmm_k1z_zmm_zmmm512b64

VPBLENDMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 64 /r

AVX512F

16/32/64-bit

§

EVEX_Vblendmps_xmm_k1z_xmm_xmmm128b32

VBLENDMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmps_ymm_k1z_ymm_ymmm256b32

VBLENDMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmps_zmm_k1z_zmm_zmmm512b32

VBLENDMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 65 /r

AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_xmm_k1z_xmm_xmmm128b64

VBLENDMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_ymm_k1z_ymm_ymmm256b64

VBLENDMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 65 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vblendmpd_zmm_k1z_zmm_zmmm512b64

VBLENDMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 65 /r

AVX512F

16/32/64-bit

§

EVEX_Vpblendmb_xmm_k1z_xmm_xmmm128

VPBLENDMB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmb_ymm_k1z_ymm_ymmm256

VPBLENDMB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmb_zmm_k1z_zmm_zmmm512

VPBLENDMB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 66 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_xmm_k1z_xmm_xmmm128

VPBLENDMW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_ymm_k1z_ymm_ymmm256

VPBLENDMW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 66 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpblendmw_zmm_k1z_zmm_zmmm512

VPBLENDMW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 66 /r

AVX512BW

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_xmm_xmmm128b32

VP2INTERSECTD k1+1, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.0F38.W0 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_ymm_ymmm256b32

VP2INTERSECTD k1+1, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.0F38.W0 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectd_kp1_zmm_zmmm512b32

VP2INTERSECTD k1+1, zmm2, zmm3/m512/m32bcst

EVEX.512.F2.0F38.W0 68 /r

AVX512F and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_xmm_xmmm128b64

VP2INTERSECTQ k1+1, xmm2, xmm3/m128/m64bcst

EVEX.128.F2.0F38.W1 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_ymm_ymmm256b64

VP2INTERSECTQ k1+1, ymm2, ymm3/m256/m64bcst

EVEX.256.F2.0F38.W1 68 /r

AVX512VL and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vp2intersectq_kp1_zmm_zmmm512b64

VP2INTERSECTQ k1+1, zmm2, zmm3/m512/m64bcst

EVEX.512.F2.0F38.W1 68 /r

AVX512F and AVX512_VP2INTERSECT

16/32/64-bit

§

EVEX_Vpshldvw_xmm_k1z_xmm_xmmm128

VPSHLDVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 70 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvw_ymm_k1z_ymm_ymmm256

VPSHLDVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 70 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvw_zmm_k1z_zmm_zmmm512

VPSHLDVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 70 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_xmm_k1z_xmm_xmmm128b32

VPSHLDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_ymm_k1z_ymm_ymmm256b32

VPSHLDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvd_zmm_k1z_zmm_zmmm512b32

VPSHLDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 71 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_xmm_k1z_xmm_xmmm128b64

VPSHLDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_ymm_k1z_ymm_ymmm256b64

VPSHLDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 71 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldvq_zmm_k1z_zmm_zmmm512b64

VPSHLDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 71 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_xmm_k1z_xmm_xmmm128

VPSHRDVW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 72 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_ymm_k1z_ymm_ymmm256

VPSHRDVW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 72 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvw_zmm_k1z_zmm_zmmm512

VPSHRDVW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 72 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vcvtneps2bf16_xmm_k1z_xmmm128b32

VCVTNEPS2BF16 xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F3.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtneps2bf16_xmm_k1z_ymmm256b32

VCVTNEPS2BF16 xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F3.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtneps2bf16_ymm_k1z_zmmm512b32

VCVTNEPS2BF16 ymm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.F3.0F38.W0 72 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_xmm_k1z_xmm_xmmm128b32

VCVTNE2PS2BF16 xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_ymm_k1z_ymm_ymmm256b32

VCVTNE2PS2BF16 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.0F38.W0 72 /r

AVX512VL and AVX512_BF16

16/32/64-bit

§

EVEX_Vcvtne2ps2bf16_zmm_k1z_zmm_zmmm512b32

VCVTNE2PS2BF16 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.F2.0F38.W0 72 /r

AVX512F and AVX512_BF16

16/32/64-bit

§

EVEX_Vpshrdvd_xmm_k1z_xmm_xmmm128b32

VPSHRDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvd_ymm_k1z_ymm_ymmm256b32

VPSHRDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvd_zmm_k1z_zmm_zmmm512b32

VPSHRDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 73 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_xmm_k1z_xmm_xmmm128b64

VPSHRDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_ymm_k1z_ymm_ymmm256b64

VPSHRDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 73 /r

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdvq_zmm_k1z_zmm_zmmm512b64

VPSHRDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 73 /r

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpermi2b_xmm_k1z_xmm_xmmm128

VPERMI2B xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 75 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2b_ymm_k1z_ymm_ymmm256

VPERMI2B ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 75 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2b_zmm_k1z_zmm_zmmm512

VPERMI2B zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 75 /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermi2w_xmm_k1z_xmm_xmmm128

VPERMI2W xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermi2w_ymm_k1z_ymm_ymmm256

VPERMI2W ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 75 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermi2w_zmm_k1z_zmm_zmmm512

VPERMI2W zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 75 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpermi2d_xmm_k1z_xmm_xmmm128b32

VPERMI2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2d_ymm_k1z_ymm_ymmm256b32

VPERMI2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2d_zmm_k1z_zmm_zmmm512b32

VPERMI2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 76 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_xmm_k1z_xmm_xmmm128b64

VPERMI2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_ymm_k1z_ymm_ymmm256b64

VPERMI2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 76 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2q_zmm_k1z_zmm_zmmm512b64

VPERMI2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 76 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_xmm_k1z_xmm_xmmm128b32

VPERMI2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_ymm_k1z_ymm_ymmm256b32

VPERMI2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2ps_zmm_k1z_zmm_zmmm512b32

VPERMI2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 77 /r

AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_xmm_k1z_xmm_xmmm128b64

VPERMI2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_ymm_k1z_ymm_ymmm256b64

VPERMI2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 77 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermi2pd_zmm_k1z_zmm_zmmm512b64

VPERMI2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 77 /r

AVX512F

16/32/64-bit

§

VEX_Vpbroadcastb_xmm_xmmm8

VPBROADCASTB xmm1, xmm2/m8

VEX.128.66.0F38.W0 78 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastb_ymm_xmmm8

VPBROADCASTB ymm1, xmm2/m8

VEX.256.66.0F38.W0 78 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastb_xmm_k1z_xmmm8

VPBROADCASTB xmm1 {k1}{z}, xmm2/m8

EVEX.128.66.0F38.W0 78 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_ymm_k1z_xmmm8

VPBROADCASTB ymm1 {k1}{z}, xmm2/m8

EVEX.256.66.0F38.W0 78 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_zmm_k1z_xmmm8

VPBROADCASTB zmm1 {k1}{z}, xmm2/m8

EVEX.512.66.0F38.W0 78 /r

AVX512BW

16/32/64-bit

§

VEX_Vpbroadcastw_xmm_xmmm16

VPBROADCASTW xmm1, xmm2/m16

VEX.128.66.0F38.W0 79 /r

AVX2

16/32/64-bit

§

VEX_Vpbroadcastw_ymm_xmmm16

VPBROADCASTW ymm1, xmm2/m16

VEX.256.66.0F38.W0 79 /r

AVX2

16/32/64-bit

§

EVEX_Vpbroadcastw_xmm_k1z_xmmm16

VPBROADCASTW xmm1 {k1}{z}, xmm2/m16

EVEX.128.66.0F38.W0 79 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_ymm_k1z_xmmm16

VPBROADCASTW ymm1 {k1}{z}, xmm2/m16

EVEX.256.66.0F38.W0 79 /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_zmm_k1z_xmmm16

VPBROADCASTW zmm1 {k1}{z}, xmm2/m16

EVEX.512.66.0F38.W0 79 /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_xmm_k1z_r32

VPBROADCASTB xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_ymm_k1z_r32

VPBROADCASTB ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7A /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastb_zmm_k1z_r32

VPBROADCASTB zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7A /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_xmm_k1z_r32

VPBROADCASTW xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_ymm_k1z_r32

VPBROADCASTW ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7B /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastw_zmm_k1z_r32

VPBROADCASTW zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7B /r

AVX512BW

16/32/64-bit

§

EVEX_Vpbroadcastd_xmm_k1z_r32

VPBROADCASTD xmm1 {k1}{z}, r32

EVEX.128.66.0F38.W0 7C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_ymm_k1z_r32

VPBROADCASTD ymm1 {k1}{z}, r32

EVEX.256.66.0F38.W0 7C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastd_zmm_k1z_r32

VPBROADCASTD zmm1 {k1}{z}, r32

EVEX.512.66.0F38.W0 7C /r

AVX512F

16/32/64-bit

§

EVEX_Vpbroadcastq_xmm_k1z_r64

VPBROADCASTQ xmm1 {k1}{z}, r64

EVEX.128.66.0F38.W1 7C /r

AVX512VL and AVX512F

64-bit

§

EVEX_Vpbroadcastq_ymm_k1z_r64

VPBROADCASTQ ymm1 {k1}{z}, r64

EVEX.256.66.0F38.W1 7C /r

AVX512VL and AVX512F

64-bit

§

EVEX_Vpbroadcastq_zmm_k1z_r64

VPBROADCASTQ zmm1 {k1}{z}, r64

EVEX.512.66.0F38.W1 7C /r

AVX512F

64-bit

§

EVEX_Vpermt2b_xmm_k1z_xmm_xmmm128

VPERMT2B xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 7D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2b_ymm_k1z_ymm_ymmm256

VPERMT2B ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 7D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2b_zmm_k1z_zmm_zmmm512

VPERMT2B zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 7D /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermt2w_xmm_k1z_xmm_xmmm128

VPERMT2W xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 7D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermt2w_ymm_k1z_ymm_ymmm256

VPERMT2W ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 7D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermt2w_zmm_k1z_zmm_zmmm512

VPERMT2W zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 7D /r

AVX512BW

16/32/64-bit

§

EVEX_Vpermt2d_xmm_k1z_xmm_xmmm128b32

VPERMT2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2d_ymm_k1z_ymm_ymmm256b32

VPERMT2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2d_zmm_k1z_zmm_zmmm512b32

VPERMT2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_xmm_k1z_xmm_xmmm128b64

VPERMT2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_ymm_k1z_ymm_ymmm256b64

VPERMT2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 7E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2q_zmm_k1z_zmm_zmmm512b64

VPERMT2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 7E /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_xmm_k1z_xmm_xmmm128b32

VPERMT2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_ymm_k1z_ymm_ymmm256b32

VPERMT2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2ps_zmm_k1z_zmm_zmmm512b32

VPERMT2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst

EVEX.512.66.0F38.W0 7F /r

AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_xmm_k1z_xmm_xmmm128b64

VPERMT2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_ymm_k1z_ymm_ymmm256b64

VPERMT2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 7F /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermt2pd_zmm_k1z_zmm_zmmm512b64

VPERMT2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 7F /r

AVX512F

16/32/64-bit

§

Invept_r32_m128

INVEPT r32, m128

66 0F 38 80 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 20]

16/32-bit

§

Invept_r64_m128

INVEPT r64, m128

66 0F 38 80 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 20]

64-bit

§

Invvpid_r32_m128

INVVPID r32, m128

66 0F 38 81 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 32]

16/32-bit

§

Invvpid_r64_m128

INVVPID r64, m128

66 0F 38 81 /r

VMX and IA32_VMX_EPT_VPID_CAP[bit 32]

64-bit

§

Invpcid_r32_m128

INVPCID r32, m128

66 0F 38 82 /r

INVPCID

16/32-bit

§

Invpcid_r64_m128

INVPCID r64, m128

66 0F 38 82 /r

INVPCID

64-bit

§

EVEX_Vpmultishiftqb_xmm_k1z_xmm_xmmm128b64

VPMULTISHIFTQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 83 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpmultishiftqb_ymm_k1z_ymm_ymmm256b64

VPMULTISHIFTQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 83 /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpmultishiftqb_zmm_k1z_zmm_zmmm512b64

VPMULTISHIFTQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 83 /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vexpandps_xmm_k1z_xmmm128

VEXPANDPS xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandps_ymm_k1z_ymmm256

VEXPANDPS ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandps_zmm_k1z_zmmm512

VEXPANDPS zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 88 /r

AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_xmm_k1z_xmmm128

VEXPANDPD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_ymm_k1z_ymmm256

VEXPANDPD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 88 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vexpandpd_zmm_k1z_zmmm512

VEXPANDPD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 88 /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_xmm_k1z_xmmm128

VPEXPANDD xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W0 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_ymm_k1z_ymmm256

VPEXPANDD ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W0 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandd_zmm_k1z_zmmm512

VPEXPANDD zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W0 89 /r

AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_xmm_k1z_xmmm128

VPEXPANDQ xmm1 {k1}{z}, xmm2/m128

EVEX.128.66.0F38.W1 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_ymm_k1z_ymmm256

VPEXPANDQ ymm1 {k1}{z}, ymm2/m256

EVEX.256.66.0F38.W1 89 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpexpandq_zmm_k1z_zmmm512

VPEXPANDQ zmm1 {k1}{z}, zmm2/m512

EVEX.512.66.0F38.W1 89 /r

AVX512F

16/32/64-bit

§

EVEX_Vcompressps_xmmm128_k1z_xmm

VCOMPRESSPS xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompressps_ymmm256_k1z_ymm

VCOMPRESSPS ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompressps_zmmm512_k1z_zmm

VCOMPRESSPS zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 8A /r

AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_xmmm128_k1z_xmm

VCOMPRESSPD xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_ymmm256_k1z_ymm

VCOMPRESSPD ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 8A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcompresspd_zmmm512_k1z_zmm

VCOMPRESSPD zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 8A /r

AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_xmmm128_k1z_xmm

VPCOMPRESSD xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W0 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_ymmm256_k1z_ymm

VPCOMPRESSD ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W0 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressd_zmmm512_k1z_zmm

VPCOMPRESSD zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W0 8B /r

AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_xmmm128_k1z_xmm

VPCOMPRESSQ xmm1/m128 {k1}{z}, xmm2

EVEX.128.66.0F38.W1 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_ymmm256_k1z_ymm

VPCOMPRESSQ ymm1/m256 {k1}{z}, ymm2

EVEX.256.66.0F38.W1 8B /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcompressq_zmmm512_k1z_zmm

VPCOMPRESSQ zmm1/m512 {k1}{z}, zmm2

EVEX.512.66.0F38.W1 8B /r

AVX512F

16/32/64-bit

§

VEX_Vpmaskmovd_xmm_xmm_m128

VPMASKMOVD xmm1, xmm2, m128

VEX.128.66.0F38.W0 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovd_ymm_ymm_m256

VPMASKMOVD ymm1, ymm2, m256

VEX.256.66.0F38.W0 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_xmm_xmm_m128

VPMASKMOVQ xmm1, xmm2, m128

VEX.128.66.0F38.W1 8C /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_ymm_ymm_m256

VPMASKMOVQ ymm1, ymm2, m256

VEX.256.66.0F38.W1 8C /r

AVX2

16/32/64-bit

§

EVEX_Vpermb_xmm_k1z_xmm_xmmm128

VPERMB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 8D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermb_ymm_k1z_ymm_ymmm256

VPERMB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 8D /r

AVX512VL and AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermb_zmm_k1z_zmm_zmmm512

VPERMB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 8D /r

AVX512_VBMI

16/32/64-bit

§

EVEX_Vpermw_xmm_k1z_xmm_xmmm128

VPERMW xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W1 8D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermw_ymm_k1z_ymm_ymmm256

VPERMW ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W1 8D /r

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpermw_zmm_k1z_zmm_zmmm512

VPERMW zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W1 8D /r

AVX512BW

16/32/64-bit

§

VEX_Vpmaskmovd_m128_xmm_xmm

VPMASKMOVD m128, xmm1, xmm2

VEX.128.66.0F38.W0 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovd_m256_ymm_ymm

VPMASKMOVD m256, ymm1, ymm2

VEX.256.66.0F38.W0 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_m128_xmm_xmm

VPMASKMOVQ m128, xmm1, xmm2

VEX.128.66.0F38.W1 8E /r

AVX2

16/32/64-bit

§

VEX_Vpmaskmovq_m256_ymm_ymm

VPMASKMOVQ m256, ymm1, ymm2

VEX.256.66.0F38.W1 8E /r

AVX2

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_xmm_xmmm128

VPSHUFBITQMB k1 {k2}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 8F /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_ymm_ymmm256

VPSHUFBITQMB k1 {k2}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 8F /r

AVX512VL and AVX512_BITALG

16/32/64-bit

§

EVEX_Vpshufbitqmb_kr_k1_zmm_zmmm512

VPSHUFBITQMB k1 {k2}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 8F /r

AVX512_BITALG

16/32/64-bit

§

VEX_Vpgatherdd_xmm_vm32x_xmm

VPGATHERDD xmm1, vm32x, xmm2

VEX.128.66.0F38.W0 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdd_ymm_vm32y_ymm

VPGATHERDD ymm1, vm32y, ymm2

VEX.256.66.0F38.W0 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdq_xmm_vm32x_xmm

VPGATHERDQ xmm1, vm32x, xmm2

VEX.128.66.0F38.W1 90 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherdq_ymm_vm32x_ymm

VPGATHERDQ ymm1, vm32x, ymm2

VEX.256.66.0F38.W1 90 /r

AVX2

16/32/64-bit

§

EVEX_Vpgatherdd_xmm_k1_vm32x

VPGATHERDD xmm1 {k1}, vm32x

EVEX.128.66.0F38.W0 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdd_ymm_k1_vm32y

VPGATHERDD ymm1 {k1}, vm32y

EVEX.256.66.0F38.W0 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdd_zmm_k1_vm32z

VPGATHERDD zmm1 {k1}, vm32z

EVEX.512.66.0F38.W0 90 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_xmm_k1_vm32x

VPGATHERDQ xmm1 {k1}, vm32x

EVEX.128.66.0F38.W1 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_ymm_k1_vm32x

VPGATHERDQ ymm1 {k1}, vm32x

EVEX.256.66.0F38.W1 90 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherdq_zmm_k1_vm32y

VPGATHERDQ zmm1 {k1}, vm32y

EVEX.512.66.0F38.W1 90 /vsib

AVX512F

16/32/64-bit

§

VEX_Vpgatherqd_xmm_vm64x_xmm

VPGATHERQD xmm1, vm64x, xmm2

VEX.128.66.0F38.W0 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqd_xmm_vm64y_xmm

VPGATHERQD xmm1, vm64y, xmm2

VEX.256.66.0F38.W0 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqq_xmm_vm64x_xmm

VPGATHERQQ xmm1, vm64x, xmm2

VEX.128.66.0F38.W1 91 /r

AVX2

16/32/64-bit

§

VEX_Vpgatherqq_ymm_vm64y_ymm

VPGATHERQQ ymm1, vm64y, ymm2

VEX.256.66.0F38.W1 91 /r

AVX2

16/32/64-bit

§

EVEX_Vpgatherqd_xmm_k1_vm64x

VPGATHERQD xmm1 {k1}, vm64x

EVEX.128.66.0F38.W0 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqd_xmm_k1_vm64y

VPGATHERQD xmm1 {k1}, vm64y

EVEX.256.66.0F38.W0 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqd_ymm_k1_vm64z

VPGATHERQD ymm1 {k1}, vm64z

EVEX.512.66.0F38.W0 91 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_xmm_k1_vm64x

VPGATHERQQ xmm1 {k1}, vm64x

EVEX.128.66.0F38.W1 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_ymm_k1_vm64y

VPGATHERQQ ymm1 {k1}, vm64y

EVEX.256.66.0F38.W1 91 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpgatherqq_zmm_k1_vm64z

VPGATHERQQ zmm1 {k1}, vm64z

EVEX.512.66.0F38.W1 91 /vsib

AVX512F

16/32/64-bit

§

VEX_Vgatherdps_xmm_vm32x_xmm

VGATHERDPS xmm1, vm32x, xmm2

VEX.128.66.0F38.W0 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdps_ymm_vm32y_ymm

VGATHERDPS ymm1, vm32y, ymm2

VEX.256.66.0F38.W0 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdpd_xmm_vm32x_xmm

VGATHERDPD xmm1, vm32x, xmm2

VEX.128.66.0F38.W1 92 /r

AVX2

16/32/64-bit

§

VEX_Vgatherdpd_ymm_vm32x_ymm

VGATHERDPD ymm1, vm32x, ymm2

VEX.256.66.0F38.W1 92 /r

AVX2

16/32/64-bit

§

EVEX_Vgatherdps_xmm_k1_vm32x

VGATHERDPS xmm1 {k1}, vm32x

EVEX.128.66.0F38.W0 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdps_ymm_k1_vm32y

VGATHERDPS ymm1 {k1}, vm32y

EVEX.256.66.0F38.W0 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdps_zmm_k1_vm32z

VGATHERDPS zmm1 {k1}, vm32z

EVEX.512.66.0F38.W0 92 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_xmm_k1_vm32x

VGATHERDPD xmm1 {k1}, vm32x

EVEX.128.66.0F38.W1 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_ymm_k1_vm32x

VGATHERDPD ymm1 {k1}, vm32x

EVEX.256.66.0F38.W1 92 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherdpd_zmm_k1_vm32y

VGATHERDPD zmm1 {k1}, vm32y

EVEX.512.66.0F38.W1 92 /vsib

AVX512F

16/32/64-bit

§

VEX_Vgatherqps_xmm_vm64x_xmm

VGATHERQPS xmm1, vm64x, xmm2

VEX.128.66.0F38.W0 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqps_xmm_vm64y_xmm

VGATHERQPS xmm1, vm64y, xmm2

VEX.256.66.0F38.W0 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqpd_xmm_vm64x_xmm

VGATHERQPD xmm1, vm64x, xmm2

VEX.128.66.0F38.W1 93 /r

AVX2

16/32/64-bit

§

VEX_Vgatherqpd_ymm_vm64y_ymm

VGATHERQPD ymm1, vm64y, ymm2

VEX.256.66.0F38.W1 93 /r

AVX2

16/32/64-bit

§

EVEX_Vgatherqps_xmm_k1_vm64x

VGATHERQPS xmm1 {k1}, vm64x

EVEX.128.66.0F38.W0 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqps_xmm_k1_vm64y

VGATHERQPS xmm1 {k1}, vm64y

EVEX.256.66.0F38.W0 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqps_ymm_k1_vm64z

VGATHERQPS ymm1 {k1}, vm64z

EVEX.512.66.0F38.W0 93 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_xmm_k1_vm64x

VGATHERQPD xmm1 {k1}, vm64x

EVEX.128.66.0F38.W1 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_ymm_k1_vm64y

VGATHERQPD ymm1 {k1}, vm64y

EVEX.256.66.0F38.W1 93 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgatherqpd_zmm_k1_vm64z

VGATHERQPD zmm1 {k1}, vm64z

EVEX.512.66.0F38.W1 93 /vsib

AVX512F

16/32/64-bit

§

VEX_Vfmaddsub132ps_xmm_xmm_xmmm128

VFMADDSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132ps_ymm_ymm_ymmm256

VFMADDSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132pd_xmm_xmm_xmmm128

VFMADDSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 96 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub132pd_ymm_ymm_ymmm256

VFMADDSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 96 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub132ps_xmm_k1z_xmm_xmmm128b32

VFMADDSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132ps_ymm_k1z_ymm_ymmm256b32

VFMADDSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132ps_zmm_k1z_zmm_zmmm512b32_er

VFMADDSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 96 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_xmm_k1z_xmm_xmmm128b64

VFMADDSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_ymm_k1z_ymm_ymmm256b64

VFMADDSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 96 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub132pd_zmm_k1z_zmm_zmmm512b64_er

VFMADDSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 96 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd132ps_xmm_xmm_xmmm128

VFMSUBADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132ps_ymm_ymm_ymmm256

VFMSUBADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132pd_xmm_xmm_xmmm128

VFMSUBADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 97 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd132pd_ymm_ymm_ymmm256

VFMSUBADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 97 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd132ps_xmm_k1z_xmm_xmmm128b32

VFMSUBADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132ps_ymm_k1z_ymm_ymmm256b32

VFMSUBADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUBADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 97 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_xmm_k1z_xmm_xmmm128b64

VFMSUBADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_ymm_k1z_ymm_ymmm256b64

VFMSUBADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 97 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd132pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUBADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 97 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd132ps_xmm_xmm_xmmm128

VFMADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132ps_ymm_ymm_ymmm256

VFMADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132pd_xmm_xmm_xmmm128

VFMADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 98 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132pd_ymm_ymm_ymmm256

VFMADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 98 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd132ps_xmm_k1z_xmm_xmmm128b32

VFMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132ps_ymm_k1z_ymm_ymmm256b32

VFMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132ps_zmm_k1z_zmm_zmmm512b32_er

VFMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 98 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_xmm_k1z_xmm_xmmm128b64

VFMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_ymm_k1z_ymm_ymmm256b64

VFMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 98 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd132pd_zmm_k1z_zmm_zmmm512b64_er

VFMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 98 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd132ss_xmm_xmm_xmmm32

VFMADD132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 99 /r

FMA

16/32/64-bit

§

VEX_Vfmadd132sd_xmm_xmm_xmmm64

VFMADD132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 99 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd132ss_xmm_k1z_xmm_xmmm32_er

VFMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 99 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd132sd_xmm_k1z_xmm_xmmm64_er

VFMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 99 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub132ps_xmm_xmm_xmmm128

VFMSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132ps_ymm_ymm_ymmm256

VFMSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132pd_xmm_xmm_xmmm128

VFMSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9A /r

FMA

16/32/64-bit

§

VEX_Vfmsub132pd_ymm_ymm_ymmm256

VFMSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9A /r

FMA

16/32/64-bit

§

EVEX_Vfmsub132ps_xmm_k1z_xmm_xmmm128b32

VFMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132ps_ymm_k1z_ymm_ymmm256b32

VFMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9A /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_xmm_k1z_xmm_xmmm128b64

VFMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_ymm_k1z_ymm_ymmm256b64

VFMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9A /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub132pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9A /r

AVX512F

16/32/64-bit

§

EVEX_V4fmaddps_zmm_k1z_zmmp3_m128

V4FMADDPS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 9A /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfmsub132ss_xmm_xmm_xmmm32

VFMSUB132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9B /r

FMA

16/32/64-bit

§

VEX_Vfmsub132sd_xmm_xmm_xmmm64

VFMSUB132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9B /r

FMA

16/32/64-bit

§

EVEX_Vfmsub132ss_xmm_k1z_xmm_xmmm32_er

VFMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9B /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub132sd_xmm_k1z_xmm_xmmm64_er

VFMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9B /r

AVX512F

16/32/64-bit

§

EVEX_V4fmaddss_xmm_k1z_xmmp3_m128

V4FMADDSS xmm1 {k1}{z}, xmm2+3, m128

EVEX.LIG.F2.0F38.W0 9B /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfnmadd132ps_xmm_xmm_xmmm128

VFNMADD132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132ps_ymm_ymm_ymmm256

VFNMADD132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132pd_xmm_xmm_xmmm128

VFNMADD132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9C /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132pd_ymm_ymm_ymmm256

VFNMADD132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9C /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd132ps_xmm_k1z_xmm_xmmm128b32

VFNMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132ps_ymm_k1z_ymm_ymmm256b32

VFNMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132ps_zmm_k1z_zmm_zmmm512b32_er

VFNMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9C /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_xmm_k1z_xmm_xmmm128b64

VFNMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_ymm_k1z_ymm_ymmm256b64

VFNMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9C /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132pd_zmm_k1z_zmm_zmmm512b64_er

VFNMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9C /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd132ss_xmm_xmm_xmmm32

VFNMADD132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9D /r

FMA

16/32/64-bit

§

VEX_Vfnmadd132sd_xmm_xmm_xmmm64

VFNMADD132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9D /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd132ss_xmm_k1z_xmm_xmmm32_er

VFNMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9D /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd132sd_xmm_k1z_xmm_xmmm64_er

VFNMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9D /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub132ps_xmm_xmm_xmmm128

VFNMSUB132PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132ps_ymm_ymm_ymmm256

VFNMSUB132PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132pd_xmm_xmm_xmmm128

VFNMSUB132PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 9E /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132pd_ymm_ymm_ymmm256

VFNMSUB132PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 9E /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub132ps_xmm_k1z_xmm_xmmm128b32

VFNMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132ps_ymm_k1z_ymm_ymmm256b32

VFNMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132ps_zmm_k1z_zmm_zmmm512b32_er

VFNMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 9E /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_xmm_k1z_xmm_xmmm128b64

VFNMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_ymm_k1z_ymm_ymmm256b64

VFNMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 9E /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132pd_zmm_k1z_zmm_zmmm512b64_er

VFNMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 9E /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub132ss_xmm_xmm_xmmm32

VFNMSUB132SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 9F /r

FMA

16/32/64-bit

§

VEX_Vfnmsub132sd_xmm_xmm_xmmm64

VFNMSUB132SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 9F /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub132ss_xmm_k1z_xmm_xmmm32_er

VFNMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 9F /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub132sd_xmm_k1z_xmm_xmmm64_er

VFNMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 9F /r

AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32x_k1_xmm

VPSCATTERDD vm32x {k1}, xmm1

EVEX.128.66.0F38.W0 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32y_k1_ymm

VPSCATTERDD vm32y {k1}, ymm1

EVEX.256.66.0F38.W0 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdd_vm32z_k1_zmm

VPSCATTERDD vm32z {k1}, zmm1

EVEX.512.66.0F38.W0 A0 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32x_k1_xmm

VPSCATTERDQ vm32x {k1}, xmm1

EVEX.128.66.0F38.W1 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32x_k1_ymm

VPSCATTERDQ vm32x {k1}, ymm1

EVEX.256.66.0F38.W1 A0 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterdq_vm32y_k1_zmm

VPSCATTERDQ vm32y {k1}, zmm1

EVEX.512.66.0F38.W1 A0 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64x_k1_xmm

VPSCATTERQD vm64x {k1}, xmm1

EVEX.128.66.0F38.W0 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64y_k1_xmm

VPSCATTERQD vm64y {k1}, xmm1

EVEX.256.66.0F38.W0 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqd_vm64z_k1_ymm

VPSCATTERQD vm64z {k1}, ymm1

EVEX.512.66.0F38.W0 A1 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64x_k1_xmm

VPSCATTERQQ vm64x {k1}, xmm1

EVEX.128.66.0F38.W1 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64y_k1_ymm

VPSCATTERQQ vm64y {k1}, ymm1

EVEX.256.66.0F38.W1 A1 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpscatterqq_vm64z_k1_zmm

VPSCATTERQQ vm64z {k1}, zmm1

EVEX.512.66.0F38.W1 A1 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32x_k1_xmm

VSCATTERDPS vm32x {k1}, xmm1

EVEX.128.66.0F38.W0 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32y_k1_ymm

VSCATTERDPS vm32y {k1}, ymm1

EVEX.256.66.0F38.W0 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdps_vm32z_k1_zmm

VSCATTERDPS vm32z {k1}, zmm1

EVEX.512.66.0F38.W0 A2 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32x_k1_xmm

VSCATTERDPD vm32x {k1}, xmm1

EVEX.128.66.0F38.W1 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32x_k1_ymm

VSCATTERDPD vm32x {k1}, ymm1

EVEX.256.66.0F38.W1 A2 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterdpd_vm32y_k1_zmm

VSCATTERDPD vm32y {k1}, zmm1

EVEX.512.66.0F38.W1 A2 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64x_k1_xmm

VSCATTERQPS vm64x {k1}, xmm1

EVEX.128.66.0F38.W0 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64y_k1_xmm

VSCATTERQPS vm64y {k1}, xmm1

EVEX.256.66.0F38.W0 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqps_vm64z_k1_ymm

VSCATTERQPS vm64z {k1}, ymm1

EVEX.512.66.0F38.W0 A3 /vsib

AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64x_k1_xmm

VSCATTERQPD vm64x {k1}, xmm1

EVEX.128.66.0F38.W1 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64y_k1_ymm

VSCATTERQPD vm64y {k1}, ymm1

EVEX.256.66.0F38.W1 A3 /vsib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vscatterqpd_vm64z_k1_zmm

VSCATTERQPD vm64z {k1}, zmm1

EVEX.512.66.0F38.W1 A3 /vsib

AVX512F

16/32/64-bit

§

VEX_Vfmaddsub213ps_xmm_xmm_xmmm128

VFMADDSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213ps_ymm_ymm_ymmm256

VFMADDSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213pd_xmm_xmm_xmmm128

VFMADDSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub213pd_ymm_ymm_ymmm256

VFMADDSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A6 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub213ps_xmm_k1z_xmm_xmmm128b32

VFMADDSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213ps_ymm_k1z_ymm_ymmm256b32

VFMADDSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213ps_zmm_k1z_zmm_zmmm512b32_er

VFMADDSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A6 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_xmm_k1z_xmm_xmmm128b64

VFMADDSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_ymm_k1z_ymm_ymmm256b64

VFMADDSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub213pd_zmm_k1z_zmm_zmmm512b64_er

VFMADDSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A6 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd213ps_xmm_xmm_xmmm128

VFMSUBADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213ps_ymm_ymm_ymmm256

VFMSUBADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213pd_xmm_xmm_xmmm128

VFMSUBADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd213pd_ymm_ymm_ymmm256

VFMSUBADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A7 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd213ps_xmm_k1z_xmm_xmmm128b32

VFMSUBADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213ps_ymm_k1z_ymm_ymmm256b32

VFMSUBADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUBADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A7 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_xmm_k1z_xmm_xmmm128b64

VFMSUBADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_ymm_k1z_ymm_ymmm256b64

VFMSUBADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd213pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUBADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A7 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd213ps_xmm_xmm_xmmm128

VFMADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 A8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213ps_ymm_ymm_ymmm256

VFMADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 A8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213pd_xmm_xmm_xmmm128

VFMADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 A8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213pd_ymm_ymm_ymmm256

VFMADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 A8 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd213ps_xmm_k1z_xmm_xmmm128b32

VFMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213ps_ymm_k1z_ymm_ymmm256b32

VFMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213ps_zmm_k1z_zmm_zmmm512b32_er

VFMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 A8 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_xmm_k1z_xmm_xmmm128b64

VFMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_ymm_k1z_ymm_ymmm256b64

VFMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 A8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd213pd_zmm_k1z_zmm_zmmm512b64_er

VFMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 A8 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd213ss_xmm_xmm_xmmm32

VFMADD213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 A9 /r

FMA

16/32/64-bit

§

VEX_Vfmadd213sd_xmm_xmm_xmmm64

VFMADD213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 A9 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd213ss_xmm_k1z_xmm_xmmm32_er

VFMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 A9 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd213sd_xmm_k1z_xmm_xmmm64_er

VFMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 A9 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub213ps_xmm_xmm_xmmm128

VFMSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213ps_ymm_ymm_ymmm256

VFMSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213pd_xmm_xmm_xmmm128

VFMSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AA /r

FMA

16/32/64-bit

§

VEX_Vfmsub213pd_ymm_ymm_ymmm256

VFMSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AA /r

FMA

16/32/64-bit

§

EVEX_Vfmsub213ps_xmm_k1z_xmm_xmmm128b32

VFMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213ps_ymm_k1z_ymm_ymmm256b32

VFMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AA /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_xmm_k1z_xmm_xmmm128b64

VFMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_ymm_k1z_ymm_ymmm256b64

VFMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub213pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AA /r

AVX512F

16/32/64-bit

§

EVEX_V4fnmaddps_zmm_k1z_zmmp3_m128

V4FNMADDPS zmm1 {k1}{z}, zmm2+3, m128

EVEX.512.F2.0F38.W0 AA /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfmsub213ss_xmm_xmm_xmmm32

VFMSUB213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AB /r

FMA

16/32/64-bit

§

VEX_Vfmsub213sd_xmm_xmm_xmmm64

VFMSUB213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AB /r

FMA

16/32/64-bit

§

EVEX_Vfmsub213ss_xmm_k1z_xmm_xmmm32_er

VFMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AB /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub213sd_xmm_k1z_xmm_xmmm64_er

VFMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AB /r

AVX512F

16/32/64-bit

§

EVEX_V4fnmaddss_xmm_k1z_xmmp3_m128

V4FNMADDSS xmm1 {k1}{z}, xmm2+3, m128

EVEX.LIG.F2.0F38.W0 AB /r

AVX512_4FMAPS

16/32/64-bit

§

VEX_Vfnmadd213ps_xmm_xmm_xmmm128

VFNMADD213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213ps_ymm_ymm_ymmm256

VFNMADD213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213pd_xmm_xmm_xmmm128

VFNMADD213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213pd_ymm_ymm_ymmm256

VFNMADD213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AC /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd213ps_xmm_k1z_xmm_xmmm128b32

VFNMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213ps_ymm_k1z_ymm_ymmm256b32

VFNMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213ps_zmm_k1z_zmm_zmmm512b32_er

VFNMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AC /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_xmm_k1z_xmm_xmmm128b64

VFNMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_ymm_k1z_ymm_ymmm256b64

VFNMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213pd_zmm_k1z_zmm_zmmm512b64_er

VFNMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AC /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd213ss_xmm_xmm_xmmm32

VFNMADD213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AD /r

FMA

16/32/64-bit

§

VEX_Vfnmadd213sd_xmm_xmm_xmmm64

VFNMADD213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AD /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd213ss_xmm_k1z_xmm_xmmm32_er

VFNMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AD /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd213sd_xmm_k1z_xmm_xmmm64_er

VFNMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AD /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub213ps_xmm_xmm_xmmm128

VFNMSUB213PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213ps_ymm_ymm_ymmm256

VFNMSUB213PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213pd_xmm_xmm_xmmm128

VFNMSUB213PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 AE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213pd_ymm_ymm_ymmm256

VFNMSUB213PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 AE /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub213ps_xmm_k1z_xmm_xmmm128b32

VFNMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213ps_ymm_k1z_ymm_ymmm256b32

VFNMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213ps_zmm_k1z_zmm_zmmm512b32_er

VFNMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 AE /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_xmm_k1z_xmm_xmmm128b64

VFNMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_ymm_k1z_ymm_ymmm256b64

VFNMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 AE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213pd_zmm_k1z_zmm_zmmm512b64_er

VFNMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 AE /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub213ss_xmm_xmm_xmmm32

VFNMSUB213SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 AF /r

FMA

16/32/64-bit

§

VEX_Vfnmsub213sd_xmm_xmm_xmmm64

VFNMSUB213SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 AF /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub213ss_xmm_k1z_xmm_xmmm32_er

VFNMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 AF /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub213sd_xmm_k1z_xmm_xmmm64_er

VFNMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 AF /r

AVX512F

16/32/64-bit

§

EVEX_Vpmadd52luq_xmm_k1z_xmm_xmmm128b64

VPMADD52LUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B4 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52luq_ymm_k1z_ymm_ymmm256b64

VPMADD52LUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B4 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52luq_zmm_k1z_zmm_zmmm512b64

VPMADD52LUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 B4 /r

AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_xmm_k1z_xmm_xmmm128b64

VPMADD52HUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B5 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_ymm_k1z_ymm_ymmm256b64

VPMADD52HUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B5 /r

AVX512VL and AVX512_IFMA

16/32/64-bit

§

EVEX_Vpmadd52huq_zmm_k1z_zmm_zmmm512b64

VPMADD52HUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst

EVEX.512.66.0F38.W1 B5 /r

AVX512_IFMA

16/32/64-bit

§

VEX_Vfmaddsub231ps_xmm_xmm_xmmm128

VFMADDSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231ps_ymm_ymm_ymmm256

VFMADDSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231pd_xmm_xmm_xmmm128

VFMADDSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B6 /r

FMA

16/32/64-bit

§

VEX_Vfmaddsub231pd_ymm_ymm_ymmm256

VFMADDSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B6 /r

FMA

16/32/64-bit

§

EVEX_Vfmaddsub231ps_xmm_k1z_xmm_xmmm128b32

VFMADDSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231ps_ymm_k1z_ymm_ymmm256b32

VFMADDSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231ps_zmm_k1z_zmm_zmmm512b32_er

VFMADDSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B6 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_xmm_k1z_xmm_xmmm128b64

VFMADDSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_ymm_k1z_ymm_ymmm256b64

VFMADDSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B6 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmaddsub231pd_zmm_k1z_zmm_zmmm512b64_er

VFMADDSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B6 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsubadd231ps_xmm_xmm_xmmm128

VFMSUBADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231ps_ymm_ymm_ymmm256

VFMSUBADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231pd_xmm_xmm_xmmm128

VFMSUBADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B7 /r

FMA

16/32/64-bit

§

VEX_Vfmsubadd231pd_ymm_ymm_ymmm256

VFMSUBADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B7 /r

FMA

16/32/64-bit

§

EVEX_Vfmsubadd231ps_xmm_k1z_xmm_xmmm128b32

VFMSUBADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231ps_ymm_k1z_ymm_ymmm256b32

VFMSUBADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUBADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B7 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_xmm_k1z_xmm_xmmm128b64

VFMSUBADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_ymm_k1z_ymm_ymmm256b64

VFMSUBADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B7 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsubadd231pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUBADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B7 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd231ps_xmm_xmm_xmmm128

VFMADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231ps_ymm_ymm_ymmm256

VFMADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231pd_xmm_xmm_xmmm128

VFMADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B8 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231pd_ymm_ymm_ymmm256

VFMADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B8 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd231ps_xmm_k1z_xmm_xmmm128b32

VFMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231ps_ymm_k1z_ymm_ymmm256b32

VFMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231ps_zmm_k1z_zmm_zmmm512b32_er

VFMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 B8 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_xmm_k1z_xmm_xmmm128b64

VFMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_ymm_k1z_ymm_ymmm256b64

VFMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 B8 /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmadd231pd_zmm_k1z_zmm_zmmm512b64_er

VFMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 B8 /r

AVX512F

16/32/64-bit

§

VEX_Vfmadd231ss_xmm_xmm_xmmm32

VFMADD231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 B9 /r

FMA

16/32/64-bit

§

VEX_Vfmadd231sd_xmm_xmm_xmmm64

VFMADD231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 B9 /r

FMA

16/32/64-bit

§

EVEX_Vfmadd231ss_xmm_k1z_xmm_xmmm32_er

VFMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 B9 /r

AVX512F

16/32/64-bit

§

EVEX_Vfmadd231sd_xmm_k1z_xmm_xmmm64_er

VFMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 B9 /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub231ps_xmm_xmm_xmmm128

VFMSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231ps_ymm_ymm_ymmm256

VFMSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231pd_xmm_xmm_xmmm128

VFMSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BA /r

FMA

16/32/64-bit

§

VEX_Vfmsub231pd_ymm_ymm_ymmm256

VFMSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BA /r

FMA

16/32/64-bit

§

EVEX_Vfmsub231ps_xmm_k1z_xmm_xmmm128b32

VFMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231ps_ymm_k1z_ymm_ymmm256b32

VFMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231ps_zmm_k1z_zmm_zmmm512b32_er

VFMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BA /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_xmm_k1z_xmm_xmmm128b64

VFMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_ymm_k1z_ymm_ymmm256b64

VFMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BA /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfmsub231pd_zmm_k1z_zmm_zmmm512b64_er

VFMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BA /r

AVX512F

16/32/64-bit

§

VEX_Vfmsub231ss_xmm_xmm_xmmm32

VFMSUB231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BB /r

FMA

16/32/64-bit

§

VEX_Vfmsub231sd_xmm_xmm_xmmm64

VFMSUB231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BB /r

FMA

16/32/64-bit

§

EVEX_Vfmsub231ss_xmm_k1z_xmm_xmmm32_er

VFMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BB /r

AVX512F

16/32/64-bit

§

EVEX_Vfmsub231sd_xmm_k1z_xmm_xmmm64_er

VFMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BB /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd231ps_xmm_xmm_xmmm128

VFNMADD231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231ps_ymm_ymm_ymmm256

VFNMADD231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231pd_xmm_xmm_xmmm128

VFNMADD231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BC /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231pd_ymm_ymm_ymmm256

VFNMADD231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BC /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd231ps_xmm_k1z_xmm_xmmm128b32

VFNMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231ps_ymm_k1z_ymm_ymmm256b32

VFNMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231ps_zmm_k1z_zmm_zmmm512b32_er

VFNMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BC /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_xmm_k1z_xmm_xmmm128b64

VFNMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_ymm_k1z_ymm_ymmm256b64

VFNMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BC /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231pd_zmm_k1z_zmm_zmmm512b64_er

VFNMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BC /r

AVX512F

16/32/64-bit

§

VEX_Vfnmadd231ss_xmm_xmm_xmmm32

VFNMADD231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BD /r

FMA

16/32/64-bit

§

VEX_Vfnmadd231sd_xmm_xmm_xmmm64

VFNMADD231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BD /r

FMA

16/32/64-bit

§

EVEX_Vfnmadd231ss_xmm_k1z_xmm_xmmm32_er

VFNMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BD /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmadd231sd_xmm_k1z_xmm_xmmm64_er

VFNMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BD /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub231ps_xmm_xmm_xmmm128

VFNMSUB231PS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231ps_ymm_ymm_ymmm256

VFNMSUB231PS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231pd_xmm_xmm_xmmm128

VFNMSUB231PD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 BE /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231pd_ymm_ymm_ymmm256

VFNMSUB231PD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 BE /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub231ps_xmm_k1z_xmm_xmmm128b32

VFNMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.66.0F38.W0 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231ps_ymm_k1z_ymm_ymmm256b32

VFNMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.66.0F38.W0 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231ps_zmm_k1z_zmm_zmmm512b32_er

VFNMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.66.0F38.W0 BE /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_xmm_k1z_xmm_xmmm128b64

VFNMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst

EVEX.128.66.0F38.W1 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_ymm_k1z_ymm_ymmm256b64

VFNMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst

EVEX.256.66.0F38.W1 BE /r

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231pd_zmm_k1z_zmm_zmmm512b64_er

VFNMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}

EVEX.512.66.0F38.W1 BE /r

AVX512F

16/32/64-bit

§

VEX_Vfnmsub231ss_xmm_xmm_xmmm32

VFNMSUB231SS xmm1, xmm2, xmm3/m32

VEX.LIG.66.0F38.W0 BF /r

FMA

16/32/64-bit

§

VEX_Vfnmsub231sd_xmm_xmm_xmmm64

VFNMSUB231SD xmm1, xmm2, xmm3/m64

VEX.LIG.66.0F38.W1 BF /r

FMA

16/32/64-bit

§

EVEX_Vfnmsub231ss_xmm_k1z_xmm_xmmm32_er

VFNMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.66.0F38.W0 BF /r

AVX512F

16/32/64-bit

§

EVEX_Vfnmsub231sd_xmm_k1z_xmm_xmmm64_er

VFNMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.66.0F38.W1 BF /r

AVX512F

16/32/64-bit

§

EVEX_Vpconflictd_xmm_k1z_xmmm128b32

VPCONFLICTD xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.0F38.W0 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictd_ymm_k1z_ymmm256b32

VPCONFLICTD ymm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.0F38.W0 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictd_zmm_k1z_zmmm512b32

VPCONFLICTD zmm1 {k1}{z}, zmm2/m512/m32bcst

EVEX.512.66.0F38.W0 C4 /r

AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_xmm_k1z_xmmm128b64

VPCONFLICTQ xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.0F38.W1 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_ymm_k1z_ymmm256b64

VPCONFLICTQ ymm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.0F38.W1 C4 /r

AVX512VL and AVX512CD

16/32/64-bit

§

EVEX_Vpconflictq_zmm_k1z_zmmm512b64

VPCONFLICTQ zmm1 {k1}{z}, zmm2/m512/m64bcst

EVEX.512.66.0F38.W1 C4 /r

AVX512CD

16/32/64-bit

§

EVEX_Vgatherpf0dps_vm32z_k1

VGATHERPF0DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0dpd_vm32y_k1

VGATHERPF0DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1dps_vm32z_k1

VGATHERPF1DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1dpd_vm32y_k1

VGATHERPF1DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0dps_vm32z_k1

VSCATTERPF0DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0dpd_vm32y_k1

VSCATTERPF0DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1dps_vm32z_k1

VSCATTERPF1DPS vm32z {k1}

EVEX.512.66.0F38.W0 C6 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1dpd_vm32y_k1

VSCATTERPF1DPD vm32y {k1}

EVEX.512.66.0F38.W1 C6 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0qps_vm64z_k1

VGATHERPF0QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf0qpd_vm64z_k1

VGATHERPF0QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /1 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1qps_vm64z_k1

VGATHERPF1QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vgatherpf1qpd_vm64z_k1

VGATHERPF1QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /2 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0qps_vm64z_k1

VSCATTERPF0QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf0qpd_vm64z_k1

VSCATTERPF0QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /5 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1qps_vm64z_k1

VSCATTERPF1QPS vm64z {k1}

EVEX.512.66.0F38.W0 C7 /6 /vsib

AVX512PF

16/32/64-bit

§

EVEX_Vscatterpf1qpd_vm64z_k1

VSCATTERPF1QPD vm64z {k1}

EVEX.512.66.0F38.W1 C7 /6 /vsib

AVX512PF

16/32/64-bit

§

Sha1nexte_xmm_xmmm128

SHA1NEXTE xmm1, xmm2/m128

NP 0F 38 C8 /r

SHA

16/32/64-bit

§

EVEX_Vexp2ps_zmm_k1z_zmmm512b32_sae

VEXP2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 C8 /r

AVX512ER

16/32/64-bit

§

EVEX_Vexp2pd_zmm_k1z_zmmm512b64_sae

VEXP2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 C8 /r

AVX512ER

16/32/64-bit

§

Sha1msg1_xmm_xmmm128

SHA1MSG1 xmm1, xmm2/m128

NP 0F 38 C9 /r

SHA

16/32/64-bit

§

Sha1msg2_xmm_xmmm128

SHA1MSG2 xmm1, xmm2/m128

NP 0F 38 CA /r

SHA

16/32/64-bit

§

EVEX_Vrcp28ps_zmm_k1z_zmmm512b32_sae

VRCP28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 CA /r

AVX512ER

16/32/64-bit

§

EVEX_Vrcp28pd_zmm_k1z_zmmm512b64_sae

VRCP28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 CA /r

AVX512ER

16/32/64-bit

§

Sha256rnds2_xmm_xmmm128

SHA256RNDS2 xmm1, xmm2/m128, <XMM0>

NP 0F 38 CB /r

SHA

16/32/64-bit

§

EVEX_Vrcp28ss_xmm_k1z_xmm_xmmm32_sae

VRCP28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 CB /r

AVX512ER

16/32/64-bit

§

EVEX_Vrcp28sd_xmm_k1z_xmm_xmmm64_sae

VRCP28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 CB /r

AVX512ER

16/32/64-bit

§

Sha256msg1_xmm_xmmm128

SHA256MSG1 xmm1, xmm2/m128

NP 0F 38 CC /r

SHA

16/32/64-bit

§

EVEX_Vrsqrt28ps_zmm_k1z_zmmm512b32_sae

VRSQRT28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}

EVEX.512.66.0F38.W0 CC /r

AVX512ER

16/32/64-bit

§

EVEX_Vrsqrt28pd_zmm_k1z_zmmm512b64_sae

VRSQRT28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}

EVEX.512.66.0F38.W1 CC /r

AVX512ER

16/32/64-bit

§

Sha256msg2_xmm_xmmm128

SHA256MSG2 xmm1, xmm2/m128

NP 0F 38 CD /r

SHA

16/32/64-bit

§

EVEX_Vrsqrt28ss_xmm_k1z_xmm_xmmm32_sae

VRSQRT28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}

EVEX.LIG.66.0F38.W0 CD /r

AVX512ER

16/32/64-bit

§

EVEX_Vrsqrt28sd_xmm_k1z_xmm_xmmm64_sae

VRSQRT28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}

EVEX.LIG.66.0F38.W1 CD /r

AVX512ER

16/32/64-bit

§

Gf2p8mulb_xmm_xmmm128

GF2P8MULB xmm1, xmm2/m128

66 0F 38 CF /r

GFNI

16/32/64-bit

§

VEX_Vgf2p8mulb_xmm_xmm_xmmm128

VGF2P8MULB xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 CF /r

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8mulb_ymm_ymm_ymmm256

VGF2P8MULB ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 CF /r

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_xmm_k1z_xmm_xmmm128

VGF2P8MULB xmm1 {k1}{z}, xmm2, xmm3/m128

EVEX.128.66.0F38.W0 CF /r

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_ymm_k1z_ymm_ymmm256

VGF2P8MULB ymm1 {k1}{z}, ymm2, ymm3/m256

EVEX.256.66.0F38.W0 CF /r

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8mulb_zmm_k1z_zmm_zmmm512

VGF2P8MULB zmm1 {k1}{z}, zmm2, zmm3/m512

EVEX.512.66.0F38.W0 CF /r

AVX512F and GFNI

16/32/64-bit

§

Aesimc_xmm_xmmm128

AESIMC xmm1, xmm2/m128

66 0F 38 DB /r

AES

16/32/64-bit

§

VEX_Vaesimc_xmm_xmmm128

VAESIMC xmm1, xmm2/m128

VEX.128.66.0F38.WIG DB /r

AES and AVX

16/32/64-bit

§

Aesenc_xmm_xmmm128

AESENC xmm1, xmm2/m128

66 0F 38 DC /r

AES

16/32/64-bit

§

VEX_Vaesenc_xmm_xmm_xmmm128

VAESENC xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DC /r

AES and AVX

16/32/64-bit

§

VEX_Vaesenc_ymm_ymm_ymmm256

VAESENC ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DC /r

VAES

16/32/64-bit

§

EVEX_Vaesenc_xmm_xmm_xmmm128

VAESENC xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DC /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenc_ymm_ymm_ymmm256

VAESENC ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DC /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenc_zmm_zmm_zmmm512

VAESENC zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DC /r

AVX512F and VAES

16/32/64-bit

§

Aesenclast_xmm_xmmm128

AESENCLAST xmm1, xmm2/m128

66 0F 38 DD /r

AES

16/32/64-bit

§

VEX_Vaesenclast_xmm_xmm_xmmm128

VAESENCLAST xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DD /r

AES and AVX

16/32/64-bit

§

VEX_Vaesenclast_ymm_ymm_ymmm256

VAESENCLAST ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DD /r

VAES

16/32/64-bit

§

EVEX_Vaesenclast_xmm_xmm_xmmm128

VAESENCLAST xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DD /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenclast_ymm_ymm_ymmm256

VAESENCLAST ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DD /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesenclast_zmm_zmm_zmmm512

VAESENCLAST zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DD /r

AVX512F and VAES

16/32/64-bit

§

Aesdec_xmm_xmmm128

AESDEC xmm1, xmm2/m128

66 0F 38 DE /r

AES

16/32/64-bit

§

VEX_Vaesdec_xmm_xmm_xmmm128

VAESDEC xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DE /r

AES and AVX

16/32/64-bit

§

VEX_Vaesdec_ymm_ymm_ymmm256

VAESDEC ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DE /r

VAES

16/32/64-bit

§

EVEX_Vaesdec_xmm_xmm_xmmm128

VAESDEC xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DE /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdec_ymm_ymm_ymmm256

VAESDEC ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DE /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdec_zmm_zmm_zmmm512

VAESDEC zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DE /r

AVX512F and VAES

16/32/64-bit

§

Aesdeclast_xmm_xmmm128

AESDECLAST xmm1, xmm2/m128

66 0F 38 DF /r

AES

16/32/64-bit

§

VEX_Vaesdeclast_xmm_xmm_xmmm128

VAESDECLAST xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.WIG DF /r

AES and AVX

16/32/64-bit

§

VEX_Vaesdeclast_ymm_ymm_ymmm256

VAESDECLAST ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.WIG DF /r

VAES

16/32/64-bit

§

EVEX_Vaesdeclast_xmm_xmm_xmmm128

VAESDECLAST xmm1, xmm2, xmm3/m128

EVEX.128.66.0F38.WIG DF /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdeclast_ymm_ymm_ymmm256

VAESDECLAST ymm1, ymm2, ymm3/m256

EVEX.256.66.0F38.WIG DF /r

AVX512VL and VAES

16/32/64-bit

§

EVEX_Vaesdeclast_zmm_zmm_zmmm512

VAESDECLAST zmm1, zmm2, zmm3/m512

EVEX.512.66.0F38.WIG DF /r

AVX512F and VAES

16/32/64-bit

§

Movbe_r16_m16

MOVBE r16, m16

o16 0F 38 F0 /r

MOVBE

16/32/64-bit

§

Movbe_r32_m32

MOVBE r32, m32

o32 0F 38 F0 /r

MOVBE

16/32/64-bit

§

Movbe_r64_m64

MOVBE r64, m64

o64 0F 38 F0 /r

MOVBE

64-bit

§

Crc32_r32_rm8

CRC32 r32, r/m8

F2 0F 38 F0 /r

SSE4.2

16/32/64-bit

§

Crc32_r64_rm8

CRC32 r64, r/m8

F2 o64 0F 38 F0 /r

SSE4.2

64-bit

§

Movbe_m16_r16

MOVBE m16, r16

o16 0F 38 F1 /r

MOVBE

16/32/64-bit

§

Movbe_m32_r32

MOVBE m32, r32

o32 0F 38 F1 /r

MOVBE

16/32/64-bit

§

Movbe_m64_r64

MOVBE m64, r64

o64 0F 38 F1 /r

MOVBE

64-bit

§

Crc32_r32_rm16

CRC32 r32, r/m16

o16 F2 0F 38 F1 /r

SSE4.2

16/32/64-bit

§

Crc32_r32_rm32

CRC32 r32, r/m32

o32 F2 0F 38 F1 /r

SSE4.2

16/32/64-bit

§

Crc32_r64_rm64

CRC32 r64, r/m64

F2 o64 0F 38 F1 /r

SSE4.2

64-bit

§

VEX_Andn_r32_r32_rm32

ANDN r32a, r32b, r/m32

VEX.LZ.0F38.W0 F2 /r

BMI1

16/32/64-bit

§

VEX_Andn_r64_r64_rm64

ANDN r64a, r64b, r/m64

VEX.LZ.0F38.W1 F2 /r

BMI1

64-bit

§

VEX_Blsr_r32_rm32

BLSR r32, r/m32

VEX.LZ.0F38.W0 F3 /1

BMI1

16/32/64-bit

§

VEX_Blsr_r64_rm64

BLSR r64, r/m64

VEX.LZ.0F38.W1 F3 /1

BMI1

64-bit

§

VEX_Blsmsk_r32_rm32

BLSMSK r32, r/m32

VEX.LZ.0F38.W0 F3 /2

BMI1

16/32/64-bit

§

VEX_Blsmsk_r64_rm64

BLSMSK r64, r/m64

VEX.LZ.0F38.W1 F3 /2

BMI1

64-bit

§

VEX_Blsi_r32_rm32

BLSI r32, r/m32

VEX.LZ.0F38.W0 F3 /3

BMI1

16/32/64-bit

§

VEX_Blsi_r64_rm64

BLSI r64, r/m64

VEX.LZ.0F38.W1 F3 /3

BMI1

64-bit

§

VEX_Bzhi_r32_rm32_r32

BZHI r32a, r/m32, r32b

VEX.LZ.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Bzhi_r64_rm64_r64

BZHI r64a, r/m64, r64b

VEX.LZ.0F38.W1 F5 /r

BMI2

64-bit

§

Wrussd_m32_r32

WRUSSD m32, r32

66 0F 38 F5 /r

CET_SS

16/32/64-bit

§

Wrussq_m64_r64

WRUSSQ m64, r64

66 o64 0F 38 F5 /r

CET_SS

64-bit

§

VEX_Pext_r32_r32_rm32

PEXT r32a, r32b, r/m32

VEX.LZ.F3.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Pext_r64_r64_rm64

PEXT r64a, r64b, r/m64

VEX.LZ.F3.0F38.W1 F5 /r

BMI2

64-bit

§

VEX_Pdep_r32_r32_rm32

PDEP r32a, r32b, r/m32

VEX.LZ.F2.0F38.W0 F5 /r

BMI2

16/32/64-bit

§

VEX_Pdep_r64_r64_rm64

PDEP r64a, r64b, r/m64

VEX.LZ.F2.0F38.W1 F5 /r

BMI2

64-bit

§

Wrssd_m32_r32

WRSSD m32, r32

NP 0F 38 F6 /r

CET_SS

16/32/64-bit

§

Wrssq_m64_r64

WRSSQ m64, r64

NP o64 0F 38 F6 /r

CET_SS

64-bit

§

Adcx_r32_rm32

ADCX r32, r/m32

66 0F 38 F6 /r

ADX

16/32/64-bit

§

Adcx_r64_rm64

ADCX r64, r/m64

66 o64 0F 38 F6 /r

ADX

64-bit

§

Adox_r32_rm32

ADOX r32, r/m32

F3 0F 38 F6 /r

ADX

16/32/64-bit

§

Adox_r64_rm64

ADOX r64, r/m64

F3 o64 0F 38 F6 /r

ADX

64-bit

§

VEX_Mulx_r32_r32_rm32

MULX r32a, r32b, r/m32

VEX.LZ.F2.0F38.W0 F6 /r

BMI2

16/32/64-bit

§

VEX_Mulx_r64_r64_rm64

MULX r64a, r64b, r/m64

VEX.LZ.F2.0F38.W1 F6 /r

BMI2

64-bit

§

VEX_Bextr_r32_rm32_r32

BEXTR r32a, r/m32, r32b

VEX.LZ.0F38.W0 F7 /r

BMI1

16/32/64-bit

§

VEX_Bextr_r64_rm64_r64

BEXTR r64a, r/m64, r64b

VEX.LZ.0F38.W1 F7 /r

BMI1

64-bit

§

VEX_Shlx_r32_rm32_r32

SHLX r32a, r/m32, r32b

VEX.LZ.66.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Shlx_r64_rm64_r64

SHLX r64a, r/m64, r64b

VEX.LZ.66.0F38.W1 F7 /r

BMI2

64-bit

§

VEX_Sarx_r32_rm32_r32

SARX r32a, r/m32, r32b

VEX.LZ.F3.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Sarx_r64_rm64_r64

SARX r64a, r/m64, r64b

VEX.LZ.F3.0F38.W1 F7 /r

BMI2

64-bit

§

VEX_Shrx_r32_rm32_r32

SHRX r32a, r/m32, r32b

VEX.LZ.F2.0F38.W0 F7 /r

BMI2

16/32/64-bit

§

VEX_Shrx_r64_rm64_r64

SHRX r64a, r/m64, r64b

VEX.LZ.F2.0F38.W1 F7 /r

BMI2

64-bit

§

Movdir64b_r16_m512

MOVDIR64B r16, m512

a16 66 0F 38 F8 /r

MOVDIR64B

16/32-bit

§

Movdir64b_r32_m512

MOVDIR64B r32, m512

a32 66 0F 38 F8 /r

MOVDIR64B

16/32/64-bit

§

Movdir64b_r64_m512

MOVDIR64B r64, m512

a64 66 0F 38 F8 /r

MOVDIR64B

64-bit

§

Enqcmds_r16_m512

ENQCMDS r16, m512

a16 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32-bit

§

Enqcmds_r32_m512

ENQCMDS r32, m512

a32 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32/64-bit

§

Enqcmds_r64_m512

ENQCMDS r64, m512

a64 F3 0F 38 F8 !(11):rrr:bbb

ENQCMD

64-bit

§

Enqcmd_r16_m512

ENQCMD r16, m512

a16 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32-bit

§

Enqcmd_r32_m512

ENQCMD r32, m512

a32 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

16/32/64-bit

§

Enqcmd_r64_m512

ENQCMD r64, m512

a64 F2 0F 38 F8 !(11):rrr:bbb

ENQCMD

64-bit

§

Movdiri_m32_r32

MOVDIRI m32, r32

NP 0F 38 F9 /r

MOVDIRI

16/32/64-bit

§

Movdiri_m64_r64

MOVDIRI m64, r64

NP o64 0F 38 F9 /r

MOVDIRI

64-bit

§

VEX_Vpermq_ymm_ymmm256_imm8

VPERMQ ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W1 00 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpermq_ymm_k1z_ymmm256b64_imm8

VPERMQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 00 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermq_zmm_k1z_zmmm512b64_imm8

VPERMQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 00 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermpd_ymm_ymmm256_imm8

VPERMPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W1 01 /r ib

AVX2

16/32/64-bit

§

EVEX_Vpermpd_ymm_k1z_ymmm256b64_imm8

VPERMPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 01 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermpd_zmm_k1z_zmmm512b64_imm8

VPERMPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 01 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpblendd_xmm_xmm_xmmm128_imm8

VPBLENDD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W0 02 /r ib

AVX2

16/32/64-bit

§

VEX_Vpblendd_ymm_ymm_ymmm256_imm8

VPBLENDD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 02 /r ib

AVX2

16/32/64-bit

§

EVEX_Valignd_xmm_k1z_xmm_xmmm128b32_imm8

VALIGND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignd_ymm_k1z_ymm_ymmm256b32_imm8

VALIGND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignd_zmm_k1z_zmm_zmmm512b32_imm8

VALIGND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 03 /r ib

AVX512F

16/32/64-bit

§

EVEX_Valignq_xmm_k1z_xmm_xmmm128b64_imm8

VALIGNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignq_ymm_k1z_ymm_ymmm256b64_imm8

VALIGNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 03 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Valignq_zmm_k1z_zmm_zmmm512b64_imm8

VALIGNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 03 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermilps_xmm_xmmm128_imm8

VPERMILPS xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 04 /r ib

AVX

16/32/64-bit

§

VEX_Vpermilps_ymm_ymmm256_imm8

VPERMILPS ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W0 04 /r ib

AVX

16/32/64-bit

§

EVEX_Vpermilps_xmm_k1z_xmmm128b32_imm8

VPERMILPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 04 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_ymm_k1z_ymmm256b32_imm8

VPERMILPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 04 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilps_zmm_k1z_zmmm512b32_imm8

VPERMILPS zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 04 /r ib

AVX512F

16/32/64-bit

§

VEX_Vpermilpd_xmm_xmmm128_imm8

VPERMILPD xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 05 /r ib

AVX

16/32/64-bit

§

VEX_Vpermilpd_ymm_ymmm256_imm8

VPERMILPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.W0 05 /r ib

AVX

16/32/64-bit

§

EVEX_Vpermilpd_xmm_k1z_xmmm128b64_imm8

VPERMILPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 05 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_ymm_k1z_ymmm256b64_imm8

VPERMILPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 05 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpermilpd_zmm_k1z_zmmm512b64_imm8

VPERMILPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 05 /r ib

AVX512F

16/32/64-bit

§

VEX_Vperm2f128_ymm_ymm_ymmm256_imm8

VPERM2F128 ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 06 /r ib

AVX

16/32/64-bit

§

Roundps_xmm_xmmm128_imm8

ROUNDPS xmm1, xmm2/m128, imm8

66 0F 3A 08 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundps_xmm_xmmm128_imm8

VROUNDPS xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 08 /r ib

AVX

16/32/64-bit

§

VEX_Vroundps_ymm_ymmm256_imm8

VROUNDPS ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.WIG 08 /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscaleps_xmm_k1z_xmmm128b32_imm8

VRNDSCALEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 08 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscaleps_ymm_k1z_ymmm256b32_imm8

VRNDSCALEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 08 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscaleps_zmm_k1z_zmmm512b32_imm8_sae

VRNDSCALEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 08 /r ib

AVX512F

16/32/64-bit

§

Roundpd_xmm_xmmm128_imm8

ROUNDPD xmm1, xmm2/m128, imm8

66 0F 3A 09 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundpd_xmm_xmmm128_imm8

VROUNDPD xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 09 /r ib

AVX

16/32/64-bit

§

VEX_Vroundpd_ymm_ymmm256_imm8

VROUNDPD ymm1, ymm2/m256, imm8

VEX.256.66.0F3A.WIG 09 /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscalepd_xmm_k1z_xmmm128b64_imm8

VRNDSCALEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 09 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscalepd_ymm_k1z_ymmm256b64_imm8

VRNDSCALEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 09 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vrndscalepd_zmm_k1z_zmmm512b64_imm8_sae

VRNDSCALEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 09 /r ib

AVX512F

16/32/64-bit

§

Roundss_xmm_xmmm32_imm8

ROUNDSS xmm1, xmm2/m32, imm8

66 0F 3A 0A /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundss_xmm_xmm_xmmm32_imm8

VROUNDSS xmm1, xmm2, xmm3/m32, imm8

VEX.LIG.66.0F3A.WIG 0A /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscaless_xmm_k1z_xmm_xmmm32_imm8_sae

VRNDSCALESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 0A /r ib

AVX512F

16/32/64-bit

§

Roundsd_xmm_xmmm64_imm8

ROUNDSD xmm1, xmm2/m64, imm8

66 0F 3A 0B /r ib

SSE4.1

16/32/64-bit

§

VEX_Vroundsd_xmm_xmm_xmmm64_imm8

VROUNDSD xmm1, xmm2, xmm3/m64, imm8

VEX.LIG.66.0F3A.WIG 0B /r ib

AVX

16/32/64-bit

§

EVEX_Vrndscalesd_xmm_k1z_xmm_xmmm64_imm8_sae

VRNDSCALESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 0B /r ib

AVX512F

16/32/64-bit

§

Blendps_xmm_xmmm128_imm8

BLENDPS xmm1, xmm2/m128, imm8

66 0F 3A 0C /r ib

SSE4.1

16/32/64-bit

§

VEX_Vblendps_xmm_xmm_xmmm128_imm8

VBLENDPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0C /r ib

AVX

16/32/64-bit

§

VEX_Vblendps_ymm_ymm_ymmm256_imm8

VBLENDPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0C /r ib

AVX

16/32/64-bit

§

Blendpd_xmm_xmmm128_imm8

BLENDPD xmm1, xmm2/m128, imm8

66 0F 3A 0D /r ib

SSE4.1

16/32/64-bit

§

VEX_Vblendpd_xmm_xmm_xmmm128_imm8

VBLENDPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0D /r ib

AVX

16/32/64-bit

§

VEX_Vblendpd_ymm_ymm_ymmm256_imm8

VBLENDPD ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0D /r ib

AVX

16/32/64-bit

§

Pblendw_xmm_xmmm128_imm8

PBLENDW xmm1, xmm2/m128, imm8

66 0F 3A 0E /r ib

SSE4.1

16/32/64-bit

§

VEX_Vpblendw_xmm_xmm_xmmm128_imm8

VPBLENDW xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0E /r ib

AVX

16/32/64-bit

§

VEX_Vpblendw_ymm_ymm_ymmm256_imm8

VPBLENDW ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0E /r ib

AVX2

16/32/64-bit

§

Palignr_mm_mmm64_imm8

PALIGNR mm1, mm2/m64, imm8

NP 0F 3A 0F /r ib

SSSE3

16/32/64-bit

§

Palignr_xmm_xmmm128_imm8

PALIGNR xmm1, xmm2/m128, imm8

66 0F 3A 0F /r ib

SSSE3

16/32/64-bit

§

VEX_Vpalignr_xmm_xmm_xmmm128_imm8

VPALIGNR xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 0F /r ib

AVX

16/32/64-bit

§

VEX_Vpalignr_ymm_ymm_ymmm256_imm8

VPALIGNR ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 0F /r ib

AVX2

16/32/64-bit

§

EVEX_Vpalignr_xmm_k1z_xmm_xmmm128_imm8

VPALIGNR xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.WIG 0F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpalignr_ymm_k1z_ymm_ymmm256_imm8

VPALIGNR ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.WIG 0F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpalignr_zmm_k1z_zmm_zmmm512_imm8

VPALIGNR zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.WIG 0F /r ib

AVX512BW

16/32/64-bit

§

Pextrb_r32m8_xmm_imm8

PEXTRB r32/m8, xmm2, imm8

66 0F 3A 14 /r ib

SSE4.1

16/32/64-bit

§

Pextrb_r64m8_xmm_imm8

PEXTRB r64/m8, xmm2, imm8

66 o64 0F 3A 14 /r ib

SSE4.1

64-bit

§

VEX_Vpextrb_r32m8_xmm_imm8

VPEXTRB r32/m8, xmm2, imm8

VEX.128.66.0F3A.W0 14 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrb_r64m8_xmm_imm8

VPEXTRB r64/m8, xmm2, imm8

VEX.128.66.0F3A.W1 14 /r ib

AVX

64-bit

§

EVEX_Vpextrb_r32m8_xmm_imm8

VPEXTRB r32/m8, xmm2, imm8

EVEX.128.66.0F3A.W0 14 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrb_r64m8_xmm_imm8

VPEXTRB r64/m8, xmm2, imm8

EVEX.128.66.0F3A.W1 14 /r ib

AVX512BW

64-bit

§

Pextrw_r32m16_xmm_imm8

PEXTRW r32/m16, xmm, imm8

66 0F 3A 15 /r ib

SSE4.1

16/32/64-bit

§

Pextrw_r64m16_xmm_imm8

PEXTRW r64/m16, xmm, imm8

66 o64 0F 3A 15 /r ib

SSE4.1

64-bit

§

VEX_Vpextrw_r32m16_xmm_imm8

VPEXTRW r32/m16, xmm2, imm8

VEX.128.66.0F3A.W0 15 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrw_r64m16_xmm_imm8

VPEXTRW r64/m16, xmm2, imm8

VEX.128.66.0F3A.W1 15 /r ib

AVX

64-bit

§

EVEX_Vpextrw_r32m16_xmm_imm8

VPEXTRW r32/m16, xmm2, imm8

EVEX.128.66.0F3A.W0 15 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpextrw_r64m16_xmm_imm8

VPEXTRW r64/m16, xmm2, imm8

EVEX.128.66.0F3A.W1 15 /r ib

AVX512BW

64-bit

§

Pextrd_rm32_xmm_imm8

PEXTRD r/m32, xmm2, imm8

66 0F 3A 16 /r ib

SSE4.1

16/32/64-bit

§

Pextrq_rm64_xmm_imm8

PEXTRQ r/m64, xmm2, imm8

66 o64 0F 3A 16 /r ib

SSE4.1

64-bit

§

VEX_Vpextrd_rm32_xmm_imm8

VPEXTRD r/m32, xmm2, imm8

VEX.128.66.0F3A.W0 16 /r ib

AVX

16/32/64-bit

§

VEX_Vpextrq_rm64_xmm_imm8

VPEXTRQ r/m64, xmm2, imm8

VEX.128.66.0F3A.W1 16 /r ib

AVX

64-bit

§

EVEX_Vpextrd_rm32_xmm_imm8

VPEXTRD r/m32, xmm2, imm8

EVEX.128.66.0F3A.W0 16 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vpextrq_rm64_xmm_imm8

VPEXTRQ r/m64, xmm2, imm8

EVEX.128.66.0F3A.W1 16 /r ib

AVX512DQ

64-bit

§

Extractps_rm32_xmm_imm8

EXTRACTPS r/m32, xmm1, imm8

66 0F 3A 17 /r ib

SSE4.1

16/32/64-bit

§

Extractps_r64m32_xmm_imm8

EXTRACTPS r64/m32, xmm1, imm8

66 o64 0F 3A 17 /r ib

SSE4.1

64-bit

§

VEX_Vextractps_rm32_xmm_imm8

VEXTRACTPS r/m32, xmm1, imm8

VEX.128.66.0F3A.W0 17 /r ib

AVX

16/32/64-bit

§

VEX_Vextractps_r64m32_xmm_imm8

VEXTRACTPS r64/m32, xmm1, imm8

VEX.128.66.0F3A.W1 17 /r ib

AVX

64-bit

§

EVEX_Vextractps_rm32_xmm_imm8

VEXTRACTPS r/m32, xmm1, imm8

EVEX.128.66.0F3A.W0 17 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractps_r64m32_xmm_imm8

VEXTRACTPS r64/m32, xmm1, imm8

EVEX.128.66.0F3A.W1 17 /r ib

AVX512F

64-bit

§

VEX_Vinsertf128_ymm_ymm_xmmm128_imm8

VINSERTF128 ymm1, ymm2, xmm3/m128, imm8

VEX.256.66.0F3A.W0 18 /r ib

AVX

16/32/64-bit

§

EVEX_Vinsertf32x4_ymm_k1z_ymm_xmmm128_imm8

VINSERTF32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W0 18 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vinsertf32x4_zmm_k1z_zmm_xmmm128_imm8

VINSERTF32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W0 18 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vinsertf64x2_ymm_k1z_ymm_xmmm128_imm8

VINSERTF64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W1 18 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf64x2_zmm_k1z_zmm_xmmm128_imm8

VINSERTF64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W1 18 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vextractf128_xmmm128_ymm_imm8

VEXTRACTF128 xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 19 /r ib

AVX

16/32/64-bit

§

EVEX_Vextractf32x4_xmmm128_k1z_ymm_imm8

VEXTRACTF32X4 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 19 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vextractf32x4_xmmm128_k1z_zmm_imm8

VEXTRACTF32X4 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 19 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractf64x2_xmmm128_k1z_ymm_imm8

VEXTRACTF64X2 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W1 19 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vextractf64x2_xmmm128_k1z_zmm_imm8

VEXTRACTF64X2 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 19 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf32x8_zmm_k1z_zmm_ymmm256_imm8

VINSERTF32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W0 1A /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinsertf64x4_zmm_k1z_zmm_ymmm256_imm8

VINSERTF64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W1 1A /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextractf32x8_ymmm256_k1z_zmm_imm8

VEXTRACTF32X8 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 1B /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vextractf64x4_ymmm256_k1z_zmm_imm8

VEXTRACTF64X4 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 1B /r ib

AVX512F

16/32/64-bit

§

VEX_Vcvtps2ph_xmmm64_xmm_imm8

VCVTPS2PH xmm1/m64, xmm2, imm8

VEX.128.66.0F3A.W0 1D /r ib

F16C

16/32/64-bit

§

VEX_Vcvtps2ph_xmmm128_ymm_imm8

VCVTPS2PH xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 1D /r ib

F16C

16/32/64-bit

§

EVEX_Vcvtps2ph_xmmm64_k1z_xmm_imm8

VCVTPS2PH xmm1/m64 {k1}{z}, xmm2, imm8

EVEX.128.66.0F3A.W0 1D /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2ph_xmmm128_k1z_ymm_imm8

VCVTPS2PH xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 1D /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vcvtps2ph_ymmm256_k1z_zmm_imm8_sae

VCVTPS2PH ymm1/m256 {k1}{z}, zmm2{sae}, imm8

EVEX.512.66.0F3A.W0 1D /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_xmm_xmmm128b32_imm8

VPCMPUD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_ymm_ymmm256b32_imm8

VPCMPUD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpud_kr_k1_zmm_zmmm512b32_imm8

VPCMPUD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 1E /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_xmm_xmmm128b64_imm8

VPCMPUQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_ymm_ymmm256b64_imm8

VPCMPUQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 1E /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpuq_kr_k1_zmm_zmmm512b64_imm8

VPCMPUQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 1E /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_xmm_xmmm128b32_imm8

VPCMPD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_ymm_ymmm256b32_imm8

VPCMPD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpd_kr_k1_zmm_zmmm512b32_imm8

VPCMPD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 1F /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_xmm_xmmm128b64_imm8

VPCMPQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_ymm_ymmm256b64_imm8

VPCMPQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 1F /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpcmpq_kr_k1_zmm_zmmm512b64_imm8

VPCMPQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 1F /r ib

AVX512F

16/32/64-bit

§

Pinsrb_xmm_r32m8_imm8

PINSRB xmm1, r32/m8, imm8

66 0F 3A 20 /r ib

SSE4.1

16/32/64-bit

§

Pinsrb_xmm_r64m8_imm8

PINSRB xmm1, r64/m8, imm8

66 o64 0F 3A 20 /r ib

SSE4.1

64-bit

§

VEX_Vpinsrb_xmm_xmm_r32m8_imm8

VPINSRB xmm1, xmm2, r32/m8, imm8

VEX.128.66.0F3A.W0 20 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrb_xmm_xmm_r64m8_imm8

VPINSRB xmm1, xmm2, r64/m8, imm8

VEX.128.66.0F3A.W1 20 /r ib

AVX

64-bit

§

EVEX_Vpinsrb_xmm_xmm_r32m8_imm8

VPINSRB xmm1, xmm2, r32/m8, imm8

EVEX.128.66.0F3A.W0 20 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpinsrb_xmm_xmm_r64m8_imm8

VPINSRB xmm1, xmm2, r64/m8, imm8

EVEX.128.66.0F3A.W1 20 /r ib

AVX512BW

64-bit

§

Insertps_xmm_xmmm32_imm8

INSERTPS xmm1, xmm2/m32, imm8

66 0F 3A 21 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vinsertps_xmm_xmm_xmmm32_imm8

VINSERTPS xmm1, xmm2, xmm3/m32, imm8

VEX.128.66.0F3A.WIG 21 /r ib

AVX

16/32/64-bit

§

EVEX_Vinsertps_xmm_xmm_xmmm32_imm8

VINSERTPS xmm1, xmm2, xmm3/m32, imm8

EVEX.128.66.0F3A.W0 21 /r ib

AVX512F

16/32/64-bit

§

Pinsrd_xmm_rm32_imm8

PINSRD xmm1, r/m32, imm8

66 0F 3A 22 /r ib

SSE4.1

16/32/64-bit

§

Pinsrq_xmm_rm64_imm8

PINSRQ xmm1, r/m64, imm8

66 o64 0F 3A 22 /r ib

SSE4.1

64-bit

§

VEX_Vpinsrd_xmm_xmm_rm32_imm8

VPINSRD xmm1, xmm2, r/m32, imm8

VEX.128.66.0F3A.W0 22 /r ib

AVX

16/32/64-bit

§

VEX_Vpinsrq_xmm_xmm_rm64_imm8

VPINSRQ xmm1, xmm2, r/m64, imm8

VEX.128.66.0F3A.W1 22 /r ib

AVX

64-bit

§

EVEX_Vpinsrd_xmm_xmm_rm32_imm8

VPINSRD xmm1, xmm2, r/m32, imm8

EVEX.128.66.0F3A.W0 22 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vpinsrq_xmm_xmm_rm64_imm8

VPINSRQ xmm1, xmm2, r/m64, imm8

EVEX.128.66.0F3A.W1 22 /r ib

AVX512DQ

64-bit

§

EVEX_Vshuff32x4_ymm_k1z_ymm_ymmm256b32_imm8

VSHUFF32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 23 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshuff32x4_zmm_k1z_zmm_zmmm512b32_imm8

VSHUFF32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 23 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vshuff64x2_ymm_k1z_ymm_ymmm256b64_imm8

VSHUFF64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 23 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshuff64x2_zmm_k1z_zmm_zmmm512b64_imm8

VSHUFF64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 23 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_xmm_k1z_xmm_xmmm128b32_imm8

VPTERNLOGD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_ymm_k1z_ymm_ymmm256b32_imm8

VPTERNLOGD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogd_zmm_k1z_zmm_zmmm512b32_imm8

VPTERNLOGD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 25 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_xmm_k1z_xmm_xmmm128b64_imm8

VPTERNLOGQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_ymm_k1z_ymm_ymmm256b64_imm8

VPTERNLOGQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 25 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vpternlogq_zmm_k1z_zmm_zmmm512b64_imm8

VPTERNLOGQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 25 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantps_xmm_k1z_xmmm128b32_imm8

VGETMANTPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantps_ymm_k1z_ymmm256b32_imm8

VGETMANTPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantps_zmm_k1z_zmmm512b32_imm8_sae

VGETMANTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 26 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_xmm_k1z_xmmm128b64_imm8

VGETMANTPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_ymm_k1z_ymmm256b64_imm8

VGETMANTPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 26 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vgetmantpd_zmm_k1z_zmmm512b64_imm8_sae

VGETMANTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 26 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantss_xmm_k1z_xmm_xmmm32_imm8_sae

VGETMANTSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 27 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vgetmantsd_xmm_k1z_xmm_xmmm64_imm8_sae

VGETMANTSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 27 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftrb_kr_kr_imm8

KSHIFTRB k1, k2, imm8

VEX.L0.66.0F3A.W0 30 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Kshiftrw_kr_kr_imm8

KSHIFTRW k1, k2, imm8

VEX.L0.66.0F3A.W1 30 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftrd_kr_kr_imm8

KSHIFTRD k1, k2, imm8

VEX.L0.66.0F3A.W0 31 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftrq_kr_kr_imm8

KSHIFTRQ k1, k2, imm8

VEX.L0.66.0F3A.W1 31 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftlb_kr_kr_imm8

KSHIFTLB k1, k2, imm8

VEX.L0.66.0F3A.W0 32 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Kshiftlw_kr_kr_imm8

KSHIFTLW k1, k2, imm8

VEX.L0.66.0F3A.W1 32 /r ib

AVX512F

16/32/64-bit

§

VEX_Kshiftld_kr_kr_imm8

KSHIFTLD k1, k2, imm8

VEX.L0.66.0F3A.W0 33 /r ib

AVX512BW

16/32/64-bit

§

VEX_Kshiftlq_kr_kr_imm8

KSHIFTLQ k1, k2, imm8

VEX.L0.66.0F3A.W1 33 /r ib

AVX512BW

16/32/64-bit

§

VEX_Vinserti128_ymm_ymm_xmmm128_imm8

VINSERTI128 ymm1, ymm2, xmm3/m128, imm8

VEX.256.66.0F3A.W0 38 /r ib

AVX2

16/32/64-bit

§

EVEX_Vinserti32x4_ymm_k1z_ymm_xmmm128_imm8

VINSERTI32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W0 38 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vinserti32x4_zmm_k1z_zmm_xmmm128_imm8

VINSERTI32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W0 38 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vinserti64x2_ymm_k1z_ymm_xmmm128_imm8

VINSERTI64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8

EVEX.256.66.0F3A.W1 38 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vinserti64x2_zmm_k1z_zmm_xmmm128_imm8

VINSERTI64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8

EVEX.512.66.0F3A.W1 38 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vextracti128_xmmm128_ymm_imm8

VEXTRACTI128 xmm1/m128, ymm2, imm8

VEX.256.66.0F3A.W0 39 /r ib

AVX2

16/32/64-bit

§

EVEX_Vextracti32x4_xmmm128_k1z_ymm_imm8

VEXTRACTI32X4 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W0 39 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vextracti32x4_xmmm128_k1z_zmm_imm8

VEXTRACTI32X4 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 39 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextracti64x2_xmmm128_k1z_ymm_imm8

VEXTRACTI64X2 xmm1/m128 {k1}{z}, ymm2, imm8

EVEX.256.66.0F3A.W1 39 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vextracti64x2_xmmm128_k1z_zmm_imm8

VEXTRACTI64X2 xmm1/m128 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 39 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinserti32x8_zmm_k1z_zmm_ymmm256_imm8

VINSERTI32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W0 3A /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vinserti64x4_zmm_k1z_zmm_ymmm256_imm8

VINSERTI64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8

EVEX.512.66.0F3A.W1 3A /r ib

AVX512F

16/32/64-bit

§

EVEX_Vextracti32x8_ymmm256_k1z_zmm_imm8

VEXTRACTI32X8 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W0 3B /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vextracti64x4_ymmm256_k1z_zmm_imm8

VEXTRACTI64X4 ymm1/m256 {k1}{z}, zmm2, imm8

EVEX.512.66.0F3A.W1 3B /r ib

AVX512F

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_xmm_xmmm128_imm8

VPCMPUB k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_ymm_ymmm256_imm8

VPCMPUB k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpub_kr_k1_zmm_zmmm512_imm8

VPCMPUB k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 3E /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_xmm_xmmm128_imm8

VPCMPUW k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_ymm_ymmm256_imm8

VPCMPUW k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 3E /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpuw_kr_k1_zmm_zmmm512_imm8

VPCMPUW k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 3E /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_xmm_xmmm128_imm8

VPCMPB k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_ymm_ymmm256_imm8

VPCMPB k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpb_kr_k1_zmm_zmmm512_imm8

VPCMPB k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 3F /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_xmm_xmmm128_imm8

VPCMPW k1 {k2}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_ymm_ymmm256_imm8

VPCMPW k1 {k2}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 3F /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vpcmpw_kr_k1_zmm_zmmm512_imm8

VPCMPW k1 {k2}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 3F /r ib

AVX512BW

16/32/64-bit

§

Dpps_xmm_xmmm128_imm8

DPPS xmm1, xmm2/m128, imm8

66 0F 3A 40 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vdpps_xmm_xmm_xmmm128_imm8

VDPPS xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 40 /r ib

AVX

16/32/64-bit

§

VEX_Vdpps_ymm_ymm_ymmm256_imm8

VDPPS ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 40 /r ib

AVX

16/32/64-bit

§

Dppd_xmm_xmmm128_imm8

DPPD xmm1, xmm2/m128, imm8

66 0F 3A 41 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vdppd_xmm_xmm_xmmm128_imm8

VDPPD xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 41 /r ib

AVX

16/32/64-bit

§

Mpsadbw_xmm_xmmm128_imm8

MPSADBW xmm1, xmm2/m128, imm8

66 0F 3A 42 /r ib

SSE4.1

16/32/64-bit

§

VEX_Vmpsadbw_xmm_xmm_xmmm128_imm8

VMPSADBW xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 42 /r ib

AVX

16/32/64-bit

§

VEX_Vmpsadbw_ymm_ymm_ymmm256_imm8

VMPSADBW ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 42 /r ib

AVX2

16/32/64-bit

§

EVEX_Vdbpsadbw_xmm_k1z_xmm_xmmm128_imm8

VDBPSADBW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W0 42 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vdbpsadbw_ymm_k1z_ymm_ymmm256_imm8

VDBPSADBW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W0 42 /r ib

AVX512VL and AVX512BW

16/32/64-bit

§

EVEX_Vdbpsadbw_zmm_k1z_zmm_zmmm512_imm8

VDBPSADBW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W0 42 /r ib

AVX512BW

16/32/64-bit

§

EVEX_Vshufi32x4_ymm_k1z_ymm_ymmm256b32_imm8

VSHUFI32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 43 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufi32x4_zmm_k1z_zmm_zmmm512b32_imm8

VSHUFI32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 43 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vshufi64x2_ymm_k1z_ymm_ymmm256b64_imm8

VSHUFI64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 43 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vshufi64x2_zmm_k1z_zmm_zmmm512b64_imm8

VSHUFI64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 43 /r ib

AVX512F

16/32/64-bit

§

Pclmulqdq_xmm_xmmm128_imm8

PCLMULQDQ xmm1, xmm2/m128, imm8

66 0F 3A 44 /r ib

PCLMULQDQ

16/32/64-bit

§

VEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8

VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.WIG 44 /r ib

PCLMULQDQ and AVX

16/32/64-bit

§

VEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8

VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.WIG 44 /r ib

VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_xmm_xmm_xmmm128_imm8

VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.WIG 44 /r ib

AVX512VL and VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_ymm_ymm_ymmm256_imm8

VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.WIG 44 /r ib

AVX512VL and VPCLMULQDQ

16/32/64-bit

§

EVEX_Vpclmulqdq_zmm_zmm_zmmm512_imm8

VPCLMULQDQ zmm1, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.WIG 44 /r ib

AVX512F and VPCLMULQDQ

16/32/64-bit

§

VEX_Vperm2i128_ymm_ymm_ymmm256_imm8

VPERM2I128 ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W0 46 /r ib

AVX2

16/32/64-bit

§

VEX_Vpermil2ps_xmm_xmm_xmmm128_xmm_imm4

VPERMIL2PS xmm1, xmm2, xmm3/m128, xmm4, imm4

VEX.128.66.0F3A.W0 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_ymm_ymm_ymmm256_ymm_imm4

VPERMIL2PS ymm1, ymm2, ymm3/m256, ymm4, imm4

VEX.256.66.0F3A.W0 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_xmm_xmm_xmm_xmmm128_imm4

VPERMIL2PS xmm1, xmm2, xmm3, xmm4/m128, imm4

VEX.128.66.0F3A.W1 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2ps_ymm_ymm_ymm_ymmm256_imm4

VPERMIL2PS ymm1, ymm2, ymm3, ymm4/m256, imm4

VEX.256.66.0F3A.W1 48 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_xmm_xmm_xmmm128_xmm_imm4

VPERMIL2PD xmm1, xmm2, xmm3/m128, xmm4, imm4

VEX.128.66.0F3A.W0 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_ymm_ymm_ymmm256_ymm_imm4

VPERMIL2PD ymm1, ymm2, ymm3/m256, ymm4, imm4

VEX.256.66.0F3A.W0 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_xmm_xmm_xmm_xmmm128_imm4

VPERMIL2PD xmm1, xmm2, xmm3, xmm4/m128, imm4

VEX.128.66.0F3A.W1 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vpermil2pd_ymm_ymm_ymm_ymmm256_imm4

VPERMIL2PD ymm1, ymm2, ymm3, ymm4/m256, imm4

VEX.256.66.0F3A.W1 49 /r /is5

XOP

16/32/64-bit

§

VEX_Vblendvps_xmm_xmm_xmmm128_xmm

VBLENDVPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4A /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvps_ymm_ymm_ymmm256_ymm

VBLENDVPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4A /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvpd_xmm_xmm_xmmm128_xmm

VBLENDVPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4B /r /is4

AVX

16/32/64-bit

§

VEX_Vblendvpd_ymm_ymm_ymmm256_ymm

VBLENDVPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4B /r /is4

AVX

16/32/64-bit

§

VEX_Vpblendvb_xmm_xmm_xmmm128_xmm

VPBLENDVB xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 4C /r /is4

AVX

16/32/64-bit

§

VEX_Vpblendvb_ymm_ymm_ymmm256_ymm

VPBLENDVB ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 4C /r /is4

AVX2

16/32/64-bit

§

EVEX_Vrangeps_xmm_k1z_xmm_xmmm128b32_imm8

VRANGEPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangeps_ymm_k1z_ymm_ymmm256b32_imm8

VRANGEPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangeps_zmm_k1z_zmm_zmmm512b32_imm8_sae

VRANGEPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 50 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_xmm_k1z_xmm_xmmm128b64_imm8

VRANGEPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_ymm_k1z_ymm_ymmm256b64_imm8

VRANGEPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 50 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vrangepd_zmm_k1z_zmm_zmmm512b64_imm8_sae

VRANGEPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 50 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangess_xmm_k1z_xmm_xmmm32_imm8_sae

VRANGESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 51 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vrangesd_xmm_k1z_xmm_xmmm64_imm8_sae

VRANGESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 51 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfixupimmps_xmm_k1z_xmm_xmmm128b32_imm8

VFIXUPIMMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmps_ymm_k1z_ymm_ymmm256b32_imm8

VFIXUPIMMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmps_zmm_k1z_zmm_zmmm512b32_imm8_sae

VFIXUPIMMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 54 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_xmm_k1z_xmm_xmmm128b64_imm8

VFIXUPIMMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_ymm_k1z_ymm_ymmm256b64_imm8

VFIXUPIMMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 54 /r ib

AVX512VL and AVX512F

16/32/64-bit

§

EVEX_Vfixupimmpd_zmm_k1z_zmm_zmmm512b64_imm8_sae

VFIXUPIMMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 54 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmss_xmm_k1z_xmm_xmmm32_imm8_sae

VFIXUPIMMSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 55 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vfixupimmsd_xmm_k1z_xmm_xmmm64_imm8_sae

VFIXUPIMMSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 55 /r ib

AVX512F

16/32/64-bit

§

EVEX_Vreduceps_xmm_k1z_xmmm128b32_imm8

VREDUCEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreduceps_ymm_k1z_ymmm256b32_imm8

VREDUCEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreduceps_zmm_k1z_zmmm512b32_imm8_sae

VREDUCEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8

EVEX.512.66.0F3A.W0 56 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_xmm_k1z_xmmm128b64_imm8

VREDUCEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_ymm_k1z_ymmm256b64_imm8

VREDUCEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 56 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vreducepd_zmm_k1z_zmmm512b64_imm8_sae

VREDUCEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8

EVEX.512.66.0F3A.W1 56 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducess_xmm_k1z_xmm_xmmm32_imm8_sae

VREDUCESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8

EVEX.LIG.66.0F3A.W0 57 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vreducesd_xmm_k1z_xmm_xmmm64_imm8_sae

VREDUCESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8

EVEX.LIG.66.0F3A.W1 57 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vfmaddsubps_xmm_xmm_xmmm128_xmm

VFMADDSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_ymm_ymm_ymmm256_ymm

VFMADDSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_xmm_xmm_xmm_xmmm128

VFMADDSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubps_ymm_ymm_ymm_ymmm256

VFMADDSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_xmm_xmm_xmmm128_xmm

VFMADDSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_ymm_ymm_ymmm256_ymm

VFMADDSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_xmm_xmm_xmm_xmmm128

VFMADDSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsubpd_ymm_ymm_ymm_ymmm256

VFMADDSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_xmm_xmm_xmmm128_xmm

VFMSUBADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_ymm_ymm_ymmm256_ymm

VFMSUBADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_xmm_xmm_xmm_xmmm128

VFMSUBADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddps_ymm_ymm_ymm_ymmm256

VFMSUBADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_xmm_xmm_xmmm128_xmm

VFMSUBADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_ymm_ymm_ymmm256_ymm

VFMSUBADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_xmm_xmm_xmm_xmmm128

VFMSUBADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 5F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubaddpd_ymm_ymm_ymm_ymmm256

VFMSUBADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 5F /r /is4

FMA4

16/32/64-bit

§

Pcmpestrm_xmm_xmmm128_imm8

PCMPESTRM xmm1, xmm2/m128, imm8

66 0F 3A 60 /r ib

SSE4.2

16/32/64-bit

§

Pcmpestrm64_xmm_xmmm128_imm8

PCMPESTRM64 xmm1, xmm2/m128, imm8

66 o64 0F 3A 60 /r ib

SSE4.2

64-bit

§

VEX_Vpcmpestrm_xmm_xmmm128_imm8

VPCMPESTRM xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 60 /r ib

AVX

16/32/64-bit

§

VEX_Vpcmpestrm64_xmm_xmmm128_imm8

VPCMPESTRM64 xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W1 60 /r ib

AVX

64-bit

§

Pcmpestri_xmm_xmmm128_imm8

PCMPESTRI xmm1, xmm2/m128, imm8

66 0F 3A 61 /r ib

SSE4.2

16/32/64-bit

§

Pcmpestri64_xmm_xmmm128_imm8

PCMPESTRI64 xmm1, xmm2/m128, imm8

66 o64 0F 3A 61 /r ib

SSE4.2

64-bit

§

VEX_Vpcmpestri_xmm_xmmm128_imm8

VPCMPESTRI xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W0 61 /r ib

AVX

16/32/64-bit

§

VEX_Vpcmpestri64_xmm_xmmm128_imm8

VPCMPESTRI64 xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.W1 61 /r ib

AVX

64-bit

§

Pcmpistrm_xmm_xmmm128_imm8

PCMPISTRM xmm1, xmm2/m128, imm8

66 0F 3A 62 /r ib

SSE4.2

16/32/64-bit

§

VEX_Vpcmpistrm_xmm_xmmm128_imm8

VPCMPISTRM xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 62 /r ib

AVX

16/32/64-bit

§

Pcmpistri_xmm_xmmm128_imm8

PCMPISTRI xmm1, xmm2/m128, imm8

66 0F 3A 63 /r ib

SSE4.2

16/32/64-bit

§

VEX_Vpcmpistri_xmm_xmmm128_imm8

VPCMPISTRI xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG 63 /r ib

AVX

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_xmmm128b32_imm8

VFPCLASSPS k2 {k1}, xmm2/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_ymmm256b32_imm8

VFPCLASSPS k2 {k1}, ymm2/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassps_kr_k1_zmmm512b32_imm8

VFPCLASSPS k2 {k1}, zmm2/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 66 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_xmmm128b64_imm8

VFPCLASSPD k2 {k1}, xmm2/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_ymmm256b64_imm8

VFPCLASSPD k2 {k1}, ymm2/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 66 /r ib

AVX512VL and AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasspd_kr_k1_zmmm512b64_imm8

VFPCLASSPD k2 {k1}, zmm2/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 66 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclassss_kr_k1_xmmm32_imm8

VFPCLASSSS k2 {k1}, xmm2/m32, imm8

EVEX.LIG.66.0F3A.W0 67 /r ib

AVX512DQ

16/32/64-bit

§

EVEX_Vfpclasssd_kr_k1_xmmm64_imm8

VFPCLASSSD k2 {k1}, xmm2/m64, imm8

EVEX.LIG.66.0F3A.W1 67 /r ib

AVX512DQ

16/32/64-bit

§

VEX_Vfmaddps_xmm_xmm_xmmm128_xmm

VFMADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_ymm_ymm_ymmm256_ymm

VFMADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_xmm_xmm_xmm_xmmm128

VFMADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddps_ymm_ymm_ymm_ymmm256

VFMADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 68 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_xmm_xmm_xmmm128_xmm

VFMADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_ymm_ymm_ymmm256_ymm

VFMADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_xmm_xmm_xmm_xmmm128

VFMADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddpd_ymm_ymm_ymm_ymmm256

VFMADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 69 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddss_xmm_xmm_xmmm32_xmm

VFMADDSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 6A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddss_xmm_xmm_xmm_xmmm32

VFMADDSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 6A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsd_xmm_xmm_xmmm64_xmm

VFMADDSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 6B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmaddsd_xmm_xmm_xmm_xmmm64

VFMADDSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 6B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_xmm_xmm_xmmm128_xmm

VFMSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_ymm_ymm_ymmm256_ymm

VFMSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_xmm_xmm_xmm_xmmm128

VFMSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubps_ymm_ymm_ymm_ymmm256

VFMSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 6C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_xmm_xmm_xmmm128_xmm

VFMSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_ymm_ymm_ymmm256_ymm

VFMSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_xmm_xmm_xmm_xmmm128

VFMSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubpd_ymm_ymm_ymm_ymmm256

VFMSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 6D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubss_xmm_xmm_xmmm32_xmm

VFMSUBSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 6E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubss_xmm_xmm_xmm_xmmm32

VFMSUBSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 6E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubsd_xmm_xmm_xmmm64_xmm

VFMSUBSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 6F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfmsubsd_xmm_xmm_xmm_xmmm64

VFMSUBSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 6F /r /is4

FMA4

16/32/64-bit

§

EVEX_Vpshldw_xmm_k1z_xmm_xmmm128_imm8

VPSHLDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 70 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldw_ymm_k1z_ymm_ymmm256_imm8

VPSHLDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 70 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldw_zmm_k1z_zmm_zmmm512_imm8

VPSHLDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 70 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_xmm_k1z_xmm_xmmm128b32_imm8

VPSHLDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_ymm_k1z_ymm_ymmm256b32_imm8

VPSHLDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldd_zmm_k1z_zmm_zmmm512b32_imm8

VPSHLDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 71 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_xmm_k1z_xmm_xmmm128b64_imm8

VPSHLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_ymm_k1z_ymm_ymmm256b64_imm8

VPSHLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 71 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshldq_zmm_k1z_zmm_zmmm512b64_imm8

VPSHLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 71 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_xmm_k1z_xmm_xmmm128_imm8

VPSHRDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8

EVEX.128.66.0F3A.W1 72 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_ymm_k1z_ymm_ymmm256_imm8

VPSHRDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8

EVEX.256.66.0F3A.W1 72 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdw_zmm_k1z_zmm_zmmm512_imm8

VPSHRDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8

EVEX.512.66.0F3A.W1 72 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_xmm_k1z_xmm_xmmm128b32_imm8

VPSHRDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8

EVEX.128.66.0F3A.W0 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_ymm_k1z_ymm_ymmm256b32_imm8

VPSHRDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8

EVEX.256.66.0F3A.W0 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdd_zmm_k1z_zmm_zmmm512b32_imm8

VPSHRDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8

EVEX.512.66.0F3A.W0 73 /r ib

AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_xmm_k1z_xmm_xmmm128b64_imm8

VPSHRDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_ymm_k1z_ymm_ymmm256b64_imm8

VPSHRDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 73 /r ib

AVX512VL and AVX512_VBMI2

16/32/64-bit

§

EVEX_Vpshrdq_zmm_k1z_zmm_zmmm512b64_imm8

VPSHRDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 73 /r ib

AVX512_VBMI2

16/32/64-bit

§

VEX_Vfnmaddps_xmm_xmm_xmmm128_xmm

VFNMADDPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_ymm_ymm_ymmm256_ymm

VFNMADDPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_xmm_xmm_xmm_xmmm128

VFNMADDPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddps_ymm_ymm_ymm_ymmm256

VFNMADDPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 78 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_xmm_xmm_xmmm128_xmm

VFNMADDPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_ymm_ymm_ymmm256_ymm

VFNMADDPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_xmm_xmm_xmm_xmmm128

VFNMADDPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddpd_ymm_ymm_ymm_ymmm256

VFNMADDPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 79 /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddss_xmm_xmm_xmmm32_xmm

VFNMADDSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 7A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddss_xmm_xmm_xmm_xmmm32

VFNMADDSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 7A /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddsd_xmm_xmm_xmmm64_xmm

VFNMADDSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 7B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmaddsd_xmm_xmm_xmm_xmmm64

VFNMADDSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 7B /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_xmm_xmm_xmmm128_xmm

VFNMSUBPS xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_ymm_ymm_ymmm256_ymm

VFNMSUBPS ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_xmm_xmm_xmm_xmmm128

VFNMSUBPS xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubps_ymm_ymm_ymm_ymmm256

VFNMSUBPS ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 7C /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_xmm_xmm_xmmm128_xmm

VFNMSUBPD xmm1, xmm2, xmm3/m128, xmm4

VEX.128.66.0F3A.W0 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_ymm_ymm_ymmm256_ymm

VFNMSUBPD ymm1, ymm2, ymm3/m256, ymm4

VEX.256.66.0F3A.W0 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_xmm_xmm_xmm_xmmm128

VFNMSUBPD xmm1, xmm2, xmm3, xmm4/m128

VEX.128.66.0F3A.W1 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubpd_ymm_ymm_ymm_ymmm256

VFNMSUBPD ymm1, ymm2, ymm3, ymm4/m256

VEX.256.66.0F3A.W1 7D /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubss_xmm_xmm_xmmm32_xmm

VFNMSUBSS xmm1, xmm2, xmm3/m32, xmm4

VEX.LIG.66.0F3A.W0 7E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubss_xmm_xmm_xmm_xmmm32

VFNMSUBSS xmm1, xmm2, xmm3, xmm4/m32

VEX.LIG.66.0F3A.W1 7E /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubsd_xmm_xmm_xmmm64_xmm

VFNMSUBSD xmm1, xmm2, xmm3/m64, xmm4

VEX.LIG.66.0F3A.W0 7F /r /is4

FMA4

16/32/64-bit

§

VEX_Vfnmsubsd_xmm_xmm_xmm_xmmm64

VFNMSUBSD xmm1, xmm2, xmm3, xmm4/m64

VEX.LIG.66.0F3A.W1 7F /r /is4

FMA4

16/32/64-bit

§

Sha1rnds4_xmm_xmmm128_imm8

SHA1RNDS4 xmm1, xmm2/m128, imm8

NP 0F 3A CC /r ib

SHA

16/32/64-bit

§

Gf2p8affineqb_xmm_xmmm128_imm8

GF2P8AFFINEQB xmm1, xmm2/m128, imm8

66 0F 3A CE /r ib

GFNI

16/32/64-bit

§

VEX_Vgf2p8affineqb_xmm_xmm_xmmm128_imm8

VGF2P8AFFINEQB xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W1 CE /r ib

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8affineqb_ymm_ymm_ymmm256_imm8

VGF2P8AFFINEQB ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W1 CE /r ib

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_xmm_k1z_xmm_xmmm128b64_imm8

VGF2P8AFFINEQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 CE /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_ymm_k1z_ymm_ymmm256b64_imm8

VGF2P8AFFINEQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 CE /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineqb_zmm_k1z_zmm_zmmm512b64_imm8

VGF2P8AFFINEQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 CE /r ib

AVX512F and GFNI

16/32/64-bit

§

Gf2p8affineinvqb_xmm_xmmm128_imm8

GF2P8AFFINEINVQB xmm1, xmm2/m128, imm8

66 0F 3A CF /r ib

GFNI

16/32/64-bit

§

VEX_Vgf2p8affineinvqb_xmm_xmm_xmmm128_imm8

VGF2P8AFFINEINVQB xmm1, xmm2, xmm3/m128, imm8

VEX.128.66.0F3A.W1 CF /r ib

AVX and GFNI

16/32/64-bit

§

VEX_Vgf2p8affineinvqb_ymm_ymm_ymmm256_imm8

VGF2P8AFFINEINVQB ymm1, ymm2, ymm3/m256, imm8

VEX.256.66.0F3A.W1 CF /r ib

AVX and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_xmm_k1z_xmm_xmmm128b64_imm8

VGF2P8AFFINEINVQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8

EVEX.128.66.0F3A.W1 CF /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_ymm_k1z_ymm_ymmm256b64_imm8

VGF2P8AFFINEINVQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8

EVEX.256.66.0F3A.W1 CF /r ib

AVX512VL and GFNI

16/32/64-bit

§

EVEX_Vgf2p8affineinvqb_zmm_k1z_zmm_zmmm512b64_imm8

VGF2P8AFFINEINVQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8

EVEX.512.66.0F3A.W1 CF /r ib

AVX512F and GFNI

16/32/64-bit

§

Aeskeygenassist_xmm_xmmm128_imm8

AESKEYGENASSIST xmm1, xmm2/m128, imm8

66 0F 3A DF /r ib

AES

16/32/64-bit

§

VEX_Vaeskeygenassist_xmm_xmmm128_imm8

VAESKEYGENASSIST xmm1, xmm2/m128, imm8

VEX.128.66.0F3A.WIG DF /r ib

AES and AVX

16/32/64-bit

§

VEX_Rorx_r32_rm32_imm8

RORX r32, r/m32, imm8

VEX.LZ.F2.0F3A.W0 F0 /r ib

BMI2

16/32/64-bit

§

VEX_Rorx_r64_rm64_imm8

RORX r64, r/m64, imm8

VEX.LZ.F2.0F3A.W1 F0 /r ib

BMI2

64-bit

§

XOP_Vpmacssww_xmm_xmm_xmmm128_xmm

VPMACSSWW xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 85 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsswd_xmm_xmm_xmmm128_xmm

VPMACSSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 86 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdql_xmm_xmm_xmmm128_xmm

VPMACSSDQL xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 87 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdd_xmm_xmm_xmmm128_xmm

VPMACSSDD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 8E /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacssdqh_xmm_xmm_xmmm128_xmm

VPMACSSDQH xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 8F /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsww_xmm_xmm_xmmm128_xmm

VPMACSWW xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 95 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacswd_xmm_xmm_xmmm128_xmm

VPMACSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 96 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdql_xmm_xmm_xmmm128_xmm

VPMACSDQL xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 97 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdd_xmm_xmm_xmmm128_xmm

VPMACSDD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 9E /r /is4

XOP

16/32/64-bit

§

XOP_Vpmacsdqh_xmm_xmm_xmmm128_xmm

VPMACSDQH xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 9F /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_xmm_xmm_xmmm128_xmm

VPCMOV xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_ymm_ymm_ymmm256_ymm

VPCMOV ymm1, ymm2, ymm3/m256, ymm4

XOP.256.X8.W0 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_xmm_xmm_xmm_xmmm128

VPCMOV xmm1, xmm2, xmm3, xmm4/m128

XOP.128.X8.W1 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpcmov_ymm_ymm_ymm_ymmm256

VPCMOV ymm1, ymm2, ymm3, ymm4/m256

XOP.256.X8.W1 A2 /r /is4

XOP

16/32/64-bit

§

XOP_Vpperm_xmm_xmm_xmmm128_xmm

VPPERM xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A3 /r /is4

XOP

16/32/64-bit

§

XOP_Vpperm_xmm_xmm_xmm_xmmm128

VPPERM xmm1, xmm2, xmm3, xmm4/m128

XOP.128.X8.W1 A3 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmadcsswd_xmm_xmm_xmmm128_xmm

VPMADCSSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 A6 /r /is4

XOP

16/32/64-bit

§

XOP_Vpmadcswd_xmm_xmm_xmmm128_xmm

VPMADCSWD xmm1, xmm2, xmm3/m128, xmm4

XOP.128.X8.W0 B6 /r /is4

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmmm128_imm8

VPROTB xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C0 /r ib

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmmm128_imm8

VPROTW xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C1 /r ib

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmmm128_imm8

VPROTD xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C2 /r ib

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmmm128_imm8

VPROTQ xmm1, xmm2/m128, imm8

XOP.128.X8.W0 C3 /r ib

XOP

16/32/64-bit

§

XOP_Vpcomb_xmm_xmm_xmmm128_imm8

VPCOMB xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CC /r ib

XOP

16/32/64-bit

§

XOP_Vpcomw_xmm_xmm_xmmm128_imm8

VPCOMW xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CD /r ib

XOP

16/32/64-bit

§

XOP_Vpcomd_xmm_xmm_xmmm128_imm8

VPCOMD xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CE /r ib

XOP

16/32/64-bit

§

XOP_Vpcomq_xmm_xmm_xmmm128_imm8

VPCOMQ xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 CF /r ib

XOP

16/32/64-bit

§

XOP_Vpcomub_xmm_xmm_xmmm128_imm8

VPCOMUB xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EC /r ib

XOP

16/32/64-bit

§

XOP_Vpcomuw_xmm_xmm_xmmm128_imm8

VPCOMUW xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 ED /r ib

XOP

16/32/64-bit

§

XOP_Vpcomud_xmm_xmm_xmmm128_imm8

VPCOMUD xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EE /r ib

XOP

16/32/64-bit

§

XOP_Vpcomuq_xmm_xmm_xmmm128_imm8

VPCOMUQ xmm1, xmm2, xmm3/m128, imm8

XOP.128.X8.W0 EF /r ib

XOP

16/32/64-bit

§

XOP_Blcfill_r32_rm32

BLCFILL r32, r/m32

XOP.L0.X9.W0 01 /1

TBM

16/32/64-bit

§

XOP_Blcfill_r64_rm64

BLCFILL r64, r/m64

XOP.L0.X9.W1 01 /1

TBM

64-bit

§

XOP_Blsfill_r32_rm32

BLSFILL r32, r/m32

XOP.L0.X9.W0 01 /2

TBM

16/32/64-bit

§

XOP_Blsfill_r64_rm64

BLSFILL r64, r/m64

XOP.L0.X9.W1 01 /2

TBM

64-bit

§

XOP_Blcs_r32_rm32

BLCS r32, r/m32

XOP.L0.X9.W0 01 /3

TBM

16/32/64-bit

§

XOP_Blcs_r64_rm64

BLCS r64, r/m64

XOP.L0.X9.W1 01 /3

TBM

64-bit

§

XOP_Tzmsk_r32_rm32

TZMSK r32, r/m32

XOP.L0.X9.W0 01 /4

TBM

16/32/64-bit

§

XOP_Tzmsk_r64_rm64

TZMSK r64, r/m64

XOP.L0.X9.W1 01 /4

TBM

64-bit

§

XOP_Blcic_r32_rm32

BLCIC r32, r/m32

XOP.L0.X9.W0 01 /5

TBM

16/32/64-bit

§

XOP_Blcic_r64_rm64

BLCIC r64, r/m64

XOP.L0.X9.W1 01 /5

TBM

64-bit

§

XOP_Blsic_r32_rm32

BLSIC r32, r/m32

XOP.L0.X9.W0 01 /6

TBM

16/32/64-bit

§

XOP_Blsic_r64_rm64

BLSIC r64, r/m64

XOP.L0.X9.W1 01 /6

TBM

64-bit

§

XOP_T1mskc_r32_rm32

T1MSKC r32, r/m32

XOP.L0.X9.W0 01 /7

TBM

16/32/64-bit

§

XOP_T1mskc_r64_rm64

T1MSKC r64, r/m64

XOP.L0.X9.W1 01 /7

TBM

64-bit

§

XOP_Blcmsk_r32_rm32

BLCMSK r32, r/m32

XOP.L0.X9.W0 02 /1

TBM

16/32/64-bit

§

XOP_Blcmsk_r64_rm64

BLCMSK r64, r/m64

XOP.L0.X9.W1 02 /1

TBM

64-bit

§

XOP_Blci_r32_rm32

BLCI r32, r/m32

XOP.L0.X9.W0 02 /6

TBM

16/32/64-bit

§

XOP_Blci_r64_rm64

BLCI r64, r/m64

XOP.L0.X9.W1 02 /6

TBM

64-bit

§

XOP_Llwpcb_r32

LLWPCB r32

XOP.L0.X9.W0 12 /0

LWP

16/32/64-bit

§

XOP_Llwpcb_r64

LLWPCB r64

XOP.L0.X9.W1 12 /0

LWP

64-bit

§

XOP_Slwpcb_r32

SLWPCB r32

XOP.L0.X9.W0 12 /1

LWP

16/32/64-bit

§

XOP_Slwpcb_r64

SLWPCB r64

XOP.L0.X9.W1 12 /1

LWP

64-bit

§

XOP_Vfrczps_xmm_xmmm128

VFRCZPS xmm1, xmm2/m128

XOP.128.X9.W0 80 /r

XOP

16/32/64-bit

§

XOP_Vfrczps_ymm_ymmm256

VFRCZPS ymm1, ymm2/m256

XOP.256.X9.W0 80 /r

XOP

16/32/64-bit

§

XOP_Vfrczpd_xmm_xmmm128

VFRCZPD xmm1, xmm2/m128

XOP.128.X9.W0 81 /r

XOP

16/32/64-bit

§

XOP_Vfrczpd_ymm_ymmm256

VFRCZPD ymm1, ymm2/m256

XOP.256.X9.W0 81 /r

XOP

16/32/64-bit

§

XOP_Vfrczss_xmm_xmmm32

VFRCZSS xmm1, xmm2/m32

XOP.128.X9.W0 82 /r

XOP

16/32/64-bit

§

XOP_Vfrczsd_xmm_xmmm64

VFRCZSD xmm1, xmm2/m64

XOP.128.X9.W0 83 /r

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmmm128_xmm

VPROTB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 90 /r

XOP

16/32/64-bit

§

XOP_Vprotb_xmm_xmm_xmmm128

VPROTB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 90 /r

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmmm128_xmm

VPROTW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 91 /r

XOP

16/32/64-bit

§

XOP_Vprotw_xmm_xmm_xmmm128

VPROTW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 91 /r

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmmm128_xmm

VPROTD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 92 /r

XOP

16/32/64-bit

§

XOP_Vprotd_xmm_xmm_xmmm128

VPROTD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 92 /r

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmmm128_xmm

VPROTQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 93 /r

XOP

16/32/64-bit

§

XOP_Vprotq_xmm_xmm_xmmm128

VPROTQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 93 /r

XOP

16/32/64-bit

§

XOP_Vpshlb_xmm_xmmm128_xmm

VPSHLB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 94 /r

XOP

16/32/64-bit

§

XOP_Vpshlb_xmm_xmm_xmmm128

VPSHLB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 94 /r

XOP

16/32/64-bit

§

XOP_Vpshlw_xmm_xmmm128_xmm

VPSHLW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 95 /r

XOP

16/32/64-bit

§

XOP_Vpshlw_xmm_xmm_xmmm128

VPSHLW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 95 /r

XOP

16/32/64-bit

§

XOP_Vpshld_xmm_xmmm128_xmm

VPSHLD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 96 /r

XOP

16/32/64-bit

§

XOP_Vpshld_xmm_xmm_xmmm128

VPSHLD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 96 /r

XOP

16/32/64-bit

§

XOP_Vpshlq_xmm_xmmm128_xmm

VPSHLQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 97 /r

XOP

16/32/64-bit

§

XOP_Vpshlq_xmm_xmm_xmmm128

VPSHLQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 97 /r

XOP

16/32/64-bit

§

XOP_Vpshab_xmm_xmmm128_xmm

VPSHAB xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 98 /r

XOP

16/32/64-bit

§

XOP_Vpshab_xmm_xmm_xmmm128

VPSHAB xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 98 /r

XOP

16/32/64-bit

§

XOP_Vpshaw_xmm_xmmm128_xmm

VPSHAW xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 99 /r

XOP

16/32/64-bit

§

XOP_Vpshaw_xmm_xmm_xmmm128

VPSHAW xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 99 /r

XOP

16/32/64-bit

§

XOP_Vpshad_xmm_xmmm128_xmm

VPSHAD xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 9A /r

XOP

16/32/64-bit

§

XOP_Vpshad_xmm_xmm_xmmm128

VPSHAD xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 9A /r

XOP

16/32/64-bit

§

XOP_Vpshaq_xmm_xmmm128_xmm

VPSHAQ xmm1, xmm2/m128, xmm3

XOP.128.X9.W0 9B /r

XOP

16/32/64-bit

§

XOP_Vpshaq_xmm_xmm_xmmm128

VPSHAQ xmm1, xmm2, xmm3/m128

XOP.128.X9.W1 9B /r

XOP

16/32/64-bit

§

XOP_Vphaddbw_xmm_xmmm128

VPHADDBW xmm1, xmm2/m128

XOP.128.X9.W0 C1 /r

XOP

16/32/64-bit

§

XOP_Vphaddbd_xmm_xmmm128

VPHADDBD xmm1, xmm2/m128

XOP.128.X9.W0 C2 /r

XOP

16/32/64-bit

§

XOP_Vphaddbq_xmm_xmmm128

VPHADDBQ xmm1, xmm2/m128

XOP.128.X9.W0 C3 /r

XOP

16/32/64-bit

§

XOP_Vphaddwd_xmm_xmmm128

VPHADDWD xmm1, xmm2/m128

XOP.128.X9.W0 C6 /r

XOP

16/32/64-bit

§

XOP_Vphaddwq_xmm_xmmm128

VPHADDWQ xmm1, xmm2/m128

XOP.128.X9.W0 C7 /r

XOP

16/32/64-bit

§

XOP_Vphadddq_xmm_xmmm128

VPHADDDQ xmm1, xmm2/m128

XOP.128.X9.W0 CB /r

XOP

16/32/64-bit

§

XOP_Vphaddubw_xmm_xmmm128

VPHADDUBW xmm1, xmm2/m128

XOP.128.X9.W0 D1 /r

XOP

16/32/64-bit

§

XOP_Vphaddubd_xmm_xmmm128

VPHADDUBD xmm1, xmm2/m128

XOP.128.X9.W0 D2 /r

XOP

16/32/64-bit

§

XOP_Vphaddubq_xmm_xmmm128

VPHADDUBQ xmm1, xmm2/m128

XOP.128.X9.W0 D3 /r

XOP

16/32/64-bit

§

XOP_Vphadduwd_xmm_xmmm128

VPHADDUWD xmm1, xmm2/m128

XOP.128.X9.W0 D6 /r

XOP

16/32/64-bit

§

XOP_Vphadduwq_xmm_xmmm128

VPHADDUWQ xmm1, xmm2/m128

XOP.128.X9.W0 D7 /r

XOP

16/32/64-bit

§

XOP_Vphaddudq_xmm_xmmm128

VPHADDUDQ xmm1, xmm2/m128

XOP.128.X9.W0 DB /r

XOP

16/32/64-bit

§

XOP_Vphsubbw_xmm_xmmm128

VPHSUBBW xmm1, xmm2/m128

XOP.128.X9.W0 E1 /r

XOP

16/32/64-bit

§

XOP_Vphsubwd_xmm_xmmm128

VPHSUBWD xmm1, xmm2/m128

XOP.128.X9.W0 E2 /r

XOP

16/32/64-bit

§

XOP_Vphsubdq_xmm_xmmm128

VPHSUBDQ xmm1, xmm2/m128

XOP.128.X9.W0 E3 /r

XOP

16/32/64-bit

§

XOP_Bextr_r32_rm32_imm32

BEXTR r32, r/m32, imm32

XOP.L0.XA.W0 10 /r id

TBM

16/32/64-bit

§

XOP_Bextr_r64_rm64_imm32

BEXTR r64, r/m64, imm32

XOP.L0.XA.W1 10 /r id

TBM

64-bit

§

XOP_Lwpins_r32_rm32_imm32

LWPINS r32, r/m32, imm32

XOP.L0.XA.W0 12 /0 id

LWP

16/32/64-bit

§

XOP_Lwpins_r64_rm32_imm32

LWPINS r64, r/m32, imm32

XOP.L0.XA.W1 12 /0 id

LWP

64-bit

§

XOP_Lwpval_r32_rm32_imm32

LWPVAL r32, r/m32, imm32

XOP.L0.XA.W0 12 /1 id

LWP

16/32/64-bit

§

XOP_Lwpval_r64_rm32_imm32

LWPVAL r64, r/m32, imm32

XOP.L0.XA.W1 12 /1 id

LWP

64-bit

§

D3NOW_Pi2fw_mm_mmm64

PI2FW mm, mm/m64

0F 0F /r 0C

3DNOWEXT

16/32/64-bit

§

D3NOW_Pi2fd_mm_mmm64

PI2FD mm, mm/m64

0F 0F /r 0D

3DNOW

16/32/64-bit

§

D3NOW_Pf2iw_mm_mmm64

PF2IW mm, mm/m64

0F 0F /r 1C

3DNOWEXT

16/32/64-bit

§

D3NOW_Pf2id_mm_mmm64

PF2ID mm, mm/m64

0F 0F /r 1D

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpv_mm_mmm64

PFRCPV mm, mm/m64

0F 0F /r 86

AMD Geode GX/LX

16/32-bit

§

D3NOW_Pfrsqrtv_mm_mmm64

PFRSQRTV mm, mm/m64

0F 0F /r 87

AMD Geode GX/LX

16/32-bit

§

D3NOW_Pfnacc_mm_mmm64

PFNACC mm, mm/m64

0F 0F /r 8A

3DNOWEXT

16/32/64-bit

§

D3NOW_Pfpnacc_mm_mmm64

PFPNACC mm, mm/m64

0F 0F /r 8E

3DNOWEXT

16/32/64-bit

§

D3NOW_Pfcmpge_mm_mmm64

PFCMPGE mm, mm/m64

0F 0F /r 90

3DNOW

16/32/64-bit

§

D3NOW_Pfmin_mm_mmm64

PFMIN mm, mm/m64

0F 0F /r 94

3DNOW

16/32/64-bit

§

D3NOW_Pfrcp_mm_mmm64

PFRCP mm, mm/m64

0F 0F /r 96

3DNOW

16/32/64-bit

§

D3NOW_Pfrsqrt_mm_mmm64

PFRSQRT mm, mm/m64

0F 0F /r 97

3DNOW

16/32/64-bit

§

D3NOW_Pfsub_mm_mmm64

PFSUB mm, mm/m64

0F 0F /r 9A

3DNOW

16/32/64-bit

§

D3NOW_Pfadd_mm_mmm64

PFADD mm, mm/m64

0F 0F /r 9E

3DNOW

16/32/64-bit

§

D3NOW_Pfcmpgt_mm_mmm64

PFCMPGT mm, mm/m64

0F 0F /r A0

3DNOW

16/32/64-bit

§

D3NOW_Pfmax_mm_mmm64

PFMAX mm, mm/m64

0F 0F /r A4

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpit1_mm_mmm64

PFRCPIT1 mm, mm/m64

0F 0F /r A6

3DNOW

16/32/64-bit

§

D3NOW_Pfrsqit1_mm_mmm64

PFRSQIT1 mm, mm/m64

0F 0F /r A7

3DNOW

16/32/64-bit

§

D3NOW_Pfsubr_mm_mmm64

PFSUBR mm, mm/m64

0F 0F /r AA

3DNOW

16/32/64-bit

§

D3NOW_Pfacc_mm_mmm64

PFACC mm, mm/m64

0F 0F /r AE

3DNOW

16/32/64-bit

§

D3NOW_Pfcmpeq_mm_mmm64

PFCMPEQ mm, mm/m64

0F 0F /r B0

3DNOW

16/32/64-bit

§

D3NOW_Pfmul_mm_mmm64

PFMUL mm, mm/m64

0F 0F /r B4

3DNOW

16/32/64-bit

§

D3NOW_Pfrcpit2_mm_mmm64

PFRCPIT2 mm, mm/m64

0F 0F /r B6

3DNOW

16/32/64-bit

§

D3NOW_Pmulhrw_mm_mmm64

PMULHRW mm, mm/m64

0F 0F /r B7

3DNOW

16/32/64-bit

§

D3NOW_Pswapd_mm_mmm64

PSWAPD mm, mm/m64

0F 0F /r BB

3DNOWEXT

16/32/64-bit

§

D3NOW_Pavgusb_mm_mmm64

PAVGUSB mm, mm/m64

0F 0F /r BF

3DNOW

16/32/64-bit

§

Rmpadjust

RMPADJUST

F3 0F 01 FE

SEV-SNP

64-bit

§

Rmpupdate

RMPUPDATE

F2 0F 01 FE

SEV-SNP

64-bit

§

Psmash

PSMASH

F3 0F 01 FF

SEV-SNP

64-bit

§

Pvalidatew

PVALIDATE

a16 F2 0F 01 FF

SEV-SNP

16/32-bit

§

Pvalidated

PVALIDATE

a32 F2 0F 01 FF

SEV-SNP

16/32/64-bit

§

Pvalidateq

PVALIDATE

a64 F2 0F 01 FF

SEV-SNP

64-bit

§

Serialize

SERIALIZE

NP 0F 01 E8

SERIALIZE

16/32/64-bit

§

Xsusldtrk

XSUSLDTRK

F2 0F 01 E8

TSXLDTRK

16/32/64-bit

§

Xresldtrk

XRESLDTRK

F2 0F 01 E9

TSXLDTRK

16/32/64-bit

§

Invlpgbw

INVLPGB

a16 NP 0F 01 FE

INVLPGB

16/32-bit

§

Invlpgbd

INVLPGB

a32 NP 0F 01 FE

INVLPGB

16/32/64-bit

§

Invlpgbq

INVLPGB

a64 NP 0F 01 FE

INVLPGB

64-bit

§

Tlbsync

TLBSYNC

NP 0F 01 FF

INVLPGB

16/32/64-bit

§

Prefetchreserved3_m8

PREFETCHW m8

0F 0D /3

PREFETCHW

16/32/64-bit

§

Prefetchreserved4_m8

PREFETCH m8

0F 0D /4

PREFETCHW

16/32/64-bit

§

Prefetchreserved5_m8

PREFETCH m8

0F 0D /5

PREFETCHW

16/32/64-bit

§

Prefetchreserved6_m8

PREFETCH m8

0F 0D /6

PREFETCHW

16/32/64-bit

§

Prefetchreserved7_m8

PREFETCH m8

0F 0D /7

PREFETCHW

16/32/64-bit

§

Ud0

UD0

0F FF

286+

16/32/64-bit

§

Vmgexit

VMGEXIT

F3 0F 01 D9

SEV-ES

16/32/64-bit

§

Getsecq

GETSECQ

NP o64 0F 37

SMX

64-bit

§

VEX_Ldtilecfg_m512

LDTILECFG m512

VEX.128.0F38.W0 49 !(11):000:bbb

AMX-TILE

64-bit

§

VEX_Tilerelease

TILERELEASE

VEX.128.0F38.W0 49 C0

AMX-TILE

64-bit

§

VEX_Sttilecfg_m512

STTILECFG m512

VEX.128.66.0F38.W0 49 !(11):000:bbb

AMX-TILE

64-bit

§

VEX_Tilezero_tmm

TILEZERO tmm1

VEX.128.F2.0F38.W0 49 11:rrr:000

AMX-TILE

64-bit

§

VEX_Tileloaddt1_tmm_sibmem

TILELOADDT1 tmm1, sibmem

VEX.128.66.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tilestored_sibmem_tmm

TILESTORED sibmem, tmm1

VEX.128.F3.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tileloadd_tmm_sibmem

TILELOADD tmm1, sibmem

VEX.128.F2.0F38.W0 4B !(11):rrr:100

AMX-TILE

64-bit

§

VEX_Tdpbf16ps_tmm_tmm_tmm

TDPBF16PS tmm1, tmm2, tmm3

VEX.128.F3.0F38.W0 5C 11:rrr:bbb

AMX-BF16

64-bit

§

VEX_Tdpbuud_tmm_tmm_tmm

TDPBUUD tmm1, tmm2, tmm3

VEX.128.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbusd_tmm_tmm_tmm

TDPBUSD tmm1, tmm2, tmm3

VEX.128.66.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbsud_tmm_tmm_tmm

TDPBSUD tmm1, tmm2, tmm3

VEX.128.F3.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

VEX_Tdpbssd_tmm_tmm_tmm

TDPBSSD tmm1, tmm2, tmm3

VEX.128.F2.0F38.W0 5E 11:rrr:bbb

AMX-INT8

64-bit

§

Fnstdw_AX

FNSTDW AX

DF E1

387 SL

16/32-bit

§

Fnstsg_AX

FNSTSG AX

DF E2

387 SL

16/32-bit

§

Rdshr_rm32

RDSHR r/m32

0F 36 /0

Cyrix 6x86MX, M II, III

16/32-bit

§

Wrshr_rm32

WRSHR r/m32

0F 37 /0

Cyrix 6x86MX, M II, III

16/32-bit

§

Smint

SMINT

0F 38

Cyrix 6x86MX+, AMD Geode GX/LX

16/32-bit

§

Dmint

DMINT

0F 39

AMD Geode GX/LX

16/32-bit

§

Rdm

RDM

0F 3A

AMD Geode GX/LX

16/32-bit

§

Svdc_m80_Sreg

SVDC m80, Sreg

0F 78 /r

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsdc_Sreg_m80

RSDC Sreg, m80

0F 79 /r

Cyrix, AMD Geode GX/LX

16/32-bit

§

Svldt_m80

SVLDT m80

0F 7A /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsldt_m80

RSLDT m80

0F 7B /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Svts_m80

SVTS m80

0F 7C /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Rsts_m80

RSTS m80

0F 7D /0

Cyrix, AMD Geode GX/LX

16/32-bit

§

Smint_0F7E

SMINT

0F 7E

Cyrix 6x86 or earlier

16/32-bit

§

Bb0_reset

BB0_RESET

0F 3A

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Bb1_reset

BB1_RESET

0F 3B

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Cpu_write

CPU_WRITE

0F 3C

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Cpu_read

CPU_READ

0F 3D

Cyrix MediaGX, GXm, GXLV, GX1

16/32-bit

§

Altinst

ALTINST

0F 3F

Centaur AIS

16/32-bit

§

Paveb_mm_mmm64

PAVEB mm, mm/m64

0F 50 /r

CYRIX_EMMI

16/32-bit

§

Paddsiw_mm_mmm64

PADDSIW mm, mm/m64

0F 51 /r

CYRIX_EMMI

16/32-bit

§

Pmagw_mm_mmm64

PMAGW mm, mm/m64

0F 52 /r

CYRIX_EMMI

16/32-bit

§

Pdistib_mm_m64

PDISTIB mm, m64

0F 54 /r

CYRIX_EMMI

16/32-bit

§

Psubsiw_mm_mmm64

PSUBSIW mm, mm/m64

0F 55 /r

CYRIX_EMMI

16/32-bit

§

Pmvzb_mm_m64

PMVZB mm, m64

0F 58 /r

CYRIX_EMMI

16/32-bit

§

Pmulhrw_mm_mmm64

PMULHRW mm, mm/m64

0F 59 /r

CYRIX_EMMI

16/32-bit

§

Pmvnzb_mm_m64

PMVNZB mm, m64

0F 5A /r

CYRIX_EMMI

16/32-bit

§

Pmvlzb_mm_m64

PMVLZB mm, m64

0F 5B /r

CYRIX_EMMI

16/32-bit

§

Pmvgezb_mm_m64

PMVGEZB mm, m64

0F 5C /r

CYRIX_EMMI

16/32-bit

§

Pmulhriw_mm_mmm64

PMULHRIW mm, mm/m64

0F 5D /r

CYRIX_EMMI

16/32-bit

§

Pmachriw_mm_m64

PMACHRIW mm, m64

0F 5E /r

CYRIX_EMMI

16/32-bit

§

Cyrix_D9D7

UNDOC

D9 D7

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_D9E2

UNDOC

D9 E2

Cyrix, AMD Geode GX/LX

16/32-bit

§

Ftstp

FTSTP

D9 E6

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_D9E7

UNDOC

D9 E7

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frint2

FRINT2

DB FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frichop

FRICHOP

DD FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DED8

UNDOC

DE D8

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDA

UNDOC

DE DA

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDC

UNDOC

DE DC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDD

UNDOC

DE DD

Cyrix, AMD Geode GX/LX

16/32-bit

§

Cyrix_DEDE

UNDOC

DE DE

Cyrix, AMD Geode GX/LX

16/32-bit

§

Frinear

FRINEAR

DF FC

Cyrix, AMD Geode GX/LX

16/32-bit

§

Tdcall

TDCALL

66 0F 01 CC

TDX

16/32/64-bit

§

Seamret

SEAMRET

66 0F 01 CD

TDX

64-bit

§

Seamops

SEAMOPS

66 0F 01 CE

TDX

64-bit

§

Seamcall

SEAMCALL

66 0F 01 CF

TDX

64-bit

§

Aesencwide128kl_m384

AESENCWIDE128KL m384, <XMM0-7>

F3 0F 38 D8 !(11):000:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesdecwide128kl_m384

AESDECWIDE128KL m384, <XMM0-7>

F3 0F 38 D8 !(11):001:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesencwide256kl_m512

AESENCWIDE256KL m512, <XMM0-7>

F3 0F 38 D8 !(11):010:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Aesdecwide256kl_m512

AESDECWIDE256KL m512, <XMM0-7>

F3 0F 38 D8 !(11):011:bbb

AESKLE and WIDE_KL

16/32/64-bit

§

Loadiwkey_xmm_xmm

LOADIWKEY xmm1, xmm2, <EAX>, <XMM0>

F3 0F 38 DC 11:rrr:bbb

KL

16/32/64-bit

§

Aesenc128kl_xmm_m384

AESENC128KL xmm, m384

F3 0F 38 DC !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesdec128kl_xmm_m384

AESDEC128KL xmm, m384

F3 0F 38 DD !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesenc256kl_xmm_m512

AESENC256KL xmm, m512

F3 0F 38 DE !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Aesdec256kl_xmm_m512

AESDEC256KL xmm, m512

F3 0F 38 DF !(11):rrr:bbb

AESKLE

16/32/64-bit

§

Encodekey128_r32_r32

ENCODEKEY128 r32, r32, <XMM0-2>, <XMM4-6>

F3 0F 38 FA 11:rrr:bbb

AESKLE

16/32/64-bit

§

Encodekey256_r32_r32

ENCODEKEY256 r32, r32, <XMM0-6>

F3 0F 38 FB 11:rrr:bbb

AESKLE

16/32/64-bit

§

VEX_Vbroadcastss_xmm_xmm

VBROADCASTSS xmm1, xmm2

VEX.128.66.0F38.W0 18 /r

AVX2

16/32/64-bit

§

VEX_Vbroadcastss_ymm_xmm

VBROADCASTSS ymm1, xmm2

VEX.256.66.0F38.W0 18 /r

AVX2

16/32/64-bit

§

VEX_Vbroadcastsd_ymm_xmm

VBROADCASTSD ymm1, xmm2

VEX.256.66.0F38.W0 19 /r

AVX2

16/32/64-bit

§

Vmgexit_F2

VMGEXIT

F2 0F 01 D9

SEV-ES

16/32/64-bit

§

Uiret

UIRET

F3 0F 01 EC

UINTR

64-bit

§

Testui

TESTUI

F3 0F 01 ED

UINTR

64-bit

§

Clui

CLUI

F3 0F 01 EE

UINTR

64-bit

§

Stui

STUI

F3 0F 01 EF

UINTR

64-bit

§

Senduipi_r64

SENDUIPI r64

F3 0F C7 /6

UINTR

64-bit

§

Hreset_imm8

HRESET imm8, <EAX>

F3 0F 3A F0 C0 ib

HRESET

16/32/64-bit

§

VEX_Vpdpbusd_xmm_xmm_xmmm128

VPDPBUSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 50 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusd_ymm_ymm_ymmm256

VPDPBUSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 50 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusds_xmm_xmm_xmmm128

VPDPBUSDS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 51 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpbusds_ymm_ymm_ymmm256

VPDPBUSDS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 51 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssd_xmm_xmm_xmmm128

VPDPWSSD xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 52 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssd_ymm_ymm_ymmm256

VPDPWSSD ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 52 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssds_xmm_xmm_xmmm128

VPDPWSSDS xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W0 53 /r

AVX-VNNI

16/32/64-bit

§

VEX_Vpdpwssds_ymm_ymm_ymmm256

VPDPWSSDS ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W0 53 /r

AVX-VNNI

16/32/64-bit

§

Ccs_hash_16

CCS_HASH

a16 F3 0F A6 E8

PADLOCK_GMI

16/32-bit

§

Ccs_hash_32

CCS_HASH

a32 F3 0F A6 E8

PADLOCK_GMI

16/32/64-bit

§

Ccs_hash_64

CCS_HASH

a64 F3 0F A6 E8

PADLOCK_GMI

64-bit

§

Ccs_encrypt_16

CCS_ENCRYPT

a16 F3 0F A7 F0

PADLOCK_GMI

16/32-bit

§

Ccs_encrypt_32

CCS_ENCRYPT

a32 F3 0F A7 F0

PADLOCK_GMI

16/32/64-bit

§

Ccs_encrypt_64

CCS_ENCRYPT

a64 F3 0F A7 F0

PADLOCK_GMI

64-bit

§

Lkgs_rm16

LKGS r/m16

o16 F2 0F 00 /6

LKGS

64-bit

§

Lkgs_r32m16

LKGS r32/m16

o32 F2 0F 00 /6

LKGS

64-bit

§

Lkgs_r64m16

LKGS r64/m16

F2 o64 0F 00 /6

LKGS

64-bit

§

Eretu

ERETU

F3 0F 01 CA

FRED

64-bit

§

Erets

ERETS

F2 0F 01 CA

FRED

64-bit

§

EVEX_Vaddph_xmm_k1z_xmm_xmmm128b16

VADDPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 58 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vaddph_ymm_k1z_ymm_ymmm256b16

VADDPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 58 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vaddph_zmm_k1z_zmm_zmmm512b16_er

VADDPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 58 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vaddsh_xmm_k1z_xmm_xmmm16_er

VADDSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 58 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_xmm_xmmm128b16_imm8

VCMPPH k1 {k2}, xmm2, xmm3/m128/m16bcst, imm8

EVEX.128.0F3A.W0 C2 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_ymm_ymmm256b16_imm8

VCMPPH k1 {k2}, ymm2, ymm3/m256/m16bcst, imm8

EVEX.256.0F3A.W0 C2 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpph_kr_k1_zmm_zmmm512b16_imm8_sae

VCMPPH k1 {k2}, zmm2, zmm3/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 C2 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vcmpsh_kr_k1_xmm_xmmm16_imm8_sae

VCMPSH k1 {k2}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.F3.0F3A.W0 C2 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vcomish_xmm_xmmm16_sae

VCOMISH xmm1, xmm2/m16{sae}

EVEX.LIG.MAP5.W0 2F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_xmm_k1z_xmmm128b32

VCVTDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_xmm_k1z_ymmm256b32

VCVTDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtdq2ph_ymm_k1z_zmmm512b32_er

VCVTDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_xmmm128b64

VCVTPD2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.66.MAP5.W1 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_ymmm256b64

VCVTPD2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.66.MAP5.W1 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtpd2ph_xmm_k1z_zmmm512b64_er

VCVTPD2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.66.MAP5.W1 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_xmm_k1z_xmmm64b16

VCVTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.66.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_ymm_k1z_xmmm128b16

VCVTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.66.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2dq_zmm_k1z_ymmm256b16_er

VCVTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}

EVEX.512.66.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_xmm_k1z_xmmm32b16

VCVTPH2PD xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.MAP5.W0 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_ymm_k1z_xmmm64b16

VCVTPH2PD ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.MAP5.W0 5A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2pd_zmm_k1z_xmmm128b16_sae

VCVTPH2PD zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.MAP5.W0 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_xmm_k1z_xmmm64b16

VCVTPH2PSX xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.66.MAP6.W0 13 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_ymm_k1z_xmmm128b16

VCVTPH2PSX ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.66.MAP6.W0 13 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2psx_zmm_k1z_ymmm256b16_sae

VCVTPH2PSX zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.66.MAP6.W0 13 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_xmm_k1z_xmmm32b16

VCVTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 7B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_ymm_k1z_xmmm64b16

VCVTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 7B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2qq_zmm_k1z_xmmm128b16_er

VCVTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}

EVEX.512.66.MAP5.W0 7B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_xmm_k1z_xmmm64b16

VCVTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_ymm_k1z_xmmm128b16

VCVTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2udq_zmm_k1z_ymmm256b16_er

VCVTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}

EVEX.512.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_xmm_k1z_xmmm32b16

VCVTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_ymm_k1z_xmmm64b16

VCVTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 79 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uqq_zmm_k1z_xmmm128b16_er

VCVTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}

EVEX.512.66.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_xmm_k1z_xmmm128b16

VCVTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_ymm_k1z_ymmm256b16

VCVTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2uw_zmm_k1z_zmmm512b16_er

VCVTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_xmm_k1z_xmmm128b16

VCVTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_ymm_k1z_ymmm256b16

VCVTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtph2w_zmm_k1z_zmmm512b16_er

VCVTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.66.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_xmm_k1z_xmmm128b32

VCVTPS2PHX xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.66.MAP5.W0 1D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_xmm_k1z_ymmm256b32

VCVTPS2PHX xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.66.MAP5.W0 1D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtps2phx_ymm_k1z_zmmm512b32_er

VCVTPS2PHX ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.66.MAP5.W0 1D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_xmmm128b64

VCVTQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.MAP5.W1 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_ymmm256b64

VCVTQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.MAP5.W1 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtqq2ph_xmm_k1z_zmmm512b64_er

VCVTQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.MAP5.W1 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsd2sh_xmm_k1z_xmm_xmmm64_er

VCVTSD2SH xmm1 {k1}{z}, xmm2, xmm3/m64{er}

EVEX.LIG.F2.MAP5.W1 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2sd_xmm_k1z_xmm_xmmm16_sae

VCVTSH2SD xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2si_r32_xmmm16_er

VCVTSH2SI r32, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W0 2D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2si_r64_xmmm16_er

VCVTSH2SI r64, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W1 2D /r

AVX512-FP16

64-bit

§

EVEX_Vcvtsh2ss_xmm_k1z_xmm_xmmm16_sae

VCVTSH2SS xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.MAP6.W0 13 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2usi_r32_xmmm16_er

VCVTSH2USI r32, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W0 79 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsh2usi_r64_xmmm16_er

VCVTSH2USI r64, xmm1/m16{er}

EVEX.LIG.F3.MAP5.W1 79 /r

AVX512-FP16

64-bit

§

EVEX_Vcvtsi2sh_xmm_xmm_rm32_er

VCVTSI2SH xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.MAP5.W0 2A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtsi2sh_xmm_xmm_rm64_er

VCVTSI2SH xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.MAP5.W1 2A /r

AVX512-FP16

64-bit

§

EVEX_Vcvtss2sh_xmm_k1z_xmm_xmmm32_er

VCVTSS2SH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.MAP5.W0 1D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_xmm_k1z_xmmm64b16

VCVTTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.F3.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_ymm_k1z_xmmm128b16

VCVTTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.F3.MAP5.W0 5B /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2dq_zmm_k1z_ymmm256b16_sae

VCVTTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.F3.MAP5.W0 5B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_xmm_k1z_xmmm32b16

VCVTTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_ymm_k1z_xmmm64b16

VCVTTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2qq_zmm_k1z_xmmm128b16_sae

VCVTTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.66.MAP5.W0 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_xmm_k1z_xmmm64b16

VCVTTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.128.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_ymm_k1z_xmmm128b16

VCVTTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.256.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2udq_zmm_k1z_ymmm256b16_sae

VCVTTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}

EVEX.512.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_xmm_k1z_xmmm32b16

VCVTTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst

EVEX.128.66.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_ymm_k1z_xmmm64b16

VCVTTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst

EVEX.256.66.MAP5.W0 78 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uqq_zmm_k1z_xmmm128b16_sae

VCVTTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}

EVEX.512.66.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_xmm_k1z_xmmm128b16

VCVTTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_ymm_k1z_ymmm256b16

VCVTTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2uw_zmm_k1z_zmmm512b16_sae

VCVTTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.MAP5.W0 7C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_xmm_k1z_xmmm128b16

VCVTTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_ymm_k1z_ymmm256b16

VCVTTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP5.W0 7C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttph2w_zmm_k1z_zmmm512b16_sae

VCVTTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.66.MAP5.W0 7C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2si_r32_xmmm16_sae

VCVTTSH2SI r32, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W0 2C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2si_r64_xmmm16_sae

VCVTTSH2SI r64, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W1 2C /r

AVX512-FP16

64-bit

§

EVEX_Vcvttsh2usi_r32_xmmm16_sae

VCVTTSH2USI r32, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W0 78 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvttsh2usi_r64_xmmm16_sae

VCVTTSH2USI r64, xmm1/m16{sae}

EVEX.LIG.F3.MAP5.W1 78 /r

AVX512-FP16

64-bit

§

EVEX_Vcvtudq2ph_xmm_k1z_xmmm128b32

VCVTUDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst

EVEX.128.F2.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtudq2ph_xmm_k1z_ymmm256b32

VCVTUDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst

EVEX.256.F2.MAP5.W0 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtudq2ph_ymm_k1z_zmmm512b32_er

VCVTUDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}

EVEX.512.F2.MAP5.W0 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_xmmm128b64

VCVTUQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst

EVEX.128.F2.MAP5.W1 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_ymmm256b64

VCVTUQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst

EVEX.256.F2.MAP5.W1 7A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuqq2ph_xmm_k1z_zmmm512b64_er

VCVTUQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}

EVEX.512.F2.MAP5.W1 7A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtusi2sh_xmm_xmm_rm32_er

VCVTUSI2SH xmm1, xmm2, r/m32{er}

EVEX.LIG.F3.MAP5.W0 7B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtusi2sh_xmm_xmm_rm64_er

VCVTUSI2SH xmm1, xmm2, r/m64{er}

EVEX.LIG.F3.MAP5.W1 7B /r

AVX512-FP16

64-bit

§

EVEX_Vcvtuw2ph_xmm_k1z_xmmm128b16

VCVTUW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.F2.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuw2ph_ymm_k1z_ymmm256b16

VCVTUW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.F2.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtuw2ph_zmm_k1z_zmmm512b16_er

VCVTUW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.F2.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_xmm_k1z_xmmm128b16

VCVTW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.F3.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_ymm_k1z_ymmm256b16

VCVTW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.F3.MAP5.W0 7D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vcvtw2ph_zmm_k1z_zmmm512b16_er

VCVTW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.F3.MAP5.W0 7D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_xmm_k1z_xmm_xmmm128b16

VDIVPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_ymm_k1z_ymm_ymmm256b16

VDIVPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vdivph_zmm_k1z_zmm_zmmm512b16_er

VDIVPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 5E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vdivsh_xmm_k1z_xmm_xmmm16_er

VDIVSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 5E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_xmm_k1z_xmm_xmmm128b32

VFCMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_ymm_k1z_ymm_ymmm256b32

VFCMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcph_zmm_k1z_zmm_zmmm512b32_er

VFCMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F2.MAP6.W0 56 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_xmm_k1z_xmm_xmmm128b32

VFMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_ymm_k1z_ymm_ymmm256b32

VFMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.MAP6.W0 56 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcph_zmm_k1z_zmm_zmmm512b32_er

VFMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F3.MAP6.W0 56 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmaddcsh_xmm_k1z_xmm_xmmm32_er

VFCMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F2.MAP6.W0 57 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddcsh_xmm_k1z_xmm_xmmm32_er

VFMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.MAP6.W0 57 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_xmm_k1z_xmm_xmmm128b32

VFCMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F2.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_ymm_k1z_ymm_ymmm256b32

VFCMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F2.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcph_zmm_k1z_zmm_zmmm512b32_er

VFCMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F2.MAP6.W0 D6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_xmm_k1z_xmm_xmmm128b32

VFMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst

EVEX.128.F3.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_ymm_k1z_ymm_ymmm256b32

VFMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst

EVEX.256.F3.MAP6.W0 D6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcph_zmm_k1z_zmm_zmmm512b32_er

VFMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}

EVEX.512.F3.MAP6.W0 D6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfcmulcsh_xmm_k1z_xmm_xmmm32_er

VFCMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F2.MAP6.W0 D7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmulcsh_xmm_k1z_xmm_xmmm32_er

VFMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}

EVEX.LIG.F3.MAP6.W0 D7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_xmm_k1z_xmm_xmmm128b16

VFMADDSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 96 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_ymm_k1z_ymm_ymmm256b16

VFMADDSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 96 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub132ph_zmm_k1z_zmm_zmmm512b16_er

VFMADDSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 96 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_xmm_k1z_xmm_xmmm128b16

VFMADDSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_ymm_k1z_ymm_ymmm256b16

VFMADDSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub213ph_zmm_k1z_zmm_zmmm512b16_er

VFMADDSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_xmm_k1z_xmm_xmmm128b16

VFMADDSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_ymm_k1z_ymm_ymmm256b16

VFMADDSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B6 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmaddsub231ph_zmm_k1z_zmm_zmmm512b16_er

VFMADDSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B6 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_xmm_k1z_xmm_xmmm128b16

VFMSUBADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 97 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_ymm_k1z_ymm_ymmm256b16

VFMSUBADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 97 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd132ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUBADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 97 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_xmm_k1z_xmm_xmmm128b16

VFMSUBADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_ymm_k1z_ymm_ymmm256b16

VFMSUBADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd213ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUBADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_xmm_k1z_xmm_xmmm128b16

VFMSUBADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_ymm_k1z_ymm_ymmm256b16

VFMSUBADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B7 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsubadd231ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUBADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B7 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_xmm_k1z_xmm_xmmm128b16

VFMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 98 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_ymm_k1z_ymm_ymmm256b16

VFMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 98 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132ph_zmm_k1z_zmm_zmmm512b16_er

VFMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 98 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_xmm_k1z_xmm_xmmm128b16

VFMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 A8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_ymm_k1z_ymm_ymmm256b16

VFMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 A8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213ph_zmm_k1z_zmm_zmmm512b16_er

VFMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 A8 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_xmm_k1z_xmm_xmmm128b16

VFMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 B8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_ymm_k1z_ymm_ymmm256b16

VFMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 B8 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231ph_zmm_k1z_zmm_zmmm512b16_er

VFMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 B8 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_xmm_k1z_xmm_xmmm128b16

VFNMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_ymm_k1z_ymm_ymmm256b16

VFNMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132ph_zmm_k1z_zmm_zmmm512b16_er

VFNMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_xmm_k1z_xmm_xmmm128b16

VFNMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_ymm_k1z_ymm_ymmm256b16

VFNMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213ph_zmm_k1z_zmm_zmmm512b16_er

VFNMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AC /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_xmm_k1z_xmm_xmmm128b16

VFNMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_ymm_k1z_ymm_ymmm256b16

VFNMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BC /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231ph_zmm_k1z_zmm_zmmm512b16_er

VFNMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BC /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd132sh_xmm_k1z_xmm_xmmm16_er

VFMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 99 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd213sh_xmm_k1z_xmm_xmmm16_er

VFMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 A9 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmadd231sh_xmm_k1z_xmm_xmmm16_er

VFMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 B9 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd132sh_xmm_k1z_xmm_xmmm16_er

VFNMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd213sh_xmm_k1z_xmm_xmmm16_er

VFNMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AD /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmadd231sh_xmm_k1z_xmm_xmmm16_er

VFNMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BD /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_xmm_k1z_xmm_xmmm128b16

VFMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_ymm_k1z_ymm_ymmm256b16

VFMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9A /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9A /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_xmm_k1z_xmm_xmmm128b16

VFMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_ymm_k1z_ymm_ymmm256b16

VFMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AA /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_xmm_k1z_xmm_xmmm128b16

VFMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_ymm_k1z_ymm_ymmm256b16

VFMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BA /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231ph_zmm_k1z_zmm_zmmm512b16_er

VFMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BA /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_xmm_k1z_xmm_xmmm128b16

VFNMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 9E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_ymm_k1z_ymm_ymmm256b16

VFNMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 9E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132ph_zmm_k1z_zmm_zmmm512b16_er

VFNMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 9E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_xmm_k1z_xmm_xmmm128b16

VFNMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 AE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_ymm_k1z_ymm_ymmm256b16

VFNMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 AE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213ph_zmm_k1z_zmm_zmmm512b16_er

VFNMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 AE /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_xmm_k1z_xmm_xmmm128b16

VFNMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 BE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_ymm_k1z_ymm_ymmm256b16

VFNMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 BE /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231ph_zmm_k1z_zmm_zmmm512b16_er

VFNMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 BE /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub132sh_xmm_k1z_xmm_xmmm16_er

VFMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9B /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub213sh_xmm_k1z_xmm_xmmm16_er

VFMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AB /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfmsub231sh_xmm_k1z_xmm_xmmm16_er

VFMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BB /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub132sh_xmm_k1z_xmm_xmmm16_er

VFNMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 9F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub213sh_xmm_k1z_xmm_xmmm16_er

VFNMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 AF /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfnmsub231sh_xmm_k1z_xmm_xmmm16_er

VFNMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 BF /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_xmmm128b16_imm8

VFPCLASSPH k1 {k2}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 66 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_ymmm256b16_imm8

VFPCLASSPH k1 {k2}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 66 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclassph_kr_k1_zmmm512b16_imm8

VFPCLASSPH k1 {k2}, zmm2/m512/m16bcst, imm8

EVEX.512.0F3A.W0 66 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vfpclasssh_kr_k1_xmmm16_imm8

VFPCLASSSH k1 {k2}, xmm2/m16, imm8

EVEX.LIG.0F3A.W0 67 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_xmm_k1z_xmmm128b16

VGETEXPPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 42 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_ymm_k1z_ymmm256b16

VGETEXPPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 42 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpph_zmm_k1z_zmmm512b16_sae

VGETEXPPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}

EVEX.512.66.MAP6.W0 42 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetexpsh_xmm_k1z_xmm_xmmm16_sae

VGETEXPSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.66.MAP6.W0 43 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_xmm_k1z_xmmm128b16_imm8

VGETMANTPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 26 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_ymm_k1z_ymmm256b16_imm8

VGETMANTPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 26 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantph_zmm_k1z_zmmm512b16_imm8_sae

VGETMANTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 26 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vgetmantsh_xmm_k1z_xmm_xmmm16_imm8_sae

VGETMANTSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 27 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_xmm_k1z_xmm_xmmm128b16

VMAXPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5F /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_ymm_k1z_ymm_ymmm256b16

VMAXPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5F /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxph_zmm_k1z_zmm_zmmm512b16_sae

VMAXPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}

EVEX.512.MAP5.W0 5F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmaxsh_xmm_k1z_xmm_xmmm16_sae

VMAXSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_xmm_k1z_xmm_xmmm128b16

VMINPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_ymm_k1z_ymm_ymmm256b16

VMINPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5D /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vminph_zmm_k1z_zmm_zmmm512b16_sae

VMINPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}

EVEX.512.MAP5.W0 5D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vminsh_xmm_k1z_xmm_xmmm16_sae

VMINSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}

EVEX.LIG.F3.MAP5.W0 5D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_m16

VMOVSH xmm1 {k1}{z}, m16

EVEX.LIG.F3.MAP5.W0 10 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_m16_k1_xmm

VMOVSH m16 {k1}, xmm1

EVEX.LIG.F3.MAP5.W0 11 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_xmm_xmm

VMOVSH xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.MAP5.W0 10 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovsh_xmm_k1z_xmm_xmm_MAP5_11

VMOVSH xmm1 {k1}{z}, xmm2, xmm3

EVEX.LIG.F3.MAP5.W0 11 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovw_xmm_r32m16

VMOVW xmm1, r32/m16

EVEX.128.66.MAP5.W0 6E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovw_xmm_r64m16

VMOVW xmm1, r64/m16

EVEX.128.66.MAP5.W1 6E /r

AVX512-FP16

64-bit

§

EVEX_Vmovw_r32m16_xmm

VMOVW r32/m16, xmm1

EVEX.128.66.MAP5.W0 7E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmovw_r64m16_xmm

VMOVW r64/m16, xmm1

EVEX.128.66.MAP5.W1 7E /r

AVX512-FP16

64-bit

§

EVEX_Vmulph_xmm_k1z_xmm_xmmm128b16

VMULPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 59 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmulph_ymm_k1z_ymm_ymmm256b16

VMULPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 59 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vmulph_zmm_k1z_zmm_zmmm512b16_er

VMULPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 59 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vmulsh_xmm_k1z_xmm_xmmm16_er

VMULSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 59 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_xmm_k1z_xmmm128b16

VRCPPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 4C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_ymm_k1z_ymmm256b16

VRCPPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 4C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpph_zmm_k1z_zmmm512b16

VRCPPH zmm1 {k1}{z}, zmm2/m512/m16bcst

EVEX.512.66.MAP6.W0 4C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrcpsh_xmm_k1z_xmm_xmmm16

VRCPSH xmm1 {k1}{z}, xmm2, xmm3/m16

EVEX.LIG.66.MAP6.W0 4D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_xmm_k1z_xmmm128b16_imm8

VREDUCEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 56 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_ymm_k1z_ymmm256b16_imm8

VREDUCEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 56 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vreduceph_zmm_k1z_zmmm512b16_imm8_sae

VREDUCEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 56 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vreducesh_xmm_k1z_xmm_xmmm16_imm8_sae

VREDUCESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 57 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_xmm_k1z_xmmm128b16_imm8

VRNDSCALEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8

EVEX.128.0F3A.W0 08 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_ymm_k1z_ymmm256b16_imm8

VRNDSCALEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8

EVEX.256.0F3A.W0 08 /r ib

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscaleph_zmm_k1z_zmmm512b16_imm8_sae

VRNDSCALEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8

EVEX.512.0F3A.W0 08 /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrndscalesh_xmm_k1z_xmm_xmmm16_imm8_sae

VRNDSCALESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8

EVEX.LIG.0F3A.W0 0A /r ib

AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_xmm_k1z_xmmm128b16

VRSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.66.MAP6.W0 4E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_ymm_k1z_ymmm256b16

VRSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.66.MAP6.W0 4E /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtph_zmm_k1z_zmmm512b16

VRSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst

EVEX.512.66.MAP6.W0 4E /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vrsqrtsh_xmm_k1z_xmm_xmmm16

VRSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16

EVEX.LIG.66.MAP6.W0 4F /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_xmm_k1z_xmm_xmmm128b16

VSCALEFPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.66.MAP6.W0 2C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_ymm_k1z_ymm_ymmm256b16

VSCALEFPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.66.MAP6.W0 2C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefph_zmm_k1z_zmm_zmmm512b16_er

VSCALEFPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.66.MAP6.W0 2C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vscalefsh_xmm_k1z_xmm_xmmm16_er

VSCALEFSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.66.MAP6.W0 2D /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_xmm_k1z_xmmm128b16

VSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst

EVEX.128.MAP5.W0 51 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_ymm_k1z_ymmm256b16

VSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst

EVEX.256.MAP5.W0 51 /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtph_zmm_k1z_zmmm512b16_er

VSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}

EVEX.512.MAP5.W0 51 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsqrtsh_xmm_k1z_xmm_xmmm16_er

VSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 51 /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_xmm_k1z_xmm_xmmm128b16

VSUBPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst

EVEX.128.MAP5.W0 5C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_ymm_k1z_ymm_ymmm256b16

VSUBPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst

EVEX.256.MAP5.W0 5C /r

AVX512VL and AVX512-FP16

16/32/64-bit

§

EVEX_Vsubph_zmm_k1z_zmm_zmmm512b16_er

VSUBPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}

EVEX.512.MAP5.W0 5C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vsubsh_xmm_k1z_xmm_xmmm16_er

VSUBSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}

EVEX.LIG.F3.MAP5.W0 5C /r

AVX512-FP16

16/32/64-bit

§

EVEX_Vucomish_xmm_xmmm16_sae

VUCOMISH xmm1, xmm2/m16{sae}

EVEX.LIG.MAP5.W0 2E /r

AVX512-FP16

16/32/64-bit

§

Rdudbg

RDUDBG

0F 0E

UDBG

16/32/64-bit

§

Wrudbg

WRUDBG

0F 0F

UDBG

16/32/64-bit

§

VEX_KNC_Jkzd_kr_rel8_64

JKZD k1, rel8

VEX.128.0F.W0 74 cb

KNC

64-bit

§

VEX_KNC_Jknzd_kr_rel8_64

JKNZD k1, rel8

VEX.128.0F.W0 75 cb

KNC

64-bit

§

VEX_KNC_Vprefetchnta_m8

VPREFETCHNTA m8

VEX.128.0F.WIG 18 /0

KNC

64-bit

§

VEX_KNC_Vprefetch0_m8

VPREFETCH0 m8

VEX.128.0F.WIG 18 /1

KNC

64-bit

§

VEX_KNC_Vprefetch1_m8

VPREFETCH1 m8

VEX.128.0F.WIG 18 /2

KNC

64-bit

§

VEX_KNC_Vprefetch2_m8

VPREFETCH2 m8

VEX.128.0F.WIG 18 /3

KNC

64-bit

§

VEX_KNC_Vprefetchenta_m8

VPREFETCHENTA m8

VEX.128.0F.WIG 18 /4

KNC

64-bit

§

VEX_KNC_Vprefetche0_m8

VPREFETCHE0 m8

VEX.128.0F.WIG 18 /5

KNC

64-bit

§

VEX_KNC_Vprefetche1_m8

VPREFETCHE1 m8

VEX.128.0F.WIG 18 /6

KNC

64-bit

§

VEX_KNC_Vprefetche2_m8

VPREFETCHE2 m8

VEX.128.0F.WIG 18 /7

KNC

64-bit

§

VEX_KNC_Kand_kr_kr

KAND k1, k2

VEX.128.0F.W0 41 /r

KNC

64-bit

§

VEX_KNC_Kandn_kr_kr

KANDN k1, k2

VEX.128.0F.W0 42 /r

KNC

64-bit

§

VEX_KNC_Kandnr_kr_kr

KANDNR k1, k2

VEX.128.0F.W0 43 /r

KNC

64-bit

§

VEX_KNC_Knot_kr_kr

KNOT k1, k2

VEX.128.0F.W0 44 /r

KNC

64-bit

§

VEX_KNC_Kor_kr_kr

KOR k1, k2

VEX.128.0F.W0 45 /r

KNC

64-bit

§

VEX_KNC_Kxnor_kr_kr

KXNOR k1, k2

VEX.128.0F.W0 46 /r

KNC

64-bit

§

VEX_KNC_Kxor_kr_kr

KXOR k1, k2

VEX.128.0F.W0 47 /r

KNC

64-bit

§

VEX_KNC_Kmerge2l1h_kr_kr

KMERGE2L1H k1, k2

VEX.128.0F.W0 48 /r

KNC

64-bit

§

VEX_KNC_Kmerge2l1l_kr_kr

KMERGE2L1L k1, k2

VEX.128.0F.W0 49 /r

KNC

64-bit

§

VEX_KNC_Jkzd_kr_rel32_64

JKZD k1, rel32

VEX.128.0F.W0 84 cd

KNC

64-bit

§

VEX_KNC_Jknzd_kr_rel32_64

JKNZD k1, rel32

VEX.128.0F.W0 85 cd

KNC

64-bit

§

VEX_KNC_Kmov_kr_kr

KMOV k1, k2

VEX.128.0F.W0 90 /r

KNC

64-bit

§

VEX_KNC_Kmov_kr_r32

KMOV k1, r32

VEX.128.0F.W0 92 /r

KNC

64-bit

§

VEX_KNC_Kmov_r32_kr

KMOV r32, k1

VEX.128.0F.W0 93 /r

KNC

64-bit

§

VEX_KNC_Kconcath_r64_kr_kr

KCONCATH r64, k1, k2

VEX.128.0F.W0 95 /r

KNC

64-bit

§

VEX_KNC_Kconcatl_r64_kr_kr

KCONCATL r64, k1, k2

VEX.128.0F.W0 97 /r

KNC

64-bit

§

VEX_KNC_Kortest_kr_kr

KORTEST k1, k2

VEX.128.0F.W0 98 /r

KNC

64-bit

§

VEX_KNC_Delay_r32

DELAY r32

VEX.128.F3.0F.W0 AE /6

KNC

64-bit

§

VEX_KNC_Delay_r64

DELAY r64

VEX.128.F3.0F.W1 AE /6

KNC

64-bit

§

VEX_KNC_Spflt_r32

SPFLT r32

VEX.128.F2.0F.W0 AE /6

KNC

64-bit

§

VEX_KNC_Spflt_r64

SPFLT r64

VEX.128.F2.0F.W1 AE /6

KNC

64-bit

§

VEX_KNC_Clevict1_m8

CLEVICT1 m8

VEX.128.F3.0F.WIG AE /7

KNC

64-bit

§

VEX_KNC_Clevict0_m8

CLEVICT0 m8

VEX.128.F2.0F.WIG AE /7

KNC

64-bit

§

VEX_KNC_Popcnt_r32_r32

POPCNT r32, r32

VEX.128.F3.0F.W0 B8 /r

KNC

64-bit

§

VEX_KNC_Popcnt_r64_r64

POPCNT r64, r64

VEX.128.F3.0F.W1 B8 /r

KNC

64-bit

§

VEX_KNC_Tzcnt_r32_r32

TZCNT r32, r32

VEX.128.F3.0F.W0 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnt_r64_r64

TZCNT r64, r64

VEX.128.F3.0F.W1 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnti_r32_r32

TZCNTI r32, r32

VEX.128.F2.0F.W0 BC /r

KNC

64-bit

§

VEX_KNC_Tzcnti_r64_r64

TZCNTI r64, r64

VEX.128.F2.0F.W1 BC /r

KNC

64-bit

§

VEX_KNC_Lzcnt_r32_r32

LZCNT r32, r32

VEX.128.F3.0F.W0 BD /r

KNC

64-bit

§

VEX_KNC_Lzcnt_r64_r64

LZCNT r64, r64

VEX.128.F3.0F.W1 BD /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F3_0F38_W0_F0

UNDOC r32, r/m32

VEX.128.F3.0F38.W0 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F3_0F38_W1_F0

UNDOC r64, r/m64

VEX.128.F3.0F38.W1 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F0

UNDOC r32, r/m32

VEX.128.F2.0F38.W0 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F0

UNDOC r64, r/m64

VEX.128.F2.0F38.W1 F0 /r

KNC

64-bit

§

VEX_KNC_Undoc_r32_rm32_128_F2_0F38_W0_F1

UNDOC r32, r/m32

VEX.128.F2.0F38.W0 F1 /r

KNC

64-bit

§

VEX_KNC_Undoc_r64_rm64_128_F2_0F38_W1_F1

UNDOC r64, r/m64

VEX.128.F2.0F38.W1 F1 /r

KNC

64-bit

§

VEX_KNC_Kextract_kr_r64_imm8

KEXTRACT k1, r64, imm8

VEX.128.66.0F3A.W0 3E /r ib

KNC

64-bit

§

MVEX_Vprefetchnta_m

VPREFETCHNTA m

MVEX.512.0F.WIG 18 /0

KNC

64-bit

§

MVEX_Vprefetch0_m

VPREFETCH0 m

MVEX.512.0F.WIG 18 /1

KNC

64-bit

§

MVEX_Vprefetch1_m

VPREFETCH1 m

MVEX.512.0F.WIG 18 /2

KNC

64-bit

§

MVEX_Vprefetch2_m

VPREFETCH2 m

MVEX.512.0F.WIG 18 /3

KNC

64-bit

§

MVEX_Vprefetchenta_m

VPREFETCHENTA m

MVEX.512.0F.WIG 18 /4

KNC

64-bit

§

MVEX_Vprefetche0_m

VPREFETCHE0 m

MVEX.512.0F.WIG 18 /5

KNC

64-bit

§

MVEX_Vprefetche1_m

VPREFETCHE1 m

MVEX.512.0F.WIG 18 /6

KNC

64-bit

§

MVEX_Vprefetche2_m

VPREFETCHE2 m

MVEX.512.0F.WIG 18 /7

KNC

64-bit

§

MVEX_Vmovaps_zmm_k1_zmmmt

VMOVAPS zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.0F.W0 28 /r

KNC

64-bit

§

MVEX_Vmovapd_zmm_k1_zmmmt

VMOVAPD zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F.W1 28 /r

KNC

64-bit

§

MVEX_Vmovaps_mt_k1_zmm

VMOVAPS mt {k1}, Df32(zmm1)

MVEX.512.0F.W0 29 /r

KNC

64-bit

§

MVEX_Vmovapd_mt_k1_zmm

VMOVAPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F.W1 29 /r

KNC

64-bit

§

MVEX_Vmovnrapd_m_k1_zmm

VMOVNRAPD m {k1}, Df64(zmm1)

MVEX.512.F3.0F.W1.EH0 29 /r

KNC

64-bit

§

MVEX_Vmovnrngoapd_m_k1_zmm

VMOVNRNGOAPD m {k1}, Df64(zmm1)

MVEX.512.F3.0F.W1.EH1 29 /r

KNC

64-bit

§

MVEX_Vmovnraps_m_k1_zmm

VMOVNRAPS m {k1}, Df32(zmm1)

MVEX.512.F2.0F.W0.EH0 29 /r

KNC

64-bit

§

MVEX_Vmovnrngoaps_m_k1_zmm

VMOVNRNGOAPS m {k1}, Df32(zmm1)

MVEX.512.F2.0F.W0.EH1 29 /r

KNC

64-bit

§

MVEX_Vaddps_zmm_k1_zmm_zmmmt

VADDPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 58 /r

KNC

64-bit

§

MVEX_Vaddpd_zmm_k1_zmm_zmmmt

VADDPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 58 /r

KNC

64-bit

§

MVEX_Vmulps_zmm_k1_zmm_zmmmt

VMULPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 59 /r

KNC

64-bit

§

MVEX_Vmulpd_zmm_k1_zmm_zmmmt

VMULPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 59 /r

KNC

64-bit

§

MVEX_Vcvtps2pd_zmm_k1_zmmmt

VCVTPS2PD zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.0F.W0 5A /r

KNC

64-bit

§

MVEX_Vcvtpd2ps_zmm_k1_zmmmt

VCVTPD2PS zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F.W1 5A /r

KNC

64-bit

§

MVEX_Vsubps_zmm_k1_zmm_zmmmt

VSUBPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.0F.W0 5C /r

KNC

64-bit

§

MVEX_Vsubpd_zmm_k1_zmm_zmmmt

VSUBPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 5C /r

KNC

64-bit

§

MVEX_Vpcmpgtd_kr_k1_zmm_zmmmt

VPCMPGTD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F.W0 66 /r

KNC

64-bit

§

MVEX_Vmovdqa32_zmm_k1_zmmmt

VMOVDQA32 zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.66.0F.W0 6F /r

KNC

64-bit

§

MVEX_Vmovdqa64_zmm_k1_zmmmt

VMOVDQA64 zmm1 {k1}, Si64(zmm2/mt)

MVEX.512.66.0F.W1 6F /r

KNC

64-bit

§

MVEX_Vpshufd_zmm_k1_zmmmt_imm8

VPSHUFD zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F.W0 70 /r ib

KNC

64-bit

§

MVEX_Vpsrld_zmm_k1_zmmmt_imm8

VPSRLD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /2 ib

KNC

64-bit

§

MVEX_Vpsrad_zmm_k1_zmmmt_imm8

VPSRAD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /4 ib

KNC

64-bit

§

MVEX_Vpslld_zmm_k1_zmmmt_imm8

VPSLLD zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.NDD.512.66.0F.W0 72 /6 ib

KNC

64-bit

§

MVEX_Vpcmpeqd_kr_k1_zmm_zmmmt

VPCMPEQD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F.W0 76 /r

KNC

64-bit

§

MVEX_Vcvtudq2pd_zmm_k1_zmmmt

VCVTUDQ2PD zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.F3.0F.W0 7A /r

KNC

64-bit

§

MVEX_Vmovdqa32_mt_k1_zmm

VMOVDQA32 mt {k1}, Di32(zmm1)

MVEX.512.66.0F.W0 7F /r

KNC

64-bit

§

MVEX_Vmovdqa64_mt_k1_zmm

VMOVDQA64 mt {k1}, Di64(zmm1)

MVEX.512.66.0F.W1 7F /r

KNC

64-bit

§

MVEX_Clevict1_m

CLEVICT1 m

MVEX.512.F3.0F.WIG AE /7

KNC

64-bit

§

MVEX_Clevict0_m

CLEVICT0 m

MVEX.512.F2.0F.WIG AE /7

KNC

64-bit

§

MVEX_Vcmpps_kr_k1_zmm_zmmmt_imm8

VCMPPS k2 {k1}, zmm1, Sf32(zmm2/mt), imm8

MVEX.NDS.512.0F.W0 C2 /r ib

KNC

64-bit

§

MVEX_Vcmppd_kr_k1_zmm_zmmmt_imm8

VCMPPD k2 {k1}, zmm1, Sf64(zmm2/mt), imm8

MVEX.NDS.512.66.0F.W1 C2 /r ib

KNC

64-bit

§

MVEX_Vpandd_zmm_k1_zmm_zmmmt

VPANDD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 DB /r

KNC

64-bit

§

MVEX_Vpandq_zmm_k1_zmm_zmmmt

VPANDQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 DB /r

KNC

64-bit

§

MVEX_Vpandnd_zmm_k1_zmm_zmmmt

VPANDND zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 DF /r

KNC

64-bit

§

MVEX_Vpandnq_zmm_k1_zmm_zmmmt

VPANDNQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 DF /r

KNC

64-bit

§

MVEX_Vcvtdq2pd_zmm_k1_zmmmt

VCVTDQ2PD zmm1 {k1}, Si32(zmm2/mt)

MVEX.512.F3.0F.W0 E6 /r

KNC

64-bit

§

MVEX_Vpord_zmm_k1_zmm_zmmmt

VPORD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 EB /r

KNC

64-bit

§

MVEX_Vporq_zmm_k1_zmm_zmmmt

VPORQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 EB /r

KNC

64-bit

§

MVEX_Vpxord_zmm_k1_zmm_zmmmt

VPXORD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 EF /r

KNC

64-bit

§

MVEX_Vpxorq_zmm_k1_zmm_zmmmt

VPXORQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F.W1 EF /r

KNC

64-bit

§

MVEX_Vpsubd_zmm_k1_zmm_zmmmt

VPSUBD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 FA /r

KNC

64-bit

§

MVEX_Vpaddd_zmm_k1_zmm_zmmmt

VPADDD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F.W0 FE /r

KNC

64-bit

§

MVEX_Vbroadcastss_zmm_k1_mt

VBROADCASTSS zmm1 {k1}, Uf32(mt)

MVEX.512.66.0F38.W0 18 /r

KNC

64-bit

§

MVEX_Vbroadcastsd_zmm_k1_mt

VBROADCASTSD zmm1 {k1}, Uf64(mt)

MVEX.512.66.0F38.W1 19 /r

KNC

64-bit

§

MVEX_Vbroadcastf32x4_zmm_k1_mt

VBROADCASTF32X4 zmm1 {k1}, Uf32(mt)

MVEX.512.66.0F38.W0 1A /r

KNC

64-bit

§

MVEX_Vbroadcastf64x4_zmm_k1_mt

VBROADCASTF64X4 zmm1 {k1}, Uf64(mt)

MVEX.512.66.0F38.W1 1B /r

KNC

64-bit

§

MVEX_Vptestmd_kr_k1_zmm_zmmmt

VPTESTMD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F38.W0 27 /r

KNC

64-bit

§

MVEX_Vpermd_zmm_k1_zmm_zmmmt

VPERMD zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 36 /r

KNC

64-bit

§

MVEX_Vpminsd_zmm_k1_zmm_zmmmt

VPMINSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 39 /r

KNC

64-bit

§

MVEX_Vpminud_zmm_k1_zmm_zmmmt

VPMINUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3B /r

KNC

64-bit

§

MVEX_Vpmaxsd_zmm_k1_zmm_zmmmt

VPMAXSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3D /r

KNC

64-bit

§

MVEX_Vpmaxud_zmm_k1_zmm_zmmmt

VPMAXUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 3F /r

KNC

64-bit

§

MVEX_Vpmulld_zmm_k1_zmm_zmmmt

VPMULLD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 40 /r

KNC

64-bit

§

MVEX_Vgetexpps_zmm_k1_zmmmt

VGETEXPPS zmm1 {k1}, Sf32(zmm2/mt)

MVEX.512.66.0F38.W0 42 /r

KNC

64-bit

§

MVEX_Vgetexppd_zmm_k1_zmmmt

VGETEXPPD zmm1 {k1}, Sf64(zmm2/mt)

MVEX.512.66.0F38.W1 42 /r

KNC

64-bit

§

MVEX_Vpsrlvd_zmm_k1_zmm_zmmmt

VPSRLVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 45 /r

KNC

64-bit

§

MVEX_Vpsravd_zmm_k1_zmm_zmmmt

VPSRAVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 46 /r

KNC

64-bit

§

MVEX_Vpsllvd_zmm_k1_zmm_zmmmt

VPSLLVD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 47 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_48

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 48 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_49

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 49 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4A

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 4A /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_4B

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 4B /r

KNC

64-bit

§

MVEX_Vaddnps_zmm_k1_zmm_zmmmt

VADDNPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 50 /r

KNC

64-bit

§

MVEX_Vaddnpd_zmm_k1_zmm_zmmmt

VADDNPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 50 /r

KNC

64-bit

§

MVEX_Vgmaxabsps_zmm_k1_zmm_zmmmt

VGMAXABSPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 51 /r

KNC

64-bit

§

MVEX_Vgminps_zmm_k1_zmm_zmmmt

VGMINPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 52 /r

KNC

64-bit

§

MVEX_Vgminpd_zmm_k1_zmm_zmmmt

VGMINPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 52 /r

KNC

64-bit

§

MVEX_Vgmaxps_zmm_k1_zmm_zmmmt

VGMAXPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 53 /r

KNC

64-bit

§

MVEX_Vgmaxpd_zmm_k1_zmm_zmmmt

VGMAXPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 53 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_54

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 54 /r

KNC

64-bit

§

MVEX_Vfixupnanps_zmm_k1_zmm_zmmmt

VFIXUPNANPS zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 55 /r

KNC

64-bit

§

MVEX_Vfixupnanpd_zmm_k1_zmm_zmmmt

VFIXUPNANPD zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 55 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_56

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 56 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_57

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 57 /r

KNC

64-bit

§

MVEX_Vpbroadcastd_zmm_k1_mt

VPBROADCASTD zmm1 {k1}, Ui32(mt)

MVEX.512.66.0F38.W0 58 /r

KNC

64-bit

§

MVEX_Vpbroadcastq_zmm_k1_mt

VPBROADCASTQ zmm1 {k1}, Ui64(mt)

MVEX.512.66.0F38.W1 59 /r

KNC

64-bit

§

MVEX_Vbroadcasti32x4_zmm_k1_mt

VBROADCASTI32X4 zmm1 {k1}, Ui32(mt)

MVEX.512.66.0F38.W0 5A /r

KNC

64-bit

§

MVEX_Vbroadcasti64x4_zmm_k1_mt

VBROADCASTI64X4 zmm1 {k1}, Ui64(mt)

MVEX.512.66.0F38.W1 5B /r

KNC

64-bit

§

MVEX_Vpadcd_zmm_k1_kr_zmmmt

VPADCD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5C /r

KNC

64-bit

§

MVEX_Vpaddsetcd_zmm_k1_kr_zmmmt

VPADDSETCD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5D /r

KNC

64-bit

§

MVEX_Vpsbbd_zmm_k1_kr_zmmmt

VPSBBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5E /r

KNC

64-bit

§

MVEX_Vpsubsetbd_zmm_k1_kr_zmmmt

VPSUBSETBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 5F /r

KNC

64-bit

§

MVEX_Vpblendmd_zmm_k1_zmm_zmmmt

VPBLENDMD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 64 /r

KNC

64-bit

§

MVEX_Vpblendmq_zmm_k1_zmm_zmmmt

VPBLENDMQ zmm1 {k1}, zmm2, Si64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 64 /r

KNC

64-bit

§

MVEX_Vblendmps_zmm_k1_zmm_zmmmt

VBLENDMPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 65 /r

KNC

64-bit

§

MVEX_Vblendmpd_zmm_k1_zmm_zmmmt

VBLENDMPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 65 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_67

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 67 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_68

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 68 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_69

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 69 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6A

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 6A /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_6B

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 6B /r

KNC

64-bit

§

MVEX_Vpsubrd_zmm_k1_zmm_zmmmt

VPSUBRD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6C /r

KNC

64-bit

§

MVEX_Vsubrps_zmm_k1_zmm_zmmmt

VSUBRPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6D /r

KNC

64-bit

§

MVEX_Vsubrpd_zmm_k1_zmm_zmmmt

VSUBRPD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 6D /r

KNC

64-bit

§

MVEX_Vpsbbrd_zmm_k1_kr_zmmmt

VPSBBRD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6E /r

KNC

64-bit

§

MVEX_Vpsubrsetbd_zmm_k1_kr_zmmmt

VPSUBRSETBD zmm1 {k1}, k2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 6F /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_70

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 70 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_71

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 71 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_72

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 72 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_73

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 73 /r

KNC

64-bit

§

MVEX_Vpcmpltd_kr_k1_zmm_zmmmt

VPCMPLTD k2 {k1}, zmm1, Si32(zmm2/mt)

MVEX.NDS.512.66.0F38.W0 74 /r

KNC

64-bit

§

MVEX_Vscaleps_zmm_k1_zmm_zmmmt

VSCALEPS zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 84 /r

KNC

64-bit

§

MVEX_Vpmulhud_zmm_k1_zmm_zmmmt

VPMULHUD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 86 /r

KNC

64-bit

§

MVEX_Vpmulhd_zmm_k1_zmm_zmmmt

VPMULHD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 87 /r

KNC

64-bit

§

MVEX_Vpgatherdd_zmm_k1_mvt

VPGATHERDD zmm1 {k1}, Ui32(mvt)

MVEX.512.66.0F38.W0 90 /vsib

KNC

64-bit

§

MVEX_Vpgatherdq_zmm_k1_mvt

VPGATHERDQ zmm1 {k1}, Ui64(mvt)

MVEX.512.66.0F38.W1 90 /vsib

KNC

64-bit

§

MVEX_Vgatherdps_zmm_k1_mvt

VGATHERDPS zmm1 {k1}, Uf32(mvt)

MVEX.512.66.0F38.W0 92 /vsib

KNC

64-bit

§

MVEX_Vgatherdpd_zmm_k1_mvt

VGATHERDPD zmm1 {k1}, Uf64(mvt)

MVEX.512.66.0F38.W1 92 /vsib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_94

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 94 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_94

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W1 94 /r

KNC

64-bit

§

MVEX_Vfmadd132ps_zmm_k1_zmm_zmmmt

VFMADD132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 98 /r

KNC

64-bit

§

MVEX_Vfmadd132pd_zmm_k1_zmm_zmmmt

VFMADD132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 98 /r

KNC

64-bit

§

MVEX_Vfmsub132ps_zmm_k1_zmm_zmmmt

VFMSUB132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9A /r

KNC

64-bit

§

MVEX_Vfmsub132pd_zmm_k1_zmm_zmmmt

VFMSUB132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9A /r

KNC

64-bit

§

MVEX_Vfnmadd132ps_zmm_k1_zmm_zmmmt

VFNMADD132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9C /r

KNC

64-bit

§

MVEX_Vfnmadd132pd_zmm_k1_zmm_zmmmt

VFNMADD132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9C /r

KNC

64-bit

§

MVEX_Vfnmsub132ps_zmm_k1_zmm_zmmmt

VFNMSUB132PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 9E /r

KNC

64-bit

§

MVEX_Vfnmsub132pd_zmm_k1_zmm_zmmmt

VFNMSUB132PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 9E /r

KNC

64-bit

§

MVEX_Vpscatterdd_mvt_k1_zmm

VPSCATTERDD mvt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 A0 /vsib

KNC

64-bit

§

MVEX_Vpscatterdq_mvt_k1_zmm

VPSCATTERDQ mvt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 A0 /vsib

KNC

64-bit

§

MVEX_Vscatterdps_mvt_k1_zmm

VSCATTERDPS mvt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 A2 /vsib

KNC

64-bit

§

MVEX_Vscatterdpd_mvt_k1_zmm

VSCATTERDPD mvt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 A2 /vsib

KNC

64-bit

§

MVEX_Vfmadd233ps_zmm_k1_zmm_zmmmt

VFMADD233PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 A4 /r

KNC

64-bit

§

MVEX_Vfmadd213ps_zmm_k1_zmm_zmmmt

VFMADD213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 A8 /r

KNC

64-bit

§

MVEX_Vfmadd213pd_zmm_k1_zmm_zmmmt

VFMADD213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 A8 /r

KNC

64-bit

§

MVEX_Vfmsub213ps_zmm_k1_zmm_zmmmt

VFMSUB213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AA /r

KNC

64-bit

§

MVEX_Vfmsub213pd_zmm_k1_zmm_zmmmt

VFMSUB213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AA /r

KNC

64-bit

§

MVEX_Vfnmadd213ps_zmm_k1_zmm_zmmmt

VFNMADD213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AC /r

KNC

64-bit

§

MVEX_Vfnmadd213pd_zmm_k1_zmm_zmmmt

VFNMADD213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AC /r

KNC

64-bit

§

MVEX_Vfnmsub213ps_zmm_k1_zmm_zmmmt

VFNMSUB213PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 AE /r

KNC

64-bit

§

MVEX_Vfnmsub213pd_zmm_k1_zmm_zmmmt

VFNMSUB213PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 AE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B0

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 B0 /vsib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_B2

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 B2 /vsib

KNC

64-bit

§

MVEX_Vpmadd233d_zmm_k1_zmm_zmmmt

VPMADD233D zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B4 /r

KNC

64-bit

§

MVEX_Vpmadd231d_zmm_k1_zmm_zmmmt

VPMADD231D zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B5 /r

KNC

64-bit

§

MVEX_Vfmadd231ps_zmm_k1_zmm_zmmmt

VFMADD231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 B8 /r

KNC

64-bit

§

MVEX_Vfmadd231pd_zmm_k1_zmm_zmmmt

VFMADD231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 B8 /r

KNC

64-bit

§

MVEX_Vfmsub231ps_zmm_k1_zmm_zmmmt

VFMSUB231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BA /r

KNC

64-bit

§

MVEX_Vfmsub231pd_zmm_k1_zmm_zmmmt

VFMSUB231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BA /r

KNC

64-bit

§

MVEX_Vfnmadd231ps_zmm_k1_zmm_zmmmt

VFNMADD231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BC /r

KNC

64-bit

§

MVEX_Vfnmadd231pd_zmm_k1_zmm_zmmmt

VFNMADD231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BC /r

KNC

64-bit

§

MVEX_Vfnmsub231ps_zmm_k1_zmm_zmmmt

VFNMSUB231PS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 BE /r

KNC

64-bit

§

MVEX_Vfnmsub231pd_zmm_k1_zmm_zmmmt

VFNMSUB231PD zmm1 {k1}, zmm2, Sf64(zmm3/mt)

MVEX.NDS.512.66.0F38.W1 BE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_mvt_512_66_0F38_W0_C0

UNDOC zmm1 {k1}, mvt

MVEX.512.66.0F38.W0 C0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0hintdps_mvt_k1

VGATHERPF0HINTDPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0hintdpd_mvt_k1

VGATHERPF0HINTDPD Uf64(mvt) {k1}

MVEX.512.66.0F38.W1 C6 /0 /vsib

KNC

64-bit

§

MVEX_Vgatherpf0dps_mvt_k1

VGATHERPF0DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /1 /vsib

KNC

64-bit

§

MVEX_Vgatherpf1dps_mvt_k1

VGATHERPF1DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /2 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0hintdps_mvt_k1

VSCATTERPF0HINTDPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /4 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0hintdpd_mvt_k1

VSCATTERPF0HINTDPD Uf64(mvt) {k1}

MVEX.512.66.0F38.W1 C6 /4 /vsib

KNC

64-bit

§

MVEX_Vscatterpf0dps_mvt_k1

VSCATTERPF0DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /5 /vsib

KNC

64-bit

§

MVEX_Vscatterpf1dps_mvt_k1

VSCATTERPF1DPS Uf32(mvt) {k1}

MVEX.512.66.0F38.W0 C6 /6 /vsib

KNC

64-bit

§

MVEX_Vexp223ps_zmm_k1_zmmmt

VEXP223PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 C8 /r

KNC

64-bit

§

MVEX_Vlog2ps_zmm_k1_zmmmt

VLOG2PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 C9 /r

KNC

64-bit

§

MVEX_Vrcp23ps_zmm_k1_zmmmt

VRCP23PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 CA /r

KNC

64-bit

§

MVEX_Vrsqrt23ps_zmm_k1_zmmmt

VRSQRT23PS zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 CB /r

KNC

64-bit

§

MVEX_Vaddsetsps_zmm_k1_zmm_zmmmt

VADDSETSPS zmm1 {k1}, zmm2, Sf32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 CC /r

KNC

64-bit

§

MVEX_Vpaddsetsd_zmm_k1_zmm_zmmmt

VPADDSETSD zmm1 {k1}, zmm2, Si32(zmm3/mt)

MVEX.NDS.512.66.0F38.W0 CD /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CE

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 CE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W1_CE

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W1 CE /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmm_zmmmt_512_66_0F38_W0_CF

UNDOC zmm1 {k1}, zmm2, zmm3/mt

MVEX.NDS.512.66.0F38.W0 CF /r

KNC

64-bit

§

MVEX_Vloadunpackld_zmm_k1_mt

VLOADUNPACKLD zmm1 {k1}, Ui32(mt)

MVEX.512.0F38.W0 D0 /r

KNC

64-bit

§

MVEX_Vloadunpacklq_zmm_k1_mt

VLOADUNPACKLQ zmm1 {k1}, Ui64(mt)

MVEX.512.0F38.W1 D0 /r

KNC

64-bit

§

MVEX_Vpackstoreld_mt_k1_zmm

VPACKSTORELD mt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 D0 /r

KNC

64-bit

§

MVEX_Vpackstorelq_mt_k1_zmm

VPACKSTORELQ mt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 D0 /r

KNC

64-bit

§

MVEX_Vloadunpacklps_zmm_k1_mt

VLOADUNPACKLPS zmm1 {k1}, Uf32(mt)

MVEX.512.0F38.W0 D1 /r

KNC

64-bit

§

MVEX_Vloadunpacklpd_zmm_k1_mt

VLOADUNPACKLPD zmm1 {k1}, Uf64(mt)

MVEX.512.0F38.W1 D1 /r

KNC

64-bit

§

MVEX_Vpackstorelps_mt_k1_zmm

VPACKSTORELPS mt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 D1 /r

KNC

64-bit

§

MVEX_Vpackstorelpd_mt_k1_zmm

VPACKSTORELPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 D1 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D2

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D2 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D2

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 D2 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D3

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D3 /r

KNC

64-bit

§

MVEX_Vloadunpackhd_zmm_k1_mt

VLOADUNPACKHD zmm1 {k1}, Ui32(mt)

MVEX.512.0F38.W0 D4 /r

KNC

64-bit

§

MVEX_Vloadunpackhq_zmm_k1_mt

VLOADUNPACKHQ zmm1 {k1}, Ui64(mt)

MVEX.512.0F38.W1 D4 /r

KNC

64-bit

§

MVEX_Vpackstorehd_mt_k1_zmm

VPACKSTOREHD mt {k1}, Di32(zmm1)

MVEX.512.66.0F38.W0 D4 /r

KNC

64-bit

§

MVEX_Vpackstorehq_mt_k1_zmm

VPACKSTOREHQ mt {k1}, Di64(zmm1)

MVEX.512.66.0F38.W1 D4 /r

KNC

64-bit

§

MVEX_Vloadunpackhps_zmm_k1_mt

VLOADUNPACKHPS zmm1 {k1}, Uf32(mt)

MVEX.512.0F38.W0 D5 /r

KNC

64-bit

§

MVEX_Vloadunpackhpd_zmm_k1_mt

VLOADUNPACKHPD zmm1 {k1}, Uf64(mt)

MVEX.512.0F38.W1 D5 /r

KNC

64-bit

§

MVEX_Vpackstorehps_mt_k1_zmm

VPACKSTOREHPS mt {k1}, Df32(zmm1)

MVEX.512.66.0F38.W0 D5 /r

KNC

64-bit

§

MVEX_Vpackstorehpd_mt_k1_zmm

VPACKSTOREHPD mt {k1}, Df64(zmm1)

MVEX.512.66.0F38.W1 D5 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D6

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D6 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_66_0F38_W0_D6

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.66.0F38.W0 D6 /r

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_512_0F38_W0_D7

UNDOC zmm1 {k1}, zmm2/mt

MVEX.512.0F38.W0 D7 /r

KNC

64-bit

§

MVEX_Valignd_zmm_k1_zmm_zmmmt_imm8

VALIGND zmm1 {k1}, zmm2, zmm3/mt, imm8

MVEX.NDS.512.66.0F3A.W0 03 /r ib

KNC

64-bit

§

MVEX_Vpermf32x4_zmm_k1_zmmmt_imm8

VPERMF32X4 zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 07 /r ib

KNC

64-bit

§

MVEX_Vpcmpud_kr_k1_zmm_zmmmt_imm8

VPCMPUD k2 {k1}, zmm1, Si32(zmm2/mt), imm8

MVEX.NDS.512.66.0F3A.W0 1E /r ib

KNC

64-bit

§

MVEX_Vpcmpd_kr_k1_zmm_zmmmt_imm8

VPCMPD k2 {k1}, zmm1, Si32(zmm2/mt), imm8

MVEX.NDS.512.66.0F3A.W0 1F /r ib

KNC

64-bit

§

MVEX_Vgetmantps_zmm_k1_zmmmt_imm8

VGETMANTPS zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 26 /r ib

KNC

64-bit

§

MVEX_Vgetmantpd_zmm_k1_zmmmt_imm8

VGETMANTPD zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.66.0F3A.W1 26 /r ib

KNC

64-bit

§

MVEX_Vrndfxpntps_zmm_k1_zmmmt_imm8

VRNDFXPNTPS zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 52 /r ib

KNC

64-bit

§

MVEX_Vrndfxpntpd_zmm_k1_zmmmt_imm8

VRNDFXPNTPD zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.66.0F3A.W1 52 /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntudq2ps_zmm_k1_zmmmt_imm8

VCVTFXPNTUDQ2PS zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.512.0F3A.W0 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntps2udq_zmm_k1_zmmmt_imm8

VCVTFXPNTPS2UDQ zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntpd2udq_zmm_k1_zmmmt_imm8

VCVTFXPNTPD2UDQ zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.F2.0F3A.W1 CA /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntdq2ps_zmm_k1_zmmmt_imm8

VCVTFXPNTDQ2PS zmm1 {k1}, Si32(zmm2/mt), imm8

MVEX.512.0F3A.W0 CB /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntps2dq_zmm_k1_zmmmt_imm8

VCVTFXPNTPS2DQ zmm1 {k1}, Sf32(zmm2/mt), imm8

MVEX.512.66.0F3A.W0 CB /r ib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D0

UNDOC zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 D0 /r ib

KNC

64-bit

§

MVEX_Undoc_zmm_k1_zmmmt_imm8_512_66_0F3A_W0_D1

UNDOC zmm1 {k1}, zmm2/mt, imm8

MVEX.512.66.0F3A.W0 D1 /r ib

KNC

64-bit

§

MVEX_Vcvtfxpntpd2dq_zmm_k1_zmmmt_imm8

VCVTFXPNTPD2DQ zmm1 {k1}, Sf64(zmm2/mt), imm8

MVEX.512.F2.0F3A.W1 E6 /r ib

KNC

64-bit

§

Via_undoc_F30FA6F0_16

UNDOC

a16 F3 0F A6 F0

PADLOCK_UNDOC

16/32-bit

§

Via_undoc_F30FA6F0_32

UNDOC

a32 F3 0F A6 F0

PADLOCK_UNDOC

16/32/64-bit

§

Via_undoc_F30FA6F0_64

UNDOC

a64 F3 0F A6 F0

PADLOCK_UNDOC

64-bit

§

Via_undoc_F30FA6F8_16

UNDOC

a16 F3 0F A6 F8

PADLOCK_UNDOC

16/32-bit

§

Via_undoc_F30FA6F8_32

UNDOC

a32 F3 0F A6 F8

PADLOCK_UNDOC

16/32/64-bit

§

Via_undoc_F30FA6F8_64

UNDOC

a64 F3 0F A6 F8

PADLOCK_UNDOC

64-bit

§

Xsha512_16

XSHA512

a16 F3 0F A6 E0

PADLOCK_PHE

16/32-bit

§

Xsha512_32

XSHA512

a32 F3 0F A6 E0

PADLOCK_PHE

16/32/64-bit

§

Xsha512_64

XSHA512

a64 F3 0F A6 E0

PADLOCK_PHE

64-bit

§

Xstore_alt_16

XSTORE_ALT

a16 F3 0F A7 F8

PADLOCK_RNG

16/32-bit

§

Xstore_alt_32

XSTORE_ALT

a32 F3 0F A7 F8

PADLOCK_RNG

16/32/64-bit

§

Xstore_alt_64

XSTORE_ALT

a64 F3 0F A7 F8

PADLOCK_RNG

64-bit

§

Xsha512_alt_16

XSHA512_ALT

a16 F3 0F A6 D8

PADLOCK_PHE

16/32-bit

§

Xsha512_alt_32

XSHA512_ALT

a32 F3 0F A6 D8

PADLOCK_PHE

16/32/64-bit

§

Xsha512_alt_64

XSHA512_ALT

a64 F3 0F A6 D8

PADLOCK_PHE

64-bit

§

Zero_bytes

A zero-sized instruction. Can be used as a label.

§

Wrmsrns

WRMSRNS

NP 0F 01 C6

WRMSRNS

16/32/64-bit

§

Wrmsrlist

WRMSRLIST

F3 0F 01 C6

MSRLIST

64-bit

§

Rdmsrlist

RDMSRLIST

F2 0F 01 C6

MSRLIST

64-bit

§

Rmpquery

RMPQUERY

F3 0F 01 FD

RMPQUERY

64-bit

§

Prefetchit1_m8

PREFETCHIT1 m8

0F 18 /6

PREFETCHITI

16/32/64-bit

§

Prefetchit0_m8

PREFETCHIT0 m8

0F 18 /7

PREFETCHITI

16/32/64-bit

§

Aadd_m32_r32

AADD m32, r32

NP 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aadd_m64_r64

AADD m64, r64

NP o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Aand_m32_r32

AAND m32, r32

66 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aand_m64_r64

AAND m64, r64

66 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Axor_m32_r32

AXOR m32, r32

F3 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Axor_m64_r64

AXOR m64, r64

F3 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

Aor_m32_r32

AOR m32, r32

F2 0F 38 FC !(11):rrr:bbb

RAO-INT

16/32/64-bit

§

Aor_m64_r64

AOR m64, r64

F2 o64 0F 38 FC !(11):rrr:bbb

RAO-INT

64-bit

§

VEX_Vpdpbuud_xmm_xmm_xmmm128

VPDPBUUD xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuud_ymm_ymm_ymmm256

VPDPBUUD ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsud_xmm_xmm_xmmm128

VPDPBSUD xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsud_ymm_ymm_ymmm256

VPDPBSUD ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssd_xmm_xmm_xmmm128

VPDPBSSD xmm1, xmm2, xmm3/m128

VEX.128.F2.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssd_ymm_ymm_ymmm256

VPDPBSSD ymm1, ymm2, ymm3/m256

VEX.256.F2.0F38.W0 50 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuuds_xmm_xmm_xmmm128

VPDPBUUDS xmm1, xmm2, xmm3/m128

VEX.128.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbuuds_ymm_ymm_ymmm256

VPDPBUUDS ymm1, ymm2, ymm3/m256

VEX.256.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsuds_xmm_xmm_xmmm128

VPDPBSUDS xmm1, xmm2, xmm3/m128

VEX.128.F3.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbsuds_ymm_ymm_ymmm256

VPDPBSUDS ymm1, ymm2, ymm3/m256

VEX.256.F3.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssds_xmm_xmm_xmmm128

VPDPBSSDS xmm1, xmm2, xmm3/m128

VEX.128.F2.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Vpdpbssds_ymm_ymm_ymmm256

VPDPBSSDS ymm1, ymm2, ymm3/m256

VEX.256.F2.0F38.W0 51 /r

AVX-VNNI-INT8

16/32/64-bit

§

VEX_Tdpfp16ps_tmm_tmm_tmm

TDPFP16PS tmm1, tmm2, tmm3

VEX.128.F2.0F38.W0 5C 11:rrr:bbb

AMX-FP16

64-bit

§

VEX_Vcvtneps2bf16_xmm_xmmm128

VCVTNEPS2BF16 xmm1, xmm2/m128

VEX.128.F3.0F38.W0 72 /r

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneps2bf16_xmm_ymmm256

VCVTNEPS2BF16 xmm1, ymm2/m256

VEX.256.F3.0F38.W0 72 /r

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneoph2ps_xmm_m128

VCVTNEOPH2PS xmm1, m128

VEX.128.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneoph2ps_ymm_m256

VCVTNEOPH2PS ymm1, m256

VEX.256.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneeph2ps_xmm_m128

VCVTNEEPH2PS xmm1, m128

VEX.128.66.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneeph2ps_ymm_m256

VCVTNEEPH2PS ymm1, m256

VEX.256.66.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneebf162ps_xmm_m128

VCVTNEEBF162PS xmm1, m128

VEX.128.F3.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneebf162ps_ymm_m256

VCVTNEEBF162PS ymm1, m256

VEX.256.F3.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneobf162ps_xmm_m128

VCVTNEOBF162PS xmm1, m128

VEX.128.F2.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vcvtneobf162ps_ymm_m256

VCVTNEOBF162PS ymm1, m256

VEX.256.F2.0F38.W0 B0 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnesh2ps_xmm_m16

VBCSTNESH2PS xmm1, m16

VEX.128.66.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnesh2ps_ymm_m16

VBCSTNESH2PS ymm1, m16

VEX.256.66.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnebf162ps_xmm_m16

VBCSTNEBF162PS xmm1, m16

VEX.128.F3.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vbcstnebf162ps_ymm_m16

VBCSTNEBF162PS ymm1, m16

VEX.256.F3.0F38.W0 B1 !(11):rrr:bbb

AVX-NE-CONVERT

16/32/64-bit

§

VEX_Vpmadd52luq_xmm_xmm_xmmm128

VPMADD52LUQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B4 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52luq_ymm_ymm_ymmm256

VPMADD52LUQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B4 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52huq_xmm_xmm_xmmm128

VPMADD52HUQ xmm1, xmm2, xmm3/m128

VEX.128.66.0F38.W1 B5 /r

AVX-IFMA

16/32/64-bit

§

VEX_Vpmadd52huq_ymm_ymm_ymmm256

VPMADD52HUQ ymm1, ymm2, ymm3/m256

VEX.256.66.0F38.W1 B5 /r

AVX-IFMA

16/32/64-bit

§

VEX_Cmpoxadd_m32_r32_r32

CMPOXADD m32, r32, r32

VEX.128.66.0F38.W0 E0 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpoxadd_m64_r64_r64

CMPOXADD m64, r64, r64

VEX.128.66.0F38.W1 E0 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnoxadd_m32_r32_r32

CMPNOXADD m32, r32, r32

VEX.128.66.0F38.W0 E1 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnoxadd_m64_r64_r64

CMPNOXADD m64, r64, r64

VEX.128.66.0F38.W1 E1 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbxadd_m32_r32_r32

CMPBXADD m32, r32, r32

VEX.128.66.0F38.W0 E2 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbxadd_m64_r64_r64

CMPBXADD m64, r64, r64

VEX.128.66.0F38.W1 E2 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbxadd_m32_r32_r32

CMPNBXADD m32, r32, r32

VEX.128.66.0F38.W0 E3 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbxadd_m64_r64_r64

CMPNBXADD m64, r64, r64

VEX.128.66.0F38.W1 E3 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpzxadd_m32_r32_r32

CMPZXADD m32, r32, r32

VEX.128.66.0F38.W0 E4 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpzxadd_m64_r64_r64

CMPZXADD m64, r64, r64

VEX.128.66.0F38.W1 E4 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnzxadd_m32_r32_r32

CMPNZXADD m32, r32, r32

VEX.128.66.0F38.W0 E5 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnzxadd_m64_r64_r64

CMPNZXADD m64, r64, r64

VEX.128.66.0F38.W1 E5 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbexadd_m32_r32_r32

CMPBEXADD m32, r32, r32

VEX.128.66.0F38.W0 E6 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpbexadd_m64_r64_r64

CMPBEXADD m64, r64, r64

VEX.128.66.0F38.W1 E6 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbexadd_m32_r32_r32

CMPNBEXADD m32, r32, r32

VEX.128.66.0F38.W0 E7 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnbexadd_m64_r64_r64

CMPNBEXADD m64, r64, r64

VEX.128.66.0F38.W1 E7 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpsxadd_m32_r32_r32

CMPSXADD m32, r32, r32

VEX.128.66.0F38.W0 E8 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpsxadd_m64_r64_r64

CMPSXADD m64, r64, r64

VEX.128.66.0F38.W1 E8 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnsxadd_m32_r32_r32

CMPNSXADD m32, r32, r32

VEX.128.66.0F38.W0 E9 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnsxadd_m64_r64_r64

CMPNSXADD m64, r64, r64

VEX.128.66.0F38.W1 E9 !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmppxadd_m32_r32_r32

CMPPXADD m32, r32, r32

VEX.128.66.0F38.W0 EA !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmppxadd_m64_r64_r64

CMPPXADD m64, r64, r64

VEX.128.66.0F38.W1 EA !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnpxadd_m32_r32_r32

CMPNPXADD m32, r32, r32

VEX.128.66.0F38.W0 EB !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnpxadd_m64_r64_r64

CMPNPXADD m64, r64, r64

VEX.128.66.0F38.W1 EB !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplxadd_m32_r32_r32

CMPLXADD m32, r32, r32

VEX.128.66.0F38.W0 EC !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplxadd_m64_r64_r64

CMPLXADD m64, r64, r64

VEX.128.66.0F38.W1 EC !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlxadd_m32_r32_r32

CMPNLXADD m32, r32, r32

VEX.128.66.0F38.W0 ED !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlxadd_m64_r64_r64

CMPNLXADD m64, r64, r64

VEX.128.66.0F38.W1 ED !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplexadd_m32_r32_r32

CMPLEXADD m32, r32, r32

VEX.128.66.0F38.W0 EE !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmplexadd_m64_r64_r64

CMPLEXADD m64, r64, r64

VEX.128.66.0F38.W1 EE !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlexadd_m32_r32_r32

CMPNLEXADD m32, r32, r32

VEX.128.66.0F38.W0 EF !(11):rrr:bbb

CMPCCXADD

64-bit

§

VEX_Cmpnlexadd_m64_r64_r64

CMPNLEXADD m64, r64, r64

VEX.128.66.0F38.W1 EF !(11):rrr:bbb

CMPCCXADD

64-bit

Implementations§

source§

impl Code

source

pub fn values() -> impl Iterator<Item = Code> + DoubleEndedIterator + ExactSizeIterator + FusedIterator

Iterates over all Code enum values

source§

impl Code

source

pub fn mnemonic(self) -> Mnemonic

Gets the mnemonic

Examples
use iced_x86::*;
assert_eq!(Code::Add_rm32_r32.mnemonic(), Mnemonic::Add);

Trait Implementations§

source§

impl Clone for Code

source§

fn clone(&self) -> Code

Returns a copy of the value. Read more
1.0.0§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for Code

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Default for Code

source§

fn default() -> Self

Returns the “default value” for a type. Read more
source§

impl Hash for Code

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given [Hasher]. Read more
1.3.0§

fn hash_slice<H>(data: &[Self], state: &mut H) where H: Hasher, Self: Sized,

Feeds a slice of this type into the given [Hasher]. Read more
source§

impl Ord for Code

source§

fn cmp(&self, other: &Code) -> Ordering

This method returns an [Ordering] between self and other. Read more
1.21.0§

fn max(self, other: Self) -> Self where Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0§

fn min(self, other: Self) -> Self where Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0§

fn clamp(self, min: Self, max: Self) -> Self where Self: Sized + PartialOrd<Self>,

Restrict a value to a certain interval. Read more
source§

impl PartialEq<Code> for Code

source§

fn eq(&self, other: &Code) -> bool

This method tests for self and other values to be equal, and is used by ==.
1.0.0§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.
source§

impl PartialOrd<Code> for Code

source§

fn partial_cmp(&self, other: &Code) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= operator. Read more
1.0.0§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= operator. Read more
source§

impl TryFrom<usize> for Code

§

type Error = IcedError

The type returned in the event of a conversion error.
source§

fn try_from(value: usize) -> Result<Self, Self::Error>

Performs the conversion.
source§

impl Copy for Code

source§

impl Eq for Code

source§

impl StructuralEq for Code

source§

impl StructuralPartialEq for Code

Auto Trait Implementations§

§

impl RefUnwindSafe for Code

§

impl Send for Code

§

impl Sync for Code

§

impl Unpin for Code

§

impl UnwindSafe for Code

Blanket Implementations§

§

impl<T> Any for T where T: 'static + ?Sized,

§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
§

impl<T> Borrow<T> for T where T: ?Sized,

const: unstable§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
§

impl<T> BorrowMut<T> for T where T: ?Sized,

const: unstable§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<T> From<T> for T

const: unstable§

fn from(t: T) -> T

Returns the argument unchanged.

§

impl<T, U> Into<U> for T where U: From<T>,

const: unstable§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of [From]<T> for U chooses to do.

§

impl<T> ToOwned for T where T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
§

impl<T, U> TryFrom<U> for T where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
const: unstable§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
§

impl<T, U> TryInto<U> for T where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
const: unstable§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.