Commit 37e13a1e authored by Linus Torvalds

Merge branch 'perf-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull perf fixes from Ingo Molnar:
 "This tree contains tooling fixes plus some additions:

   - fixes to the vdso2c build environment that Stephen Rothwell is
     using for the linux-next build (Arnaldo Carvalho de Melo)

   - AVX-512 instruction mappings (Adrian Hunter)

   - misc fixes"

* 'perf-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  Revert "perf tools: event.h needs asm/perf_regs.h"
  x86: Make the vdso2c compiler use the host architecture headers
  tools build: Fix objtool build with ARCH=x86_64
  objtool: Always use host headers
  objtool: Use tools/scripts/Makefile.arch to get ARCH and HOSTARCH
  tools build: Add HOSTARCH Makefile variable
  perf tests kmod-path: Fix build on ubuntu:16.04-x-armhf
  perf tools: Add AVX-512 instructions to the new instructions test
  perf tools: Add AVX-512 support to the instruction decoder used by Intel PT
  x86/insn: Add AVX-512 support to the instruction decoder
  x86/insn: perf tools: Fix vcvtph2ps instruction decoding
parents e6580525 674d2d69
@@ -55,7 +55,7 @@ VDSO_LDFLAGS_vdso.lds = -m64 -Wl,-soname=linux-vdso.so.1 \
 $(obj)/vdso64.so.dbg: $(src)/vdso.lds $(vobjs) FORCE
 	$(call if_changed,vdso)
-HOST_EXTRACFLAGS += -I$(srctree)/tools/include -I$(srctree)/include/uapi -I$(srctree)/arch/x86/include/uapi
+HOST_EXTRACFLAGS += -I$(srctree)/tools/include -I$(srctree)/include/uapi -I$(srctree)/arch/$(SUBARCH)/include/uapi
 hostprogs-y += vdso2c
 quiet_cmd_vdso2c = VDSO2C $@
...
@@ -48,6 +48,7 @@
 /* AVX VEX prefixes */
 #define INAT_PFX_VEX2 13 /* 2-bytes VEX prefix */
 #define INAT_PFX_VEX3 14 /* 3-bytes VEX prefix */
+#define INAT_PFX_EVEX 15 /* EVEX prefix */
 #define INAT_LSTPFX_MAX 3
 #define INAT_LGCPFX_MAX 11
@@ -89,6 +90,7 @@
 #define INAT_VARIANT (1 << (INAT_FLAG_OFFS + 4))
 #define INAT_VEXOK (1 << (INAT_FLAG_OFFS + 5))
 #define INAT_VEXONLY (1 << (INAT_FLAG_OFFS + 6))
+#define INAT_EVEXONLY (1 << (INAT_FLAG_OFFS + 7))
 /* Attribute making macros for attribute tables */
 #define INAT_MAKE_PREFIX(pfx) (pfx << INAT_PFX_OFFS)
 #define INAT_MAKE_ESCAPE(esc) (esc << INAT_ESC_OFFS)
@@ -141,7 +143,13 @@ static inline int inat_last_prefix_id(insn_attr_t attr)
 static inline int inat_is_vex_prefix(insn_attr_t attr)
 {
 	attr &= INAT_PFX_MASK;
-	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3;
+	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3 ||
+	       attr == INAT_PFX_EVEX;
+}
+
+static inline int inat_is_evex_prefix(insn_attr_t attr)
+{
+	return (attr & INAT_PFX_MASK) == INAT_PFX_EVEX;
 }
 
 static inline int inat_is_vex3_prefix(insn_attr_t attr)
@@ -216,6 +224,11 @@ static inline int inat_accept_vex(insn_attr_t attr)
 static inline int inat_must_vex(insn_attr_t attr)
 {
-	return attr & INAT_VEXONLY;
+	return attr & (INAT_VEXONLY | INAT_EVEXONLY);
+}
+
+static inline int inat_must_evex(insn_attr_t attr)
+{
+	return attr & INAT_EVEXONLY;
 }
 #endif
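How the new bits fit together: INAT_PFX_EVEX extends the prefix-id enumeration in the attribute's low bits, while INAT_EVEXONLY is an independent flag bit, so EVEX is accepted everywhere a VEX prefix is, and inat_must_vex() now also rejects legacy encodings of EVEX-only opcodes. A minimal self-contained sketch of that composition (the INAT_* values here are illustrative stand-ins, not the kernel's exact bit layout):

#include <stdio.h>

typedef unsigned int insn_attr_t;

#define INAT_PFX_MASK 0x0f /* assumed: low bits hold the last-prefix id */
#define INAT_PFX_VEX2 13
#define INAT_PFX_VEX3 14
#define INAT_PFX_EVEX 15

#define INAT_FLAG_OFFS 16 /* assumed offset of the flag bits */
#define INAT_VEXOK (1 << (INAT_FLAG_OFFS + 5))
#define INAT_VEXONLY (1 << (INAT_FLAG_OFFS + 6))
#define INAT_EVEXONLY (1 << (INAT_FLAG_OFFS + 7))

/* EVEX counts as a VEX-class prefix wherever VEX is checked... */
static int is_vex_prefix(insn_attr_t attr)
{
	attr &= INAT_PFX_MASK;
	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3 ||
	       attr == INAT_PFX_EVEX;
}

/* ...and an EVEX-only opcode also refuses a plain legacy encoding. */
static int must_vex(insn_attr_t attr)
{
	return (attr & (INAT_VEXONLY | INAT_EVEXONLY)) != 0;
}

int main(void)
{
	insn_attr_t ev = INAT_VEXOK | INAT_EVEXONLY; /* what (ev) generates */

	printf("EVEX is a VEX prefix: %d\n", is_vex_prefix(INAT_PFX_EVEX));
	printf("(ev) opcode must be VEX/EVEX encoded: %d\n", must_vex(ev));
	return 0;
}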
@@ -91,6 +91,7 @@ struct insn {
 #define X86_VEX_B(vex) ((vex) & 0x20) /* VEX3 Byte1 */
 #define X86_VEX_L(vex) ((vex) & 0x04) /* VEX3 Byte2, VEX2 Byte1 */
 /* VEX bit fields */
+#define X86_EVEX_M(vex) ((vex) & 0x03) /* EVEX Byte1 */
 #define X86_VEX3_M(vex) ((vex) & 0x1f) /* VEX3 Byte1 */
 #define X86_VEX2_M 1 /* VEX2.M always 1 */
 #define X86_VEX_V(vex) (((vex) & 0x78) >> 3) /* VEX3 Byte2, VEX2 Byte1 */
@@ -133,6 +134,13 @@ static inline int insn_is_avx(struct insn *insn)
 	return (insn->vex_prefix.value != 0);
 }
 
+static inline int insn_is_evex(struct insn *insn)
+{
+	if (!insn->prefixes.got)
+		insn_get_prefixes(insn);
+	return (insn->vex_prefix.nbytes == 4);
+}
+
 /* Ensure this instruction is decoded completely */
 static inline int insn_complete(struct insn *insn)
 {
@@ -144,8 +152,10 @@ static inline insn_byte_t insn_vex_m_bits(struct insn *insn)
 {
 	if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */
 		return X86_VEX2_M;
-	else
+	else if (insn->vex_prefix.nbytes == 3) /* 3 bytes VEX */
 		return X86_VEX3_M(insn->vex_prefix.bytes[1]);
+	else /* EVEX */
+		return X86_EVEX_M(insn->vex_prefix.bytes[1]);
 }
 
 static inline insn_byte_t insn_vex_p_bits(struct insn *insn)
...
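The only new field macro here is X86_EVEX_M(): the EVEX opcode-map select sits in the low two bits of the first payload byte, which lets insn_vex_m_bits() handle all three prefix forms uniformly. A worked example, using an encoding from the test data added further down in this commit (62 f1 7d 48 6f f5, vmovdqa32 %zmm5,%zmm6):

#include <stdio.h>

#define X86_EVEX_M(vex) ((vex) & 0x03) /* EVEX Byte1, as defined above */

int main(void)
{
	/* 0x62 selects EVEX; 0xf1 0x7d 0x48 is the three-byte payload. */
	unsigned char evex[4] = { 0x62, 0xf1, 0x7d, 0x48 };

	/* mm = 01b: the opcode byte that follows (0x6f) is looked up in
	 * the 0x0f map, i.e. the same row as the legacy 0f 6f. */
	printf("EVEX.mm = %u\n", X86_EVEX_M(evex[1]));
	return 0;
}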
@@ -155,14 +155,24 @@ void insn_get_prefixes(struct insn *insn)
 		/*
 		 * In 32-bits mode, if the [7:6] bits (mod bits of
 		 * ModRM) on the second byte are not 11b, it is
-		 * LDS or LES.
+		 * LDS or LES or BOUND.
 		 */
 		if (X86_MODRM_MOD(b2) != 3)
 			goto vex_end;
 	}
 	insn->vex_prefix.bytes[0] = b;
 	insn->vex_prefix.bytes[1] = b2;
-	if (inat_is_vex3_prefix(attr)) {
+	if (inat_is_evex_prefix(attr)) {
+		b2 = peek_nbyte_next(insn_byte_t, insn, 2);
+		insn->vex_prefix.bytes[2] = b2;
+		b2 = peek_nbyte_next(insn_byte_t, insn, 3);
+		insn->vex_prefix.bytes[3] = b2;
+		insn->vex_prefix.nbytes = 4;
+		insn->next_byte += 4;
+		if (insn->x86_64 && X86_VEX_W(b2))
+			/* VEX.W overrides opnd_size */
+			insn->opnd_bytes = 8;
+	} else if (inat_is_vex3_prefix(attr)) {
 		b2 = peek_nbyte_next(insn_byte_t, insn, 2);
 		insn->vex_prefix.bytes[2] = b2;
 		insn->vex_prefix.nbytes = 3;
@@ -221,7 +231,9 @@ void insn_get_opcode(struct insn *insn)
 		m = insn_vex_m_bits(insn);
 		p = insn_vex_p_bits(insn);
 		insn->attr = inat_get_avx_attribute(op, m, p);
-		if (!inat_accept_vex(insn->attr) && !inat_is_group(insn->attr))
+		if ((inat_must_evex(insn->attr) && !insn_is_evex(insn)) ||
+		    (!inat_accept_vex(insn->attr) &&
+		     !inat_is_group(insn->attr)))
 			insn->attr = 0; /* This instruction is bad */
 		goto end; /* VEX has only 1 byte for opcode */
 	}
...
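The net effect of the opcode-time check: an opcode whose attribute says EVEX-only is thrown away unless the 4-byte EVEX prefix was actually decoded. A hedged stand-alone sketch of just that rule (toy types, not the kernel's struct insn):

#include <stdio.h>

#define INAT_EVEXONLY (1 << 15) /* assumed flag bit */

struct toy_insn {
	int vex_nbytes; /* 2 or 3 = VEX, 4 = EVEX, 0 = none */
	unsigned int attr; /* attribute from the AVX table lookup */
};

/* Mirrors: if (inat_must_evex(attr) && !insn_is_evex(insn)) attr = 0; */
static void check_opcode(struct toy_insn *insn)
{
	if ((insn->attr & INAT_EVEXONLY) && insn->vex_nbytes != 4)
		insn->attr = 0; /* this instruction is bad */
}

int main(void)
{
	struct toy_insn vex3 = { 3, INAT_EVEXONLY }; /* c4 .. on an (ev) opcode */
	struct toy_insn evex = { 4, INAT_EVEXONLY }; /* 62 .. on an (ev) opcode */

	check_opcode(&vex3);
	check_opcode(&evex);
	printf("VEX3-encoded (ev) opcode: attr=%u (rejected)\n", vex3.attr);
	printf("EVEX-encoded (ev) opcode: attr=%u (kept)\n", evex.attr);
	return 0;
}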
@@ -13,12 +13,17 @@
 # opcode: escape # escaped-name
 # EndTable
 #
+# mnemonics that begin with lowercase 'v' accept a VEX or EVEX prefix
+# mnemonics that begin with lowercase 'k' accept a VEX prefix
+#
 #<group maps>
 # GrpTable: GrpXXX
 # reg: mnemonic [operand1[,operand2...]] [(extra1)[,(extra2)...] [| 2nd-mnemonic ...]
 # EndTable
 #
 # AVX Superscripts
+# (ev): this opcode requires EVEX prefix.
+# (evo): this opcode is changed by EVEX prefix (EVEX opcode)
 # (v): this opcode requires VEX prefix.
 # (v1): this opcode only supports 128bit VEX.
 #
@@ -137,7 +142,7 @@ AVXcode:
 # 0x60 - 0x6f
 60: PUSHA/PUSHAD (i64)
 61: POPA/POPAD (i64)
-62: BOUND Gv,Ma (i64)
+62: BOUND Gv,Ma (i64) | EVEX (Prefix)
 63: ARPL Ew,Gw (i64) | MOVSXD Gv,Ev (o64)
 64: SEG=FS (Prefix)
 65: SEG=GS (Prefix)
@@ -399,17 +404,17 @@ AVXcode: 1
 3f:
 # 0x0f 0x40-0x4f
 40: CMOVO Gv,Ev
-41: CMOVNO Gv,Ev
-42: CMOVB/C/NAE Gv,Ev
+41: CMOVNO Gv,Ev | kandw/q Vk,Hk,Uk | kandb/d Vk,Hk,Uk (66)
+42: CMOVB/C/NAE Gv,Ev | kandnw/q Vk,Hk,Uk | kandnb/d Vk,Hk,Uk (66)
 43: CMOVAE/NB/NC Gv,Ev
-44: CMOVE/Z Gv,Ev
-45: CMOVNE/NZ Gv,Ev
-46: CMOVBE/NA Gv,Ev
-47: CMOVA/NBE Gv,Ev
+44: CMOVE/Z Gv,Ev | knotw/q Vk,Uk | knotb/d Vk,Uk (66)
+45: CMOVNE/NZ Gv,Ev | korw/q Vk,Hk,Uk | korb/d Vk,Hk,Uk (66)
+46: CMOVBE/NA Gv,Ev | kxnorw/q Vk,Hk,Uk | kxnorb/d Vk,Hk,Uk (66)
+47: CMOVA/NBE Gv,Ev | kxorw/q Vk,Hk,Uk | kxorb/d Vk,Hk,Uk (66)
 48: CMOVS Gv,Ev
 49: CMOVNS Gv,Ev
-4a: CMOVP/PE Gv,Ev
-4b: CMOVNP/PO Gv,Ev
+4a: CMOVP/PE Gv,Ev | kaddw/q Vk,Hk,Uk | kaddb/d Vk,Hk,Uk (66)
+4b: CMOVNP/PO Gv,Ev | kunpckbw Vk,Hk,Uk (66) | kunpckwd/dq Vk,Hk,Uk
 4c: CMOVL/NGE Gv,Ev
 4d: CMOVNL/GE Gv,Ev
 4e: CMOVLE/NG Gv,Ev
@@ -426,7 +431,7 @@ AVXcode: 1
 58: vaddps Vps,Hps,Wps | vaddpd Vpd,Hpd,Wpd (66) | vaddss Vss,Hss,Wss (F3),(v1) | vaddsd Vsd,Hsd,Wsd (F2),(v1)
 59: vmulps Vps,Hps,Wps | vmulpd Vpd,Hpd,Wpd (66) | vmulss Vss,Hss,Wss (F3),(v1) | vmulsd Vsd,Hsd,Wsd (F2),(v1)
 5a: vcvtps2pd Vpd,Wps | vcvtpd2ps Vps,Wpd (66) | vcvtss2sd Vsd,Hx,Wss (F3),(v1) | vcvtsd2ss Vss,Hx,Wsd (F2),(v1)
-5b: vcvtdq2ps Vps,Wdq | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
+5b: vcvtdq2ps Vps,Wdq | vcvtqq2ps Vps,Wqq (evo) | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
 5c: vsubps Vps,Hps,Wps | vsubpd Vpd,Hpd,Wpd (66) | vsubss Vss,Hss,Wss (F3),(v1) | vsubsd Vsd,Hsd,Wsd (F2),(v1)
 5d: vminps Vps,Hps,Wps | vminpd Vpd,Hpd,Wpd (66) | vminss Vss,Hss,Wss (F3),(v1) | vminsd Vsd,Hsd,Wsd (F2),(v1)
 5e: vdivps Vps,Hps,Wps | vdivpd Vpd,Hpd,Wpd (66) | vdivss Vss,Hss,Wss (F3),(v1) | vdivsd Vsd,Hsd,Wsd (F2),(v1)
@@ -447,7 +452,7 @@ AVXcode: 1
 6c: vpunpcklqdq Vx,Hx,Wx (66),(v1)
 6d: vpunpckhqdq Vx,Hx,Wx (66),(v1)
 6e: movd/q Pd,Ey | vmovd/q Vy,Ey (66),(v1)
-6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqu Vx,Wx (F3)
+6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqa32/64 Vx,Wx (66),(evo) | vmovdqu Vx,Wx (F3) | vmovdqu32/64 Vx,Wx (F3),(evo) | vmovdqu8/16 Vx,Wx (F2),(ev)
 # 0x0f 0x70-0x7f
 70: pshufw Pq,Qq,Ib | vpshufd Vx,Wx,Ib (66),(v1) | vpshufhw Vx,Wx,Ib (F3),(v1) | vpshuflw Vx,Wx,Ib (F2),(v1)
 71: Grp12 (1A)
@@ -458,14 +463,14 @@ AVXcode: 1
 76: pcmpeqd Pq,Qq | vpcmpeqd Vx,Hx,Wx (66),(v1)
 # Note: Remove (v), because vzeroall and vzeroupper becomes emms without VEX.
 77: emms | vzeroupper | vzeroall
-78: VMREAD Ey,Gy
-79: VMWRITE Gy,Ey
-7a:
-7b:
+78: VMREAD Ey,Gy | vcvttps2udq/pd2udq Vx,Wpd (evo) | vcvttsd2usi Gv,Wx (F2),(ev) | vcvttss2usi Gv,Wx (F3),(ev) | vcvttps2uqq/pd2uqq Vx,Wx (66),(ev)
+79: VMWRITE Gy,Ey | vcvtps2udq/pd2udq Vx,Wpd (evo) | vcvtsd2usi Gv,Wx (F2),(ev) | vcvtss2usi Gv,Wx (F3),(ev) | vcvtps2uqq/pd2uqq Vx,Wx (66),(ev)
+7a: vcvtudq2pd/uqq2pd Vpd,Wx (F3),(ev) | vcvtudq2ps/uqq2ps Vpd,Wx (F2),(ev) | vcvttps2qq/pd2qq Vx,Wx (66),(ev)
+7b: vcvtusi2sd Vpd,Hpd,Ev (F2),(ev) | vcvtusi2ss Vps,Hps,Ev (F3),(ev) | vcvtps2qq/pd2qq Vx,Wx (66),(ev)
 7c: vhaddpd Vpd,Hpd,Wpd (66) | vhaddps Vps,Hps,Wps (F2)
 7d: vhsubpd Vpd,Hpd,Wpd (66) | vhsubps Vps,Hps,Wps (F2)
 7e: movd/q Ey,Pd | vmovd/q Ey,Vy (66),(v1) | vmovq Vq,Wq (F3),(v1)
-7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqu Wx,Vx (F3)
+7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqa32/64 Wx,Vx (66),(evo) | vmovdqu Wx,Vx (F3) | vmovdqu32/64 Wx,Vx (F3),(evo) | vmovdqu8/16 Wx,Vx (F2),(ev)
 # 0x0f 0x80-0x8f
 # Note: "forced64" is Intel CPU behavior (see comment about CALL insn).
 80: JO Jz (f64)
@@ -485,16 +490,16 @@ AVXcode: 1
 8e: JLE/JNG Jz (f64)
 8f: JNLE/JG Jz (f64)
 # 0x0f 0x90-0x9f
-90: SETO Eb
-91: SETNO Eb
-92: SETB/C/NAE Eb
-93: SETAE/NB/NC Eb
+90: SETO Eb | kmovw/q Vk,Wk | kmovb/d Vk,Wk (66)
+91: SETNO Eb | kmovw/q Mv,Vk | kmovb/d Mv,Vk (66)
+92: SETB/C/NAE Eb | kmovw Vk,Rv | kmovb Vk,Rv (66) | kmovq/d Vk,Rv (F2)
+93: SETAE/NB/NC Eb | kmovw Gv,Uk | kmovb Gv,Uk (66) | kmovq/d Gv,Uk (F2)
 94: SETE/Z Eb
 95: SETNE/NZ Eb
 96: SETBE/NA Eb
 97: SETA/NBE Eb
-98: SETS Eb
-99: SETNS Eb
+98: SETS Eb | kortestw/q Vk,Uk | kortestb/d Vk,Uk (66)
+99: SETNS Eb | ktestw/q Vk,Uk | ktestb/d Vk,Uk (66)
 9a: SETP/PE Eb
 9b: SETNP/PO Eb
 9c: SETL/NGE Eb
@@ -564,11 +569,11 @@ d7: pmovmskb Gd,Nq | vpmovmskb Gd,Ux (66),(v1)
 d8: psubusb Pq,Qq | vpsubusb Vx,Hx,Wx (66),(v1)
 d9: psubusw Pq,Qq | vpsubusw Vx,Hx,Wx (66),(v1)
 da: pminub Pq,Qq | vpminub Vx,Hx,Wx (66),(v1)
-db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1)
+db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1) | vpandd/q Vx,Hx,Wx (66),(evo)
 dc: paddusb Pq,Qq | vpaddusb Vx,Hx,Wx (66),(v1)
 dd: paddusw Pq,Qq | vpaddusw Vx,Hx,Wx (66),(v1)
 de: pmaxub Pq,Qq | vpmaxub Vx,Hx,Wx (66),(v1)
-df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1)
+df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1) | vpandnd/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0xe0-0xef
 e0: pavgb Pq,Qq | vpavgb Vx,Hx,Wx (66),(v1)
 e1: psraw Pq,Qq | vpsraw Vx,Hx,Wx (66),(v1)
@@ -576,16 +581,16 @@ e2: psrad Pq,Qq | vpsrad Vx,Hx,Wx (66),(v1)
 e3: pavgw Pq,Qq | vpavgw Vx,Hx,Wx (66),(v1)
 e4: pmulhuw Pq,Qq | vpmulhuw Vx,Hx,Wx (66),(v1)
 e5: pmulhw Pq,Qq | vpmulhw Vx,Hx,Wx (66),(v1)
-e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtpd2dq Vx,Wpd (F2)
+e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtdq2pd/qq2pd Vx,Wdq (F3),(evo) | vcvtpd2dq Vx,Wpd (F2)
 e7: movntq Mq,Pq | vmovntdq Mx,Vx (66)
 e8: psubsb Pq,Qq | vpsubsb Vx,Hx,Wx (66),(v1)
 e9: psubsw Pq,Qq | vpsubsw Vx,Hx,Wx (66),(v1)
 ea: pminsw Pq,Qq | vpminsw Vx,Hx,Wx (66),(v1)
-eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1)
+eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1) | vpord/q Vx,Hx,Wx (66),(evo)
 ec: paddsb Pq,Qq | vpaddsb Vx,Hx,Wx (66),(v1)
 ed: paddsw Pq,Qq | vpaddsw Vx,Hx,Wx (66),(v1)
 ee: pmaxsw Pq,Qq | vpmaxsw Vx,Hx,Wx (66),(v1)
-ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1)
+ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1) | vpxord/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0xf0-0xff
 f0: vlddqu Vx,Mx (F2)
 f1: psllw Pq,Qq | vpsllw Vx,Hx,Wx (66),(v1)
@@ -626,81 +631,105 @@ AVXcode: 2
 0e: vtestps Vx,Wx (66),(v)
 0f: vtestpd Vx,Wx (66),(v)
 # 0x0f 0x38 0x10-0x1f
-10: pblendvb Vdq,Wdq (66)
-11:
-12:
-13: vcvtph2ps Vx,Wx,Ib (66),(v)
-14: blendvps Vdq,Wdq (66)
-15: blendvpd Vdq,Wdq (66)
-16: vpermps Vqq,Hqq,Wqq (66),(v)
+10: pblendvb Vdq,Wdq (66) | vpsrlvw Vx,Hx,Wx (66),(evo) | vpmovuswb Wx,Vx (F3),(ev)
+11: vpmovusdb Wx,Vd (F3),(ev) | vpsravw Vx,Hx,Wx (66),(ev)
+12: vpmovusqb Wx,Vq (F3),(ev) | vpsllvw Vx,Hx,Wx (66),(ev)
+13: vcvtph2ps Vx,Wx (66),(v) | vpmovusdw Wx,Vd (F3),(ev)
+14: blendvps Vdq,Wdq (66) | vpmovusqw Wx,Vq (F3),(ev) | vprorvd/q Vx,Hx,Wx (66),(evo)
+15: blendvpd Vdq,Wdq (66) | vpmovusqd Wx,Vq (F3),(ev) | vprolvd/q Vx,Hx,Wx (66),(evo)
+16: vpermps Vqq,Hqq,Wqq (66),(v) | vpermps/d Vqq,Hqq,Wqq (66),(evo)
 17: vptest Vx,Wx (66)
 18: vbroadcastss Vx,Wd (66),(v)
-19: vbroadcastsd Vqq,Wq (66),(v)
-1a: vbroadcastf128 Vqq,Mdq (66),(v)
-1b:
+19: vbroadcastsd Vqq,Wq (66),(v) | vbroadcastf32x2 Vqq,Wq (66),(evo)
+1a: vbroadcastf128 Vqq,Mdq (66),(v) | vbroadcastf32x4/64x2 Vqq,Wq (66),(evo)
+1b: vbroadcastf32x8/64x4 Vqq,Mdq (66),(ev)
 1c: pabsb Pq,Qq | vpabsb Vx,Wx (66),(v1)
 1d: pabsw Pq,Qq | vpabsw Vx,Wx (66),(v1)
 1e: pabsd Pq,Qq | vpabsd Vx,Wx (66),(v1)
-1f:
+1f: vpabsq Vx,Wx (66),(ev)
 # 0x0f 0x38 0x20-0x2f
-20: vpmovsxbw Vx,Ux/Mq (66),(v1)
-21: vpmovsxbd Vx,Ux/Md (66),(v1)
-22: vpmovsxbq Vx,Ux/Mw (66),(v1)
-23: vpmovsxwd Vx,Ux/Mq (66),(v1)
-24: vpmovsxwq Vx,Ux/Md (66),(v1)
-25: vpmovsxdq Vx,Ux/Mq (66),(v1)
-26:
-27:
-28: vpmuldq Vx,Hx,Wx (66),(v1)
-29: vpcmpeqq Vx,Hx,Wx (66),(v1)
-2a: vmovntdqa Vx,Mx (66),(v1)
+20: vpmovsxbw Vx,Ux/Mq (66),(v1) | vpmovswb Wx,Vx (F3),(ev)
+21: vpmovsxbd Vx,Ux/Md (66),(v1) | vpmovsdb Wx,Vd (F3),(ev)
+22: vpmovsxbq Vx,Ux/Mw (66),(v1) | vpmovsqb Wx,Vq (F3),(ev)
+23: vpmovsxwd Vx,Ux/Mq (66),(v1) | vpmovsdw Wx,Vd (F3),(ev)
+24: vpmovsxwq Vx,Ux/Md (66),(v1) | vpmovsqw Wx,Vq (F3),(ev)
+25: vpmovsxdq Vx,Ux/Mq (66),(v1) | vpmovsqd Wx,Vq (F3),(ev)
+26: vptestmb/w Vk,Hx,Wx (66),(ev) | vptestnmb/w Vk,Hx,Wx (F3),(ev)
+27: vptestmd/q Vk,Hx,Wx (66),(ev) | vptestnmd/q Vk,Hx,Wx (F3),(ev)
+28: vpmuldq Vx,Hx,Wx (66),(v1) | vpmovm2b/w Vx,Uk (F3),(ev)
+29: vpcmpeqq Vx,Hx,Wx (66),(v1) | vpmovb2m/w2m Vk,Ux (F3),(ev)
+2a: vmovntdqa Vx,Mx (66),(v1) | vpbroadcastmb2q Vx,Uk (F3),(ev)
 2b: vpackusdw Vx,Hx,Wx (66),(v1)
-2c: vmaskmovps Vx,Hx,Mx (66),(v)
-2d: vmaskmovpd Vx,Hx,Mx (66),(v)
+2c: vmaskmovps Vx,Hx,Mx (66),(v) | vscalefps/d Vx,Hx,Wx (66),(evo)
+2d: vmaskmovpd Vx,Hx,Mx (66),(v) | vscalefss/d Vx,Hx,Wx (66),(evo)
 2e: vmaskmovps Mx,Hx,Vx (66),(v)
 2f: vmaskmovpd Mx,Hx,Vx (66),(v)
 # 0x0f 0x38 0x30-0x3f
-30: vpmovzxbw Vx,Ux/Mq (66),(v1)
-31: vpmovzxbd Vx,Ux/Md (66),(v1)
-32: vpmovzxbq Vx,Ux/Mw (66),(v1)
-33: vpmovzxwd Vx,Ux/Mq (66),(v1)
-34: vpmovzxwq Vx,Ux/Md (66),(v1)
-35: vpmovzxdq Vx,Ux/Mq (66),(v1)
-36: vpermd Vqq,Hqq,Wqq (66),(v)
+30: vpmovzxbw Vx,Ux/Mq (66),(v1) | vpmovwb Wx,Vx (F3),(ev)
+31: vpmovzxbd Vx,Ux/Md (66),(v1) | vpmovdb Wx,Vd (F3),(ev)
+32: vpmovzxbq Vx,Ux/Mw (66),(v1) | vpmovqb Wx,Vq (F3),(ev)
+33: vpmovzxwd Vx,Ux/Mq (66),(v1) | vpmovdw Wx,Vd (F3),(ev)
+34: vpmovzxwq Vx,Ux/Md (66),(v1) | vpmovqw Wx,Vq (F3),(ev)
+35: vpmovzxdq Vx,Ux/Mq (66),(v1) | vpmovqd Wx,Vq (F3),(ev)
+36: vpermd Vqq,Hqq,Wqq (66),(v) | vpermd/q Vqq,Hqq,Wqq (66),(evo)
 37: vpcmpgtq Vx,Hx,Wx (66),(v1)
-38: vpminsb Vx,Hx,Wx (66),(v1)
-39: vpminsd Vx,Hx,Wx (66),(v1)
-3a: vpminuw Vx,Hx,Wx (66),(v1)
-3b: vpminud Vx,Hx,Wx (66),(v1)
+38: vpminsb Vx,Hx,Wx (66),(v1) | vpmovm2d/q Vx,Uk (F3),(ev)
+39: vpminsd Vx,Hx,Wx (66),(v1) | vpminsd/q Vx,Hx,Wx (66),(evo) | vpmovd2m/q2m Vk,Ux (F3),(ev)
+3a: vpminuw Vx,Hx,Wx (66),(v1) | vpbroadcastmw2d Vx,Uk (F3),(ev)
+3b: vpminud Vx,Hx,Wx (66),(v1) | vpminud/q Vx,Hx,Wx (66),(evo)
 3c: vpmaxsb Vx,Hx,Wx (66),(v1)
-3d: vpmaxsd Vx,Hx,Wx (66),(v1)
+3d: vpmaxsd Vx,Hx,Wx (66),(v1) | vpmaxsd/q Vx,Hx,Wx (66),(evo)
 3e: vpmaxuw Vx,Hx,Wx (66),(v1)
-3f: vpmaxud Vx,Hx,Wx (66),(v1)
+3f: vpmaxud Vx,Hx,Wx (66),(v1) | vpmaxud/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0x38 0x40-0x8f
-40: vpmulld Vx,Hx,Wx (66),(v1)
+40: vpmulld Vx,Hx,Wx (66),(v1) | vpmulld/q Vx,Hx,Wx (66),(evo)
 41: vphminposuw Vdq,Wdq (66),(v1)
-42:
-43:
-44:
+42: vgetexpps/d Vx,Wx (66),(ev)
+43: vgetexpss/d Vx,Hx,Wx (66),(ev)
+44: vplzcntd/q Vx,Wx (66),(ev)
 45: vpsrlvd/q Vx,Hx,Wx (66),(v)
-46: vpsravd Vx,Hx,Wx (66),(v)
+46: vpsravd Vx,Hx,Wx (66),(v) | vpsravd/q Vx,Hx,Wx (66),(evo)
 47: vpsllvd/q Vx,Hx,Wx (66),(v)
-# Skip 0x48-0x57
+# Skip 0x48-0x4b
+4c: vrcp14ps/d Vpd,Wpd (66),(ev)
+4d: vrcp14ss/d Vsd,Hpd,Wsd (66),(ev)
+4e: vrsqrt14ps/d Vpd,Wpd (66),(ev)
+4f: vrsqrt14ss/d Vsd,Hsd,Wsd (66),(ev)
+# Skip 0x50-0x57
 58: vpbroadcastd Vx,Wx (66),(v)
-59: vpbroadcastq Vx,Wx (66),(v)
-5a: vbroadcasti128 Vqq,Mdq (66),(v)
-# Skip 0x5b-0x77
+59: vpbroadcastq Vx,Wx (66),(v) | vbroadcasti32x2 Vx,Wx (66),(evo)
+5a: vbroadcasti128 Vqq,Mdq (66),(v) | vbroadcasti32x4/64x2 Vx,Wx (66),(evo)
+5b: vbroadcasti32x8/64x4 Vqq,Mdq (66),(ev)
+# Skip 0x5c-0x63
+64: vpblendmd/q Vx,Hx,Wx (66),(ev)
+65: vblendmps/d Vx,Hx,Wx (66),(ev)
+66: vpblendmb/w Vx,Hx,Wx (66),(ev)
+# Skip 0x67-0x74
+75: vpermi2b/w Vx,Hx,Wx (66),(ev)
+76: vpermi2d/q Vx,Hx,Wx (66),(ev)
+77: vpermi2ps/d Vx,Hx,Wx (66),(ev)
 78: vpbroadcastb Vx,Wx (66),(v)
 79: vpbroadcastw Vx,Wx (66),(v)
-# Skip 0x7a-0x7f
+7a: vpbroadcastb Vx,Rv (66),(ev)
+7b: vpbroadcastw Vx,Rv (66),(ev)
+7c: vpbroadcastd/q Vx,Rv (66),(ev)
+7d: vpermt2b/w Vx,Hx,Wx (66),(ev)
+7e: vpermt2d/q Vx,Hx,Wx (66),(ev)
+7f: vpermt2ps/d Vx,Hx,Wx (66),(ev)
 80: INVEPT Gy,Mdq (66)
 81: INVPID Gy,Mdq (66)
 82: INVPCID Gy,Mdq (66)
+83: vpmultishiftqb Vx,Hx,Wx (66),(ev)
+88: vexpandps/d Vpd,Wpd (66),(ev)
+89: vpexpandd/q Vx,Wx (66),(ev)
+8a: vcompressps/d Wx,Vx (66),(ev)
+8b: vpcompressd/q Wx,Vx (66),(ev)
 8c: vpmaskmovd/q Vx,Hx,Mx (66),(v)
+8d: vpermb/w Vx,Hx,Wx (66),(ev)
 8e: vpmaskmovd/q Mx,Vx,Hx (66),(v)
 # 0x0f 0x38 0x90-0xbf (FMA)
-90: vgatherdd/q Vx,Hx,Wx (66),(v)
-91: vgatherqd/q Vx,Hx,Wx (66),(v)
+90: vgatherdd/q Vx,Hx,Wx (66),(v) | vpgatherdd/q Vx,Wx (66),(evo)
+91: vgatherqd/q Vx,Hx,Wx (66),(v) | vpgatherqd/q Vx,Wx (66),(evo)
 92: vgatherdps/d Vx,Hx,Wx (66),(v)
 93: vgatherqps/d Vx,Hx,Wx (66),(v)
 94:
@@ -715,6 +744,10 @@ AVXcode: 2
 9d: vfnmadd132ss/d Vx,Hx,Wx (66),(v),(v1)
 9e: vfnmsub132ps/d Vx,Hx,Wx (66),(v)
 9f: vfnmsub132ss/d Vx,Hx,Wx (66),(v),(v1)
+a0: vpscatterdd/q Wx,Vx (66),(ev)
+a1: vpscatterqd/q Wx,Vx (66),(ev)
+a2: vscatterdps/d Wx,Vx (66),(ev)
+a3: vscatterqps/d Wx,Vx (66),(ev)
 a6: vfmaddsub213ps/d Vx,Hx,Wx (66),(v)
 a7: vfmsubadd213ps/d Vx,Hx,Wx (66),(v)
 a8: vfmadd213ps/d Vx,Hx,Wx (66),(v)
@@ -725,6 +758,8 @@ ac: vfnmadd213ps/d Vx,Hx,Wx (66),(v)
 ad: vfnmadd213ss/d Vx,Hx,Wx (66),(v),(v1)
 ae: vfnmsub213ps/d Vx,Hx,Wx (66),(v)
 af: vfnmsub213ss/d Vx,Hx,Wx (66),(v),(v1)
+b4: vpmadd52luq Vx,Hx,Wx (66),(ev)
+b5: vpmadd52huq Vx,Hx,Wx (66),(ev)
 b6: vfmaddsub231ps/d Vx,Hx,Wx (66),(v)
 b7: vfmsubadd231ps/d Vx,Hx,Wx (66),(v)
 b8: vfmadd231ps/d Vx,Hx,Wx (66),(v)
@@ -736,12 +771,15 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
 be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
 bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
 # 0x0f 0x38 0xc0-0xff
-c8: sha1nexte Vdq,Wdq
+c4: vpconflictd/q Vx,Wx (66),(ev)
+c6: Grp18 (1A)
+c7: Grp19 (1A)
+c8: sha1nexte Vdq,Wdq | vexp2ps/d Vx,Wx (66),(ev)
 c9: sha1msg1 Vdq,Wdq
-ca: sha1msg2 Vdq,Wdq
-cb: sha256rnds2 Vdq,Wdq
-cc: sha256msg1 Vdq,Wdq
-cd: sha256msg2 Vdq,Wdq
+ca: sha1msg2 Vdq,Wdq | vrcp28ps/d Vx,Wx (66),(ev)
+cb: sha256rnds2 Vdq,Wdq | vrcp28ss/d Vx,Hx,Wx (66),(ev)
+cc: sha256msg1 Vdq,Wdq | vrsqrt28ps/d Vx,Wx (66),(ev)
+cd: sha256msg2 Vdq,Wdq | vrsqrt28ss/d Vx,Hx,Wx (66),(ev)
 db: VAESIMC Vdq,Wdq (66),(v1)
 dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
 dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
@@ -763,15 +801,15 @@ AVXcode: 3
 00: vpermq Vqq,Wqq,Ib (66),(v)
 01: vpermpd Vqq,Wqq,Ib (66),(v)
 02: vpblendd Vx,Hx,Wx,Ib (66),(v)
-03:
+03: valignd/q Vx,Hx,Wx,Ib (66),(ev)
 04: vpermilps Vx,Wx,Ib (66),(v)
 05: vpermilpd Vx,Wx,Ib (66),(v)
 06: vperm2f128 Vqq,Hqq,Wqq,Ib (66),(v)
 07:
-08: vroundps Vx,Wx,Ib (66)
-09: vroundpd Vx,Wx,Ib (66)
-0a: vroundss Vss,Wss,Ib (66),(v1)
-0b: vroundsd Vsd,Wsd,Ib (66),(v1)
+08: vroundps Vx,Wx,Ib (66) | vrndscaleps Vx,Wx,Ib (66),(evo)
+09: vroundpd Vx,Wx,Ib (66) | vrndscalepd Vx,Wx,Ib (66),(evo)
+0a: vroundss Vss,Wss,Ib (66),(v1) | vrndscaless Vx,Hx,Wx,Ib (66),(evo)
+0b: vroundsd Vsd,Wsd,Ib (66),(v1) | vrndscalesd Vx,Hx,Wx,Ib (66),(evo)
 0c: vblendps Vx,Hx,Wx,Ib (66)
 0d: vblendpd Vx,Hx,Wx,Ib (66)
 0e: vpblendw Vx,Hx,Wx,Ib (66),(v1)
@@ -780,26 +818,51 @@ AVXcode: 3
 15: vpextrw Rd/Mw,Vdq,Ib (66),(v1)
 16: vpextrd/q Ey,Vdq,Ib (66),(v1)
 17: vextractps Ed,Vdq,Ib (66),(v1)
-18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v)
-19: vextractf128 Wdq,Vqq,Ib (66),(v)
+18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v) | vinsertf32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
+19: vextractf128 Wdq,Vqq,Ib (66),(v) | vextractf32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+1a: vinsertf32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+1b: vextractf32x8/64x4 Wdq,Vqq,Ib (66),(ev)
 1d: vcvtps2ph Wx,Vx,Ib (66),(v)
+1e: vpcmpud/q Vk,Hd,Wd,Ib (66),(ev)
+1f: vpcmpd/q Vk,Hd,Wd,Ib (66),(ev)
 20: vpinsrb Vdq,Hdq,Ry/Mb,Ib (66),(v1)
 21: vinsertps Vdq,Hdq,Udq/Md,Ib (66),(v1)
 22: vpinsrd/q Vdq,Hdq,Ey,Ib (66),(v1)
-38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v)
-39: vextracti128 Wdq,Vqq,Ib (66),(v)
+23: vshuff32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
+25: vpternlogd/q Vx,Hx,Wx,Ib (66),(ev)
+26: vgetmantps/d Vx,Wx,Ib (66),(ev)
+27: vgetmantss/d Vx,Hx,Wx,Ib (66),(ev)
+30: kshiftrb/w Vk,Uk,Ib (66),(v)
+31: kshiftrd/q Vk,Uk,Ib (66),(v)
+32: kshiftlb/w Vk,Uk,Ib (66),(v)
+33: kshiftld/q Vk,Uk,Ib (66),(v)
+38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v) | vinserti32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
+39: vextracti128 Wdq,Vqq,Ib (66),(v) | vextracti32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+3a: vinserti32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+3b: vextracti32x8/64x4 Wdq,Vqq,Ib (66),(ev)
+3e: vpcmpub/w Vk,Hk,Wx,Ib (66),(ev)
+3f: vpcmpb/w Vk,Hk,Wx,Ib (66),(ev)
 40: vdpps Vx,Hx,Wx,Ib (66)
 41: vdppd Vdq,Hdq,Wdq,Ib (66),(v1)
-42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1)
+42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1) | vdbpsadbw Vx,Hx,Wx,Ib (66),(evo)
+43: vshufi32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
 44: vpclmulqdq Vdq,Hdq,Wdq,Ib (66),(v1)
 46: vperm2i128 Vqq,Hqq,Wqq,Ib (66),(v)
 4a: vblendvps Vx,Hx,Wx,Lx (66),(v)
 4b: vblendvpd Vx,Hx,Wx,Lx (66),(v)
 4c: vpblendvb Vx,Hx,Wx,Lx (66),(v1)
+50: vrangeps/d Vx,Hx,Wx,Ib (66),(ev)
+51: vrangess/d Vx,Hx,Wx,Ib (66),(ev)
+54: vfixupimmps/d Vx,Hx,Wx,Ib (66),(ev)
+55: vfixupimmss/d Vx,Hx,Wx,Ib (66),(ev)
+56: vreduceps/d Vx,Wx,Ib (66),(ev)
+57: vreducess/d Vx,Hx,Wx,Ib (66),(ev)
 60: vpcmpestrm Vdq,Wdq,Ib (66),(v1)
 61: vpcmpestri Vdq,Wdq,Ib (66),(v1)
 62: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
 63: vpcmpistri Vdq,Wdq,Ib (66),(v1)
+66: vfpclassps/d Vk,Wx,Ib (66),(ev)
+67: vfpclassss/d Vk,Wx,Ib (66),(ev)
 cc: sha1rnds4 Vdq,Wdq,Ib
 df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
 f0: RORX Gy,Ey,Ib (F2),(v)
@@ -927,8 +990,10 @@ GrpTable: Grp12
 EndTable
 
 GrpTable: Grp13
+0: vprord/q Hx,Wx,Ib (66),(ev)
+1: vprold/q Hx,Wx,Ib (66),(ev)
 2: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1)
-4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1)
+4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) | vpsrad/q Hx,Ux,Ib (66),(evo)
 6: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1)
 EndTable
@@ -963,6 +1028,20 @@ GrpTable: Grp17
 3: BLSI By,Ey (v)
 EndTable
 
+GrpTable: Grp18
+1: vgatherpf0dps/d Wx (66),(ev)
+2: vgatherpf1dps/d Wx (66),(ev)
+5: vscatterpf0dps/d Wx (66),(ev)
+6: vscatterpf1dps/d Wx (66),(ev)
+EndTable
+
+GrpTable: Grp19
+1: vgatherpf0qps/d Wx (66),(ev)
+2: vgatherpf1qps/d Wx (66),(ev)
+5: vscatterpf0qps/d Wx (66),(ev)
+6: vscatterpf1qps/d Wx (66),(ev)
+EndTable
+
 # AMD's Prefetch Group
 GrpTable: GrpP
 0: PREFETCH
...
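To make the (ev)/(evo) rows concrete: a single map slot can decode to several mnemonics depending only on the prefix form, which is exactly what the new tests at the bottom of this commit exercise. The 6f row, with encodings copied from that test data:

#include <stdio.h>

/* One map row (0f 6f), four mnemonics selected purely by prefix.
 * Encodings are taken verbatim from the test data in this commit. */
static const struct { const char *bytes; const char *disasm; } row_6f[] = {
	{ "0f 6f e0",          "movq %mm0,%mm4" },        /* no prefix  */
	{ "c5 fd 6f f4",       "vmovdqa %ymm4,%ymm6" },   /* VEX.66     */
	{ "62 f1 7d 48 6f f5", "vmovdqa32 %zmm5,%zmm6" }, /* EVEX.66    */
	{ "62 f1 fd 48 6f f5", "vmovdqa64 %zmm5,%zmm6" }, /* EVEX.66.W1 */
};

int main(void)
{
	for (unsigned int i = 0; i < sizeof(row_6f) / sizeof(row_6f[0]); i++)
		printf("%-20s %s\n", row_6f[i].bytes, row_6f[i].disasm);
	return 0;
}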
@@ -72,12 +72,14 @@ BEGIN {
 	lprefix_expr = "\\((66|F2|F3)\\)"
 	max_lprefix = 4
 
-	# All opcodes starting with lower-case 'v' or with (v1) superscript
+	# All opcodes starting with lower-case 'v', 'k' or with (v1) superscript
 	# accepts VEX prefix
-	vexok_opcode_expr = "^v.*"
+	vexok_opcode_expr = "^[vk].*"
 	vexok_expr = "\\(v1\\)"
 	# All opcodes with (v) superscript supports *only* VEX prefix
 	vexonly_expr = "\\(v\\)"
+	# All opcodes with (ev) superscript supports *only* EVEX prefix
+	evexonly_expr = "\\(ev\\)"
 
 	prefix_expr = "\\(Prefix\\)"
 	prefix_num["Operand-Size"] = "INAT_PFX_OPNDSZ"
@@ -95,6 +97,7 @@ BEGIN {
 	prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ"
 	prefix_num["VEX+1byte"] = "INAT_PFX_VEX2"
 	prefix_num["VEX+2byte"] = "INAT_PFX_VEX3"
+	prefix_num["EVEX"] = "INAT_PFX_EVEX"
 
 	clear_vars()
 }
@@ -319,7 +322,9 @@ function convert_operands(count,opnd, i,j,imm,mod)
 		flags = add_flags(flags, "INAT_MODRM")
 
 	# check VEX codes
-	if (match(ext, vexonly_expr))
+	if (match(ext, evexonly_expr))
+		flags = add_flags(flags, "INAT_VEXOK | INAT_EVEXONLY")
+	else if (match(ext, vexonly_expr))
 		flags = add_flags(flags, "INAT_VEXOK | INAT_VEXONLY")
 	else if (match(ext, vexok_expr) || match(opcode, vexok_opcode_expr))
 		flags = add_flags(flags, "INAT_VEXOK")
...
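Tying the map superscripts to the generated tables: the mnemonic text never reaches the attribute tables; only the superscripts and the leading 'v'/'k' matter. So (ev) rows get INAT_VEXOK | INAT_EVEXONLY, (v) rows keep INAT_VEXOK | INAT_VEXONLY, and (evo) adds no flag of its own; the EVEX variant simply shares its row's attribute, with the decoder telling the variants apart by prefix length (that last point is my reading of the generator, not something the diff states). A sketch with illustrative bit values:

#include <stdio.h>

/* Illustrative bit positions; only the combinations matter here. */
#define INAT_VEXOK    (1 << 13)
#define INAT_VEXONLY  (1 << 14)
#define INAT_EVEXONLY (1 << 15)

int main(void)
{
	printf("'v'/'k' mnemonic or (v1): %#x\n", INAT_VEXOK);
	printf("(v) superscript:          %#x\n", INAT_VEXOK | INAT_VEXONLY);
	printf("(ev) superscript:         %#x\n", INAT_VEXOK | INAT_EVEXONLY);
	return 0;
}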
-objtool-y += arch/$(ARCH)/
+objtool-y += arch/$(SRCARCH)/
 objtool-y += builtin-check.o
 objtool-y += elf.o
 objtool-y += special.o
...
 include ../scripts/Makefile.include
+include ../scripts/Makefile.arch
 
-ifndef ($(ARCH))
-ARCH ?= $(shell uname -m)
 ifeq ($(ARCH),x86_64)
 ARCH := x86
 endif
-endif
 
 # always use the host compiler
 CC = gcc
@@ -26,7 +24,7 @@ OBJTOOL_IN := $(OBJTOOL)-in.o
 all: $(OBJTOOL)
 
-INCLUDES := -I$(srctree)/tools/include -I$(srctree)/tools/arch/$(ARCH)/include/uapi
+INCLUDES := -I$(srctree)/tools/include -I$(srctree)/tools/arch/$(HOSTARCH)/include/uapi
 CFLAGS += -Wall -Werror $(EXTRA_WARNINGS) -fomit-frame-pointer -O2 -g $(INCLUDES)
 LDFLAGS += -lelf $(LIBSUBCMD)
@@ -35,7 +33,7 @@ elfshdr := $(shell echo '\#include <libelf.h>' | $(CC) $(CFLAGS) -x c -E - | gre
 CFLAGS += $(if $(elfshdr),,-DLIBELF_USE_DEPRECATED)
 
 AWK = awk
-export srctree OUTPUT CFLAGS ARCH AWK
+export srctree OUTPUT CFLAGS SRCARCH AWK
 include $(srctree)/tools/build/Makefile.include
 
 $(OBJTOOL_IN): fixdep FORCE
...
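The remainder of the commit is new records for the perf "x86 instruction decoder - new instructions" test: each entry pairs raw instruction bytes with the objdump output they came from, and the test checks that the decoder agrees on length. A hedged sketch of the record layout (field names are assumptions read off the entries below, not the test's actual declarations):

/* Assumed shape of one record in the tables below. */
struct test_entry {
	unsigned char data[16]; /* instruction bytes, zero-padded */
	int len;                /* expected decoded length */
	int rel;                /* expected size of a relative operand */
	const char *op;         /* expected operation, "" if unchecked */
	const char *branch;     /* expected branch type, "" if unchecked */
	const char *disasm;     /* the objdump line the bytes came from */
};

/* e.g. the first record: rdtsc is two bytes, no relative operand. */
static const struct test_entry example = {
	{ 0x0f, 0x31, }, 2, 0, "", "",
	"0f 31 \trdtsc ",
};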
@@ -6,6 +6,1016 @@
{{0x0f, 0x31, }, 2, 0, "", "",
"0f 31 \trdtsc ",},
{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
{{0x62, 0x81, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 81 78 56 34 12 \tbound %eax,0x12345678(%ecx)",},
{{0x62, 0x88, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 88 78 56 34 12 \tbound %ecx,0x12345678(%eax)",},
{{0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 90 78 56 34 12 \tbound %edx,0x12345678(%eax)",},
{{0x62, 0x98, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 98 78 56 34 12 \tbound %ebx,0x12345678(%eax)",},
{{0x62, 0xa0, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 a0 78 56 34 12 \tbound %esp,0x12345678(%eax)",},
{{0x62, 0xa8, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 a8 78 56 34 12 \tbound %ebp,0x12345678(%eax)",},
{{0x62, 0xb0, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 b0 78 56 34 12 \tbound %esi,0x12345678(%eax)",},
{{0x62, 0xb8, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 b8 78 56 34 12 \tbound %edi,0x12345678(%eax)",},
{{0x62, 0x08, }, 2, 0, "", "",
"62 08 \tbound %ecx,(%eax)",},
{{0x62, 0x05, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 05 78 56 34 12 \tbound %eax,0x12345678",},
{{0x62, 0x14, 0x01, }, 3, 0, "", "",
"62 14 01 \tbound %edx,(%ecx,%eax,1)",},
{{0x62, 0x14, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"62 14 05 78 56 34 12 \tbound %edx,0x12345678(,%eax,1)",},
{{0x62, 0x14, 0x08, }, 3, 0, "", "",
"62 14 08 \tbound %edx,(%eax,%ecx,1)",},
{{0x62, 0x14, 0xc8, }, 3, 0, "", "",
"62 14 c8 \tbound %edx,(%eax,%ecx,8)",},
{{0x62, 0x50, 0x12, }, 3, 0, "", "",
"62 50 12 \tbound %edx,0x12(%eax)",},
{{0x62, 0x55, 0x12, }, 3, 0, "", "",
"62 55 12 \tbound %edx,0x12(%ebp)",},
{{0x62, 0x54, 0x01, 0x12, }, 4, 0, "", "",
"62 54 01 12 \tbound %edx,0x12(%ecx,%eax,1)",},
{{0x62, 0x54, 0x05, 0x12, }, 4, 0, "", "",
"62 54 05 12 \tbound %edx,0x12(%ebp,%eax,1)",},
{{0x62, 0x54, 0x08, 0x12, }, 4, 0, "", "",
"62 54 08 12 \tbound %edx,0x12(%eax,%ecx,1)",},
{{0x62, 0x54, 0xc8, 0x12, }, 4, 0, "", "",
"62 54 c8 12 \tbound %edx,0x12(%eax,%ecx,8)",},
{{0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 90 78 56 34 12 \tbound %edx,0x12345678(%eax)",},
{{0x62, 0x95, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
"62 95 78 56 34 12 \tbound %edx,0x12345678(%ebp)",},
{{0x62, 0x94, 0x01, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"62 94 01 78 56 34 12 \tbound %edx,0x12345678(%ecx,%eax,1)",},
{{0x62, 0x94, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"62 94 05 78 56 34 12 \tbound %edx,0x12345678(%ebp,%eax,1)",},
{{0x62, 0x94, 0x08, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"62 94 08 78 56 34 12 \tbound %edx,0x12345678(%eax,%ecx,1)",},
{{0x62, 0x94, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"62 94 c8 78 56 34 12 \tbound %edx,0x12345678(%eax,%ecx,8)",},
{{0x66, 0x62, 0x81, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 81 78 56 34 12 \tbound %ax,0x12345678(%ecx)",},
{{0x66, 0x62, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 88 78 56 34 12 \tbound %cx,0x12345678(%eax)",},
{{0x66, 0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 90 78 56 34 12 \tbound %dx,0x12345678(%eax)",},
{{0x66, 0x62, 0x98, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 98 78 56 34 12 \tbound %bx,0x12345678(%eax)",},
{{0x66, 0x62, 0xa0, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 a0 78 56 34 12 \tbound %sp,0x12345678(%eax)",},
{{0x66, 0x62, 0xa8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 a8 78 56 34 12 \tbound %bp,0x12345678(%eax)",},
{{0x66, 0x62, 0xb0, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 b0 78 56 34 12 \tbound %si,0x12345678(%eax)",},
{{0x66, 0x62, 0xb8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 b8 78 56 34 12 \tbound %di,0x12345678(%eax)",},
{{0x66, 0x62, 0x08, }, 3, 0, "", "",
"66 62 08 \tbound %cx,(%eax)",},
{{0x66, 0x62, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 05 78 56 34 12 \tbound %ax,0x12345678",},
{{0x66, 0x62, 0x14, 0x01, }, 4, 0, "", "",
"66 62 14 01 \tbound %dx,(%ecx,%eax,1)",},
{{0x66, 0x62, 0x14, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 62 14 05 78 56 34 12 \tbound %dx,0x12345678(,%eax,1)",},
{{0x66, 0x62, 0x14, 0x08, }, 4, 0, "", "",
"66 62 14 08 \tbound %dx,(%eax,%ecx,1)",},
{{0x66, 0x62, 0x14, 0xc8, }, 4, 0, "", "",
"66 62 14 c8 \tbound %dx,(%eax,%ecx,8)",},
{{0x66, 0x62, 0x50, 0x12, }, 4, 0, "", "",
"66 62 50 12 \tbound %dx,0x12(%eax)",},
{{0x66, 0x62, 0x55, 0x12, }, 4, 0, "", "",
"66 62 55 12 \tbound %dx,0x12(%ebp)",},
{{0x66, 0x62, 0x54, 0x01, 0x12, }, 5, 0, "", "",
"66 62 54 01 12 \tbound %dx,0x12(%ecx,%eax,1)",},
{{0x66, 0x62, 0x54, 0x05, 0x12, }, 5, 0, "", "",
"66 62 54 05 12 \tbound %dx,0x12(%ebp,%eax,1)",},
{{0x66, 0x62, 0x54, 0x08, 0x12, }, 5, 0, "", "",
"66 62 54 08 12 \tbound %dx,0x12(%eax,%ecx,1)",},
{{0x66, 0x62, 0x54, 0xc8, 0x12, }, 5, 0, "", "",
"66 62 54 c8 12 \tbound %dx,0x12(%eax,%ecx,8)",},
{{0x66, 0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 90 78 56 34 12 \tbound %dx,0x12345678(%eax)",},
{{0x66, 0x62, 0x95, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"66 62 95 78 56 34 12 \tbound %dx,0x12345678(%ebp)",},
{{0x66, 0x62, 0x94, 0x01, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 62 94 01 78 56 34 12 \tbound %dx,0x12345678(%ecx,%eax,1)",},
{{0x66, 0x62, 0x94, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 62 94 05 78 56 34 12 \tbound %dx,0x12345678(%ebp,%eax,1)",},
{{0x66, 0x62, 0x94, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 62 94 08 78 56 34 12 \tbound %dx,0x12345678(%eax,%ecx,1)",},
{{0x66, 0x62, 0x94, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 62 94 c8 78 56 34 12 \tbound %dx,0x12345678(%eax,%ecx,8)",},
{{0x0f, 0x41, 0xd8, }, 3, 0, "", "",
"0f 41 d8 \tcmovno %eax,%ebx",},
{{0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 41 88 78 56 34 12 \tcmovno 0x12345678(%eax),%ecx",},
{{0x66, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%eax),%cx",},
{{0x0f, 0x44, 0xd8, }, 3, 0, "", "",
"0f 44 d8 \tcmove %eax,%ebx",},
{{0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 44 88 78 56 34 12 \tcmove 0x12345678(%eax),%ecx",},
{{0x66, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 0f 44 88 78 56 34 12 \tcmove 0x12345678(%eax),%cx",},
{{0x0f, 0x90, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 90 80 78 56 34 12 \tseto 0x12345678(%eax)",},
{{0x0f, 0x91, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 91 80 78 56 34 12 \tsetno 0x12345678(%eax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
{{0x0f, 0x98, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 98 80 78 56 34 12 \tsets 0x12345678(%eax)",},
{{0x0f, 0x99, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 99 80 78 56 34 12 \tsetns 0x12345678(%eax)",},
{{0xc5, 0xcc, 0x41, 0xef, }, 4, 0, "", "",
"c5 cc 41 ef \tkandw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x41, 0xef, }, 5, 0, "", "",
"c4 e1 cc 41 ef \tkandq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x41, 0xef, }, 4, 0, "", "",
"c5 cd 41 ef \tkandb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x41, 0xef, }, 5, 0, "", "",
"c4 e1 cd 41 ef \tkandd %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x42, 0xef, }, 4, 0, "", "",
"c5 cc 42 ef \tkandnw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x42, 0xef, }, 5, 0, "", "",
"c4 e1 cc 42 ef \tkandnq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x42, 0xef, }, 4, 0, "", "",
"c5 cd 42 ef \tkandnb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x42, 0xef, }, 5, 0, "", "",
"c4 e1 cd 42 ef \tkandnd %k7,%k6,%k5",},
{{0xc5, 0xf8, 0x44, 0xf7, }, 4, 0, "", "",
"c5 f8 44 f7 \tknotw %k7,%k6",},
{{0xc4, 0xe1, 0xf8, 0x44, 0xf7, }, 5, 0, "", "",
"c4 e1 f8 44 f7 \tknotq %k7,%k6",},
{{0xc5, 0xf9, 0x44, 0xf7, }, 4, 0, "", "",
"c5 f9 44 f7 \tknotb %k7,%k6",},
{{0xc4, 0xe1, 0xf9, 0x44, 0xf7, }, 5, 0, "", "",
"c4 e1 f9 44 f7 \tknotd %k7,%k6",},
{{0xc5, 0xcc, 0x45, 0xef, }, 4, 0, "", "",
"c5 cc 45 ef \tkorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x45, 0xef, }, 5, 0, "", "",
"c4 e1 cc 45 ef \tkorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x45, 0xef, }, 4, 0, "", "",
"c5 cd 45 ef \tkorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x45, 0xef, }, 5, 0, "", "",
"c4 e1 cd 45 ef \tkord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x46, 0xef, }, 4, 0, "", "",
"c5 cc 46 ef \tkxnorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x46, 0xef, }, 5, 0, "", "",
"c4 e1 cc 46 ef \tkxnorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x46, 0xef, }, 4, 0, "", "",
"c5 cd 46 ef \tkxnorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x46, 0xef, }, 5, 0, "", "",
"c4 e1 cd 46 ef \tkxnord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x47, 0xef, }, 4, 0, "", "",
"c5 cc 47 ef \tkxorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x47, 0xef, }, 5, 0, "", "",
"c4 e1 cc 47 ef \tkxorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x47, 0xef, }, 4, 0, "", "",
"c5 cd 47 ef \tkxorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x47, 0xef, }, 5, 0, "", "",
"c4 e1 cd 47 ef \tkxord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x4a, 0xef, }, 4, 0, "", "",
"c5 cc 4a ef \tkaddw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x4a, 0xef, }, 5, 0, "", "",
"c4 e1 cc 4a ef \tkaddq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x4a, 0xef, }, 4, 0, "", "",
"c5 cd 4a ef \tkaddb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x4a, 0xef, }, 5, 0, "", "",
"c4 e1 cd 4a ef \tkaddd %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x4b, 0xef, }, 4, 0, "", "",
"c5 cd 4b ef \tkunpckbw %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x4b, 0xef, }, 4, 0, "", "",
"c5 cc 4b ef \tkunpckwd %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x4b, 0xef, }, 5, 0, "", "",
"c4 e1 cc 4b ef \tkunpckdq %k7,%k6,%k5",},
{{0xc5, 0xf8, 0x90, 0xee, }, 4, 0, "", "",
"c5 f8 90 ee \tkmovw %k6,%k5",},
{{0xc5, 0xf8, 0x90, 0x29, }, 4, 0, "", "",
"c5 f8 90 29 \tkmovw (%ecx),%k5",},
{{0xc5, 0xf8, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
"c5 f8 90 ac c8 23 01 00 00 \tkmovw 0x123(%eax,%ecx,8),%k5",},
{{0xc5, 0xf8, 0x91, 0x29, }, 4, 0, "", "",
"c5 f8 91 29 \tkmovw %k5,(%ecx)",},
{{0xc5, 0xf8, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
"c5 f8 91 ac c8 23 01 00 00 \tkmovw %k5,0x123(%eax,%ecx,8)",},
{{0xc5, 0xf8, 0x92, 0xe8, }, 4, 0, "", "",
"c5 f8 92 e8 \tkmovw %eax,%k5",},
{{0xc5, 0xf8, 0x92, 0xed, }, 4, 0, "", "",
"c5 f8 92 ed \tkmovw %ebp,%k5",},
{{0xc5, 0xf8, 0x93, 0xc5, }, 4, 0, "", "",
"c5 f8 93 c5 \tkmovw %k5,%eax",},
{{0xc5, 0xf8, 0x93, 0xed, }, 4, 0, "", "",
"c5 f8 93 ed \tkmovw %k5,%ebp",},
{{0xc4, 0xe1, 0xf8, 0x90, 0xee, }, 5, 0, "", "",
"c4 e1 f8 90 ee \tkmovq %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x90, 0x29, }, 5, 0, "", "",
"c4 e1 f8 90 29 \tkmovq (%ecx),%k5",},
{{0xc4, 0xe1, 0xf8, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 e1 f8 90 ac c8 23 01 00 00 \tkmovq 0x123(%eax,%ecx,8),%k5",},
{{0xc4, 0xe1, 0xf8, 0x91, 0x29, }, 5, 0, "", "",
"c4 e1 f8 91 29 \tkmovq %k5,(%ecx)",},
{{0xc4, 0xe1, 0xf8, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 e1 f8 91 ac c8 23 01 00 00 \tkmovq %k5,0x123(%eax,%ecx,8)",},
{{0xc5, 0xf9, 0x90, 0xee, }, 4, 0, "", "",
"c5 f9 90 ee \tkmovb %k6,%k5",},
{{0xc5, 0xf9, 0x90, 0x29, }, 4, 0, "", "",
"c5 f9 90 29 \tkmovb (%ecx),%k5",},
{{0xc5, 0xf9, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
"c5 f9 90 ac c8 23 01 00 00 \tkmovb 0x123(%eax,%ecx,8),%k5",},
{{0xc5, 0xf9, 0x91, 0x29, }, 4, 0, "", "",
"c5 f9 91 29 \tkmovb %k5,(%ecx)",},
{{0xc5, 0xf9, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
"c5 f9 91 ac c8 23 01 00 00 \tkmovb %k5,0x123(%eax,%ecx,8)",},
{{0xc5, 0xf9, 0x92, 0xe8, }, 4, 0, "", "",
"c5 f9 92 e8 \tkmovb %eax,%k5",},
{{0xc5, 0xf9, 0x92, 0xed, }, 4, 0, "", "",
"c5 f9 92 ed \tkmovb %ebp,%k5",},
{{0xc5, 0xf9, 0x93, 0xc5, }, 4, 0, "", "",
"c5 f9 93 c5 \tkmovb %k5,%eax",},
{{0xc5, 0xf9, 0x93, 0xed, }, 4, 0, "", "",
"c5 f9 93 ed \tkmovb %k5,%ebp",},
{{0xc4, 0xe1, 0xf9, 0x90, 0xee, }, 5, 0, "", "",
"c4 e1 f9 90 ee \tkmovd %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x90, 0x29, }, 5, 0, "", "",
"c4 e1 f9 90 29 \tkmovd (%ecx),%k5",},
{{0xc4, 0xe1, 0xf9, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 e1 f9 90 ac c8 23 01 00 00 \tkmovd 0x123(%eax,%ecx,8),%k5",},
{{0xc4, 0xe1, 0xf9, 0x91, 0x29, }, 5, 0, "", "",
"c4 e1 f9 91 29 \tkmovd %k5,(%ecx)",},
{{0xc4, 0xe1, 0xf9, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 e1 f9 91 ac c8 23 01 00 00 \tkmovd %k5,0x123(%eax,%ecx,8)",},
{{0xc5, 0xfb, 0x92, 0xe8, }, 4, 0, "", "",
"c5 fb 92 e8 \tkmovd %eax,%k5",},
{{0xc5, 0xfb, 0x92, 0xed, }, 4, 0, "", "",
"c5 fb 92 ed \tkmovd %ebp,%k5",},
{{0xc5, 0xfb, 0x93, 0xc5, }, 4, 0, "", "",
"c5 fb 93 c5 \tkmovd %k5,%eax",},
{{0xc5, 0xfb, 0x93, 0xed, }, 4, 0, "", "",
"c5 fb 93 ed \tkmovd %k5,%ebp",},
{{0xc5, 0xf8, 0x98, 0xee, }, 4, 0, "", "",
"c5 f8 98 ee \tkortestw %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x98, 0xee, }, 5, 0, "", "",
"c4 e1 f8 98 ee \tkortestq %k6,%k5",},
{{0xc5, 0xf9, 0x98, 0xee, }, 4, 0, "", "",
"c5 f9 98 ee \tkortestb %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x98, 0xee, }, 5, 0, "", "",
"c4 e1 f9 98 ee \tkortestd %k6,%k5",},
{{0xc5, 0xf8, 0x99, 0xee, }, 4, 0, "", "",
"c5 f8 99 ee \tktestw %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x99, 0xee, }, 5, 0, "", "",
"c4 e1 f8 99 ee \tktestq %k6,%k5",},
{{0xc5, 0xf9, 0x99, 0xee, }, 4, 0, "", "",
"c5 f9 99 ee \tktestb %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x99, 0xee, }, 5, 0, "", "",
"c4 e1 f9 99 ee \tktestd %k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x30, 0xee, 0x12, }, 6, 0, "", "",
"c4 e3 f9 30 ee 12 \tkshiftrw $0x12,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x31, 0xee, 0x5b, }, 6, 0, "", "",
"c4 e3 f9 31 ee 5b \tkshiftrq $0x5b,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x32, 0xee, 0x12, }, 6, 0, "", "",
"c4 e3 f9 32 ee 12 \tkshiftlw $0x12,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x33, 0xee, 0x5b, }, 6, 0, "", "",
"c4 e3 f9 33 ee 5b \tkshiftlq $0x5b,%k6,%k5",},
{{0xc5, 0xf8, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 f8 5b f5 \tvcvtdq2ps %xmm5,%xmm6",},
{{0x62, 0xf1, 0xfc, 0x4f, 0x5b, 0xf5, }, 6, 0, "", "",
"62 f1 fc 4f 5b f5 \tvcvtqq2ps %zmm5,%ymm6{%k7}",},
{{0xc5, 0xf9, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 f9 5b f5 \tvcvtps2dq %xmm5,%xmm6",},
{{0xc5, 0xfa, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 fa 5b f5 \tvcvttps2dq %xmm5,%xmm6",},
{{0x0f, 0x6f, 0xe0, }, 3, 0, "", "",
"0f 6f e0 \tmovq %mm0,%mm4",},
{{0xc5, 0xfd, 0x6f, 0xf4, }, 4, 0, "", "",
"c5 fd 6f f4 \tvmovdqa %ymm4,%ymm6",},
{{0x62, 0xf1, 0x7d, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 7d 48 6f f5 \tvmovdqa32 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfd, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 fd 48 6f f5 \tvmovdqa64 %zmm5,%zmm6",},
{{0xc5, 0xfe, 0x6f, 0xf4, }, 4, 0, "", "",
"c5 fe 6f f4 \tvmovdqu %ymm4,%ymm6",},
{{0x62, 0xf1, 0x7e, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 7e 48 6f f5 \tvmovdqu32 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfe, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 fe 48 6f f5 \tvmovdqu64 %zmm5,%zmm6",},
{{0x62, 0xf1, 0x7f, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 7f 48 6f f5 \tvmovdqu8 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xff, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 f1 ff 48 6f f5 \tvmovdqu16 %zmm5,%zmm6",},
{{0x0f, 0x78, 0xc3, }, 3, 0, "", "",
"0f 78 c3 \tvmread %eax,%ebx",},
{{0x62, 0xf1, 0x7c, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
"62 f1 7c 48 78 f5 \tvcvttps2udq %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfc, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
"62 f1 fc 4f 78 f5 \tvcvttpd2udq %zmm5,%ymm6{%k7}",},
{{0x62, 0xf1, 0x7f, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
"62 f1 7f 08 78 c6 \tvcvttsd2usi %xmm6,%eax",},
{{0x62, 0xf1, 0x7e, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
"62 f1 7e 08 78 c6 \tvcvttss2usi %xmm6,%eax",},
{{0x62, 0xf1, 0x7d, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
"62 f1 7d 4f 78 f5 \tvcvttps2uqq %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfd, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
"62 f1 fd 48 78 f5 \tvcvttpd2uqq %zmm5,%zmm6",},
{{0x0f, 0x79, 0xd8, }, 3, 0, "", "",
"0f 79 d8 \tvmwrite %eax,%ebx",},
{{0x62, 0xf1, 0x7c, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
"62 f1 7c 48 79 f5 \tvcvtps2udq %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfc, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
"62 f1 fc 4f 79 f5 \tvcvtpd2udq %zmm5,%ymm6{%k7}",},
{{0x62, 0xf1, 0x7f, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
"62 f1 7f 08 79 c6 \tvcvtsd2usi %xmm6,%eax",},
{{0x62, 0xf1, 0x7e, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
"62 f1 7e 08 79 c6 \tvcvtss2usi %xmm6,%eax",},
{{0x62, 0xf1, 0x7d, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
"62 f1 7d 4f 79 f5 \tvcvtps2uqq %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfd, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
"62 f1 fd 48 79 f5 \tvcvtpd2uqq %zmm5,%zmm6",},
{{0x62, 0xf1, 0x7e, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 7e 4f 7a f5 \tvcvtudq2pd %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfe, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 fe 48 7a f5 \tvcvtuqq2pd %zmm5,%zmm6",},
{{0x62, 0xf1, 0x7f, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 7f 48 7a f5 \tvcvtudq2ps %zmm5,%zmm6",},
{{0x62, 0xf1, 0xff, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 ff 4f 7a f5 \tvcvtuqq2ps %zmm5,%ymm6{%k7}",},
{{0x62, 0xf1, 0x7d, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 7d 4f 7a f5 \tvcvttps2qq %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfd, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
"62 f1 fd 48 7a f5 \tvcvttpd2qq %zmm5,%zmm6",},
{{0x62, 0xf1, 0x57, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
"62 f1 57 08 7b f0 \tvcvtusi2sd %eax,%xmm5,%xmm6",},
{{0x62, 0xf1, 0x56, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
"62 f1 56 08 7b f0 \tvcvtusi2ss %eax,%xmm5,%xmm6",},
{{0x62, 0xf1, 0x7d, 0x4f, 0x7b, 0xf5, }, 6, 0, "", "",
"62 f1 7d 4f 7b f5 \tvcvtps2qq %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfd, 0x48, 0x7b, 0xf5, }, 6, 0, "", "",
"62 f1 fd 48 7b f5 \tvcvtpd2qq %zmm5,%zmm6",},
{{0x0f, 0x7f, 0xc4, }, 3, 0, "", "",
"0f 7f c4 \tmovq %mm0,%mm4",},
{{0xc5, 0xfd, 0x7f, 0xee, }, 4, 0, "", "",
"c5 fd 7f ee \tvmovdqa %ymm5,%ymm6",},
{{0x62, 0xf1, 0x7d, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 7d 48 7f ee \tvmovdqa32 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfd, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 fd 48 7f ee \tvmovdqa64 %zmm5,%zmm6",},
{{0xc5, 0xfe, 0x7f, 0xee, }, 4, 0, "", "",
"c5 fe 7f ee \tvmovdqu %ymm5,%ymm6",},
{{0x62, 0xf1, 0x7e, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 7e 48 7f ee \tvmovdqu32 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xfe, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 fe 48 7f ee \tvmovdqu64 %zmm5,%zmm6",},
{{0x62, 0xf1, 0x7f, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 7f 48 7f ee \tvmovdqu8 %zmm5,%zmm6",},
{{0x62, 0xf1, 0xff, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
"62 f1 ff 48 7f ee \tvmovdqu16 %zmm5,%zmm6",},
{{0x0f, 0xdb, 0xd1, }, 3, 0, "", "",
"0f db d1 \tpand %mm1,%mm2",},
{{0x66, 0x0f, 0xdb, 0xd1, }, 4, 0, "", "",
"66 0f db d1 \tpand %xmm1,%xmm2",},
{{0xc5, 0xcd, 0xdb, 0xd4, }, 4, 0, "", "",
"c5 cd db d4 \tvpand %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf1, 0x55, 0x48, 0xdb, 0xf4, }, 6, 0, "", "",
"62 f1 55 48 db f4 \tvpandd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0xdb, 0xf4, }, 6, 0, "", "",
"62 f1 d5 48 db f4 \tvpandq %zmm4,%zmm5,%zmm6",},
{{0x0f, 0xdf, 0xd1, }, 3, 0, "", "",
"0f df d1 \tpandn %mm1,%mm2",},
{{0x66, 0x0f, 0xdf, 0xd1, }, 4, 0, "", "",
"66 0f df d1 \tpandn %xmm1,%xmm2",},
{{0xc5, 0xcd, 0xdf, 0xd4, }, 4, 0, "", "",
"c5 cd df d4 \tvpandn %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf1, 0x55, 0x48, 0xdf, 0xf4, }, 6, 0, "", "",
"62 f1 55 48 df f4 \tvpandnd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0xdf, 0xf4, }, 6, 0, "", "",
"62 f1 d5 48 df f4 \tvpandnq %zmm4,%zmm5,%zmm6",},
{{0xc5, 0xf9, 0xe6, 0xd1, }, 4, 0, "", "",
"c5 f9 e6 d1 \tvcvttpd2dq %xmm1,%xmm2",},
{{0xc5, 0xfa, 0xe6, 0xf5, }, 4, 0, "", "",
"c5 fa e6 f5 \tvcvtdq2pd %xmm5,%xmm6",},
{{0x62, 0xf1, 0x7e, 0x4f, 0xe6, 0xf5, }, 6, 0, "", "",
"62 f1 7e 4f e6 f5 \tvcvtdq2pd %ymm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xfe, 0x48, 0xe6, 0xf5, }, 6, 0, "", "",
"62 f1 fe 48 e6 f5 \tvcvtqq2pd %zmm5,%zmm6",},
{{0xc5, 0xfb, 0xe6, 0xd1, }, 4, 0, "", "",
"c5 fb e6 d1 \tvcvtpd2dq %xmm1,%xmm2",},
{{0x0f, 0xeb, 0xf4, }, 3, 0, "", "",
"0f eb f4 \tpor %mm4,%mm6",},
{{0xc5, 0xcd, 0xeb, 0xd4, }, 4, 0, "", "",
"c5 cd eb d4 \tvpor %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf1, 0x55, 0x48, 0xeb, 0xf4, }, 6, 0, "", "",
"62 f1 55 48 eb f4 \tvpord %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0xeb, 0xf4, }, 6, 0, "", "",
"62 f1 d5 48 eb f4 \tvporq %zmm4,%zmm5,%zmm6",},
{{0x0f, 0xef, 0xf4, }, 3, 0, "", "",
"0f ef f4 \tpxor %mm4,%mm6",},
{{0xc5, 0xcd, 0xef, 0xd4, }, 4, 0, "", "",
"c5 cd ef d4 \tvpxor %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf1, 0x55, 0x48, 0xef, 0xf4, }, 6, 0, "", "",
"62 f1 55 48 ef f4 \tvpxord %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0xef, 0xf4, }, 6, 0, "", "",
"62 f1 d5 48 ef f4 \tvpxorq %zmm4,%zmm5,%zmm6",},
{{0x66, 0x0f, 0x38, 0x10, 0xc1, }, 5, 0, "", "",
"66 0f 38 10 c1 \tpblendvb %xmm0,%xmm1,%xmm0",},
{{0x62, 0xf2, 0xd5, 0x48, 0x10, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 10 f4 \tvpsrlvw %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x10, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 10 ee \tvpmovuswb %zmm5,%ymm6{%k7}",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x11, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 11 ee \tvpmovusdb %zmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xd5, 0x48, 0x11, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 11 f4 \tvpsravw %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x12, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 12 ee \tvpmovusqb %zmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xd5, 0x48, 0x12, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 12 f4 \tvpsllvw %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
{{0x62, 0xf2, 0x7d, 0x4f, 0x13, 0xf5, }, 6, 0, "", "",
"62 f2 7d 4f 13 f5 \tvcvtph2ps %ymm5,%zmm6{%k7}",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x13, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 13 ee \tvpmovusdw %zmm5,%ymm6{%k7}",},
{{0x66, 0x0f, 0x38, 0x14, 0xc1, }, 5, 0, "", "",
"66 0f 38 14 c1 \tblendvps %xmm0,%xmm1,%xmm0",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x14, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 14 ee \tvpmovusqw %zmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0x55, 0x48, 0x14, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 14 f4 \tvprorvd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x14, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 14 f4 \tvprorvq %zmm4,%zmm5,%zmm6",},
{{0x66, 0x0f, 0x38, 0x15, 0xc1, }, 5, 0, "", "",
"66 0f 38 15 c1 \tblendvpd %xmm0,%xmm1,%xmm0",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x15, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 15 ee \tvpmovusqd %zmm5,%ymm6{%k7}",},
{{0x62, 0xf2, 0x55, 0x48, 0x15, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 15 f4 \tvprolvd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x15, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 15 f4 \tvprolvq %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x16, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 16 d4 \tvpermps %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x4d, 0x2f, 0x16, 0xd4, }, 6, 0, "", "",
"62 f2 4d 2f 16 d4 \tvpermps %ymm4,%ymm6,%ymm2{%k7}",},
{{0x62, 0xf2, 0xcd, 0x2f, 0x16, 0xd4, }, 6, 0, "", "",
"62 f2 cd 2f 16 d4 \tvpermpd %ymm4,%ymm6,%ymm2{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x19, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 19 f4 \tvbroadcastsd %xmm4,%ymm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x19, 0xf7, }, 6, 0, "", "",
"62 f2 7d 48 19 f7 \tvbroadcastf32x2 %xmm7,%zmm6",},
{{0xc4, 0xe2, 0x7d, 0x1a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 1a 21 \tvbroadcastf128 (%ecx),%ymm4",},
{{0x62, 0xf2, 0x7d, 0x48, 0x1a, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 1a 31 \tvbroadcastf32x4 (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x1a, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 1a 31 \tvbroadcastf64x2 (%ecx),%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x1b, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 1b 31 \tvbroadcastf32x8 (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x1b, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 1b 31 \tvbroadcastf64x4 (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x1f, 0xf4, }, 6, 0, "", "",
"62 f2 fd 48 1f f4 \tvpabsq %zmm4,%zmm6",},
{{0xc4, 0xe2, 0x79, 0x20, 0xec, }, 5, 0, "", "",
"c4 e2 79 20 ec \tvpmovsxbw %xmm4,%xmm5",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x20, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 20 ee \tvpmovswb %zmm5,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x21, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 21 f4 \tvpmovsxbd %xmm4,%ymm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x21, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 21 ee \tvpmovsdb %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x22, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 22 e4 \tvpmovsxbq %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x22, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 22 ee \tvpmovsqb %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x23, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 23 e4 \tvpmovsxwd %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x23, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 23 ee \tvpmovsdw %zmm5,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x24, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 24 f4 \tvpmovsxwq %xmm4,%ymm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x24, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 24 ee \tvpmovsqw %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x25, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 25 e4 \tvpmovsxdq %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x25, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 25 ee \tvpmovsqd %zmm5,%ymm6{%k7}",},
{{0x62, 0xf2, 0x4d, 0x48, 0x26, 0xed, }, 6, 0, "", "",
"62 f2 4d 48 26 ed \tvptestmb %zmm5,%zmm6,%k5",},
{{0x62, 0xf2, 0xcd, 0x48, 0x26, 0xed, }, 6, 0, "", "",
"62 f2 cd 48 26 ed \tvptestmw %zmm5,%zmm6,%k5",},
{{0x62, 0xf2, 0x56, 0x48, 0x26, 0xec, }, 6, 0, "", "",
"62 f2 56 48 26 ec \tvptestnmb %zmm4,%zmm5,%k5",},
{{0x62, 0xf2, 0xd6, 0x48, 0x26, 0xec, }, 6, 0, "", "",
"62 f2 d6 48 26 ec \tvptestnmw %zmm4,%zmm5,%k5",},
{{0x62, 0xf2, 0x4d, 0x48, 0x27, 0xed, }, 6, 0, "", "",
"62 f2 4d 48 27 ed \tvptestmd %zmm5,%zmm6,%k5",},
{{0x62, 0xf2, 0xcd, 0x48, 0x27, 0xed, }, 6, 0, "", "",
"62 f2 cd 48 27 ed \tvptestmq %zmm5,%zmm6,%k5",},
{{0x62, 0xf2, 0x56, 0x48, 0x27, 0xec, }, 6, 0, "", "",
"62 f2 56 48 27 ec \tvptestnmd %zmm4,%zmm5,%k5",},
{{0x62, 0xf2, 0xd6, 0x48, 0x27, 0xec, }, 6, 0, "", "",
"62 f2 d6 48 27 ec \tvptestnmq %zmm4,%zmm5,%k5",},
{{0xc4, 0xe2, 0x4d, 0x28, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 28 d4 \tvpmuldq %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x7e, 0x48, 0x28, 0xf5, }, 6, 0, "", "",
"62 f2 7e 48 28 f5 \tvpmovm2b %k5,%zmm6",},
{{0x62, 0xf2, 0xfe, 0x48, 0x28, 0xf5, }, 6, 0, "", "",
"62 f2 fe 48 28 f5 \tvpmovm2w %k5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x29, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 29 d4 \tvpcmpeqq %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x7e, 0x48, 0x29, 0xee, }, 6, 0, "", "",
"62 f2 7e 48 29 ee \tvpmovb2m %zmm6,%k5",},
{{0x62, 0xf2, 0xfe, 0x48, 0x29, 0xee, }, 6, 0, "", "",
"62 f2 fe 48 29 ee \tvpmovw2m %zmm6,%k5",},
{{0xc4, 0xe2, 0x7d, 0x2a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 2a 21 \tvmovntdqa (%ecx),%ymm4",},
{{0x62, 0xf2, 0xfe, 0x48, 0x2a, 0xce, }, 6, 0, "", "",
"62 f2 fe 48 2a ce \tvpbroadcastmb2q %k6,%zmm1",},
{{0xc4, 0xe2, 0x5d, 0x2c, 0x31, }, 5, 0, "", "",
"c4 e2 5d 2c 31 \tvmaskmovps (%ecx),%ymm4,%ymm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x2c, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 2c f4 \tvscalefps %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x2c, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 2c f4 \tvscalefpd %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x5d, 0x2d, 0x31, }, 5, 0, "", "",
"c4 e2 5d 2d 31 \tvmaskmovpd (%ecx),%ymm4,%ymm6",},
{{0x62, 0xf2, 0x55, 0x0f, 0x2d, 0xf4, }, 6, 0, "", "",
"62 f2 55 0f 2d f4 \tvscalefss %xmm4,%xmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xd5, 0x0f, 0x2d, 0xf4, }, 6, 0, "", "",
"62 f2 d5 0f 2d f4 \tvscalefsd %xmm4,%xmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x30, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 30 e4 \tvpmovzxbw %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x30, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 30 ee \tvpmovwb %zmm5,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x31, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 31 f4 \tvpmovzxbd %xmm4,%ymm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x31, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 31 ee \tvpmovdb %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x32, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 32 e4 \tvpmovzxbq %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x32, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 32 ee \tvpmovqb %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x33, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 33 e4 \tvpmovzxwd %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x33, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 33 ee \tvpmovdw %zmm5,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x34, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 34 f4 \tvpmovzxwq %xmm4,%ymm6",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x34, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 34 ee \tvpmovqw %zmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x35, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 35 e4 \tvpmovzxdq %xmm4,%ymm4",},
{{0x62, 0xf2, 0x7e, 0x4f, 0x35, 0xee, }, 6, 0, "", "",
"62 f2 7e 4f 35 ee \tvpmovqd %zmm5,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x4d, 0x36, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 36 d4 \tvpermd %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x4d, 0x2f, 0x36, 0xd4, }, 6, 0, "", "",
"62 f2 4d 2f 36 d4 \tvpermd %ymm4,%ymm6,%ymm2{%k7}",},
{{0x62, 0xf2, 0xcd, 0x2f, 0x36, 0xd4, }, 6, 0, "", "",
"62 f2 cd 2f 36 d4 \tvpermq %ymm4,%ymm6,%ymm2{%k7}",},
{{0xc4, 0xe2, 0x4d, 0x38, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 38 d4 \tvpminsb %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x7e, 0x48, 0x38, 0xf5, }, 6, 0, "", "",
"62 f2 7e 48 38 f5 \tvpmovm2d %k5,%zmm6",},
{{0x62, 0xf2, 0xfe, 0x48, 0x38, 0xf5, }, 6, 0, "", "",
"62 f2 fe 48 38 f5 \tvpmovm2q %k5,%zmm6",},
{{0xc4, 0xe2, 0x69, 0x39, 0xd9, }, 5, 0, "", "",
"c4 e2 69 39 d9 \tvpminsd %xmm1,%xmm2,%xmm3",},
{{0x62, 0xf2, 0x55, 0x48, 0x39, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 39 f4 \tvpminsd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x39, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 39 f4 \tvpminsq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7e, 0x48, 0x39, 0xee, }, 6, 0, "", "",
"62 f2 7e 48 39 ee \tvpmovd2m %zmm6,%k5",},
{{0x62, 0xf2, 0xfe, 0x48, 0x39, 0xee, }, 6, 0, "", "",
"62 f2 fe 48 39 ee \tvpmovq2m %zmm6,%k5",},
{{0xc4, 0xe2, 0x4d, 0x3a, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3a d4 \tvpminuw %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x7e, 0x48, 0x3a, 0xf6, }, 6, 0, "", "",
"62 f2 7e 48 3a f6 \tvpbroadcastmw2d %k6,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x3b, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3b d4 \tvpminud %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x55, 0x48, 0x3b, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 3b f4 \tvpminud %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x3b, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 3b f4 \tvpminuq %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x3d, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3d d4 \tvpmaxsd %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x55, 0x48, 0x3d, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 3d f4 \tvpmaxsd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x3d, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 3d f4 \tvpmaxsq %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x3f, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3f d4 \tvpmaxud %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x55, 0x48, 0x3f, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 3f f4 \tvpmaxud %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x3f, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 3f f4 \tvpmaxuq %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x40, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 40 d4 \tvpmulld %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x55, 0x48, 0x40, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 40 f4 \tvpmulld %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x40, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 40 f4 \tvpmullq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x42, 0xf5, }, 6, 0, "", "",
"62 f2 7d 48 42 f5 \tvgetexpps %zmm5,%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x42, 0xf5, }, 6, 0, "", "",
"62 f2 fd 48 42 f5 \tvgetexppd %zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x0f, 0x43, 0xf4, }, 6, 0, "", "",
"62 f2 55 0f 43 f4 \tvgetexpss %xmm4,%xmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xe5, 0x0f, 0x43, 0xe2, }, 6, 0, "", "",
"62 f2 e5 0f 43 e2 \tvgetexpsd %xmm2,%xmm3,%xmm4{%k7}",},
{{0x62, 0xf2, 0x7d, 0x48, 0x44, 0xf5, }, 6, 0, "", "",
"62 f2 7d 48 44 f5 \tvplzcntd %zmm5,%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x44, 0xf5, }, 6, 0, "", "",
"62 f2 fd 48 44 f5 \tvplzcntq %zmm5,%zmm6",},
{{0xc4, 0xe2, 0x4d, 0x46, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 46 d4 \tvpsravd %ymm4,%ymm6,%ymm2",},
{{0x62, 0xf2, 0x55, 0x48, 0x46, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 46 f4 \tvpsravd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x46, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 46 f4 \tvpsravq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x4c, 0xf5, }, 6, 0, "", "",
"62 f2 7d 48 4c f5 \tvrcp14ps %zmm5,%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x4c, 0xf5, }, 6, 0, "", "",
"62 f2 fd 48 4c f5 \tvrcp14pd %zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x0f, 0x4d, 0xf4, }, 6, 0, "", "",
"62 f2 55 0f 4d f4 \tvrcp14ss %xmm4,%xmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xd5, 0x0f, 0x4d, 0xf4, }, 6, 0, "", "",
"62 f2 d5 0f 4d f4 \tvrcp14sd %xmm4,%xmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0x7d, 0x48, 0x4e, 0xf5, }, 6, 0, "", "",
"62 f2 7d 48 4e f5 \tvrsqrt14ps %zmm5,%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x4e, 0xf5, }, 6, 0, "", "",
"62 f2 fd 48 4e f5 \tvrsqrt14pd %zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x0f, 0x4f, 0xf4, }, 6, 0, "", "",
"62 f2 55 0f 4f f4 \tvrsqrt14ss %xmm4,%xmm5,%xmm6{%k7}",},
{{0x62, 0xf2, 0xd5, 0x0f, 0x4f, 0xf4, }, 6, 0, "", "",
"62 f2 d5 0f 4f f4 \tvrsqrt14sd %xmm4,%xmm5,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x79, 0x59, 0xf4, }, 5, 0, "", "",
"c4 e2 79 59 f4 \tvpbroadcastq %xmm4,%xmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x59, 0xf7, }, 6, 0, "", "",
"62 f2 7d 48 59 f7 \tvbroadcasti32x2 %xmm7,%zmm6",},
{{0xc4, 0xe2, 0x7d, 0x5a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 5a 21 \tvbroadcasti128 (%ecx),%ymm4",},
{{0x62, 0xf2, 0x7d, 0x48, 0x5a, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 5a 31 \tvbroadcasti32x4 (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x5a, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 5a 31 \tvbroadcasti64x2 (%ecx),%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x5b, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 5b 31 \tvbroadcasti32x8 (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x5b, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 5b 31 \tvbroadcasti64x4 (%ecx),%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x64, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 64 f4 \tvpblendmd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x64, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 64 f4 \tvpblendmq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x65, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 65 f4 \tvblendmps %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x65, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 65 f4 \tvblendmpd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x66, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 66 f4 \tvpblendmb %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x66, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 66 f4 \tvpblendmw %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x75, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 75 f4 \tvpermi2b %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x75, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 75 f4 \tvpermi2w %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x76, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 76 f4 \tvpermi2d %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x76, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 76 f4 \tvpermi2q %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x77, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 77 f4 \tvpermi2ps %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x77, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 77 f4 \tvpermi2pd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x08, 0x7a, 0xd8, }, 6, 0, "", "",
"62 f2 7d 08 7a d8 \tvpbroadcastb %eax,%xmm3",},
{{0x62, 0xf2, 0x7d, 0x08, 0x7b, 0xd8, }, 6, 0, "", "",
"62 f2 7d 08 7b d8 \tvpbroadcastw %eax,%xmm3",},
{{0x62, 0xf2, 0x7d, 0x08, 0x7c, 0xd8, }, 6, 0, "", "",
"62 f2 7d 08 7c d8 \tvpbroadcastd %eax,%xmm3",},
{{0x62, 0xf2, 0x55, 0x48, 0x7d, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 7d f4 \tvpermt2b %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x7d, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 7d f4 \tvpermt2w %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x7e, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 7e f4 \tvpermt2d %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x7e, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 7e f4 \tvpermt2q %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x55, 0x48, 0x7f, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 7f f4 \tvpermt2ps %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x7f, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 7f f4 \tvpermt2pd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x83, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 83 f4 \tvpmultishiftqb %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x88, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 88 31 \tvexpandps (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x88, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 88 31 \tvexpandpd (%ecx),%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x89, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 89 31 \tvpexpandd (%ecx),%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0x89, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 89 31 \tvpexpandq (%ecx),%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0x8a, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 8a 31 \tvcompressps %zmm6,(%ecx)",},
{{0x62, 0xf2, 0xfd, 0x48, 0x8a, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 8a 31 \tvcompresspd %zmm6,(%ecx)",},
{{0x62, 0xf2, 0x7d, 0x48, 0x8b, 0x31, }, 6, 0, "", "",
"62 f2 7d 48 8b 31 \tvpcompressd %zmm6,(%ecx)",},
{{0x62, 0xf2, 0xfd, 0x48, 0x8b, 0x31, }, 6, 0, "", "",
"62 f2 fd 48 8b 31 \tvpcompressq %zmm6,(%ecx)",},
{{0x62, 0xf2, 0x55, 0x48, 0x8d, 0xf4, }, 6, 0, "", "",
"62 f2 55 48 8d f4 \tvpermb %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0x8d, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 8d f4 \tvpermw %zmm4,%zmm5,%zmm6",},
{{0xc4, 0xe2, 0x69, 0x90, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 69 90 4c 7d 02 \tvpgatherdd %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
{{0xc4, 0xe2, 0xe9, 0x90, 0x4c, 0x7d, 0x04, }, 7, 0, "", "",
"c4 e2 e9 90 4c 7d 04 \tvpgatherdq %xmm2,0x4(%ebp,%xmm7,2),%xmm1",},
{{0x62, 0xf2, 0x7d, 0x49, 0x90, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 90 b4 fd 7b 00 00 00 \tvpgatherdd 0x7b(%ebp,%zmm7,8),%zmm6{%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0x90, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 90 b4 fd 7b 00 00 00 \tvpgatherdq 0x7b(%ebp,%ymm7,8),%zmm6{%k1}",},
{{0xc4, 0xe2, 0x69, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 69 91 4c 7d 02 \tvpgatherqd %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
{{0xc4, 0xe2, 0xe9, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 e9 91 4c 7d 02 \tvpgatherqq %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
{{0x62, 0xf2, 0x7d, 0x49, 0x91, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 91 b4 fd 7b 00 00 00 \tvpgatherqd 0x7b(%ebp,%zmm7,8),%ymm6{%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0x91, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 91 b4 fd 7b 00 00 00 \tvpgatherqq 0x7b(%ebp,%zmm7,8),%zmm6{%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xa0, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 a0 b4 fd 7b 00 00 00 \tvpscatterdd %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xa0, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 a0 b4 fd 7b 00 00 00 \tvpscatterdq %zmm6,0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xa1, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 a1 b4 fd 7b 00 00 00 \tvpscatterqd %ymm6,0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x29, 0xa1, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 29 a1 b4 fd 7b 00 00 00 \tvpscatterqq %ymm6,0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xa2, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 a2 b4 fd 7b 00 00 00 \tvscatterdps %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xa2, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 a2 b4 fd 7b 00 00 00 \tvscatterdpd %zmm6,0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xa3, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 a3 b4 fd 7b 00 00 00 \tvscatterqps %ymm6,0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xa3, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 a3 b4 fd 7b 00 00 00 \tvscatterqpd %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xd5, 0x48, 0xb4, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 b4 f4 \tvpmadd52luq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0xd5, 0x48, 0xb5, 0xf4, }, 6, 0, "", "",
"62 f2 d5 48 b5 f4 \tvpmadd52huq %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0xc4, 0xf5, }, 6, 0, "", "",
"62 f2 7d 48 c4 f5 \tvpconflictd %zmm5,%zmm6",},
{{0x62, 0xf2, 0xfd, 0x48, 0xc4, 0xf5, }, 6, 0, "", "",
"62 f2 fd 48 c4 f5 \tvpconflictq %zmm5,%zmm6",},
{{0x62, 0xf2, 0x7d, 0x48, 0xc8, 0xfe, }, 6, 0, "", "",
"62 f2 7d 48 c8 fe \tvexp2ps %zmm6,%zmm7",},
{{0x62, 0xf2, 0xfd, 0x48, 0xc8, 0xfe, }, 6, 0, "", "",
"62 f2 fd 48 c8 fe \tvexp2pd %zmm6,%zmm7",},
{{0x62, 0xf2, 0x7d, 0x48, 0xca, 0xfe, }, 6, 0, "", "",
"62 f2 7d 48 ca fe \tvrcp28ps %zmm6,%zmm7",},
{{0x62, 0xf2, 0xfd, 0x48, 0xca, 0xfe, }, 6, 0, "", "",
"62 f2 fd 48 ca fe \tvrcp28pd %zmm6,%zmm7",},
{{0x62, 0xf2, 0x4d, 0x0f, 0xcb, 0xfd, }, 6, 0, "", "",
"62 f2 4d 0f cb fd \tvrcp28ss %xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf2, 0xcd, 0x0f, 0xcb, 0xfd, }, 6, 0, "", "",
"62 f2 cd 0f cb fd \tvrcp28sd %xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf2, 0x7d, 0x48, 0xcc, 0xfe, }, 6, 0, "", "",
"62 f2 7d 48 cc fe \tvrsqrt28ps %zmm6,%zmm7",},
{{0x62, 0xf2, 0xfd, 0x48, 0xcc, 0xfe, }, 6, 0, "", "",
"62 f2 fd 48 cc fe \tvrsqrt28pd %zmm6,%zmm7",},
{{0x62, 0xf2, 0x4d, 0x0f, 0xcd, 0xfd, }, 6, 0, "", "",
"62 f2 4d 0f cd fd \tvrsqrt28ss %xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf2, 0xcd, 0x0f, 0xcd, 0xfd, }, 6, 0, "", "",
"62 f2 cd 0f cd fd \tvrsqrt28sd %xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf3, 0x4d, 0x48, 0x03, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 03 fd 12 \tvalignd $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x03, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 03 fd 12 \tvalignq $0x12,%zmm5,%zmm6,%zmm7",},
{{0xc4, 0xe3, 0x7d, 0x08, 0xd6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 08 d6 05 \tvroundps $0x5,%ymm6,%ymm2",},
{{0x62, 0xf3, 0x7d, 0x48, 0x08, 0xf5, 0x12, }, 7, 0, "", "",
"62 f3 7d 48 08 f5 12 \tvrndscaleps $0x12,%zmm5,%zmm6",},
{{0xc4, 0xe3, 0x7d, 0x09, 0xd6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 09 d6 05 \tvroundpd $0x5,%ymm6,%ymm2",},
{{0x62, 0xf3, 0xfd, 0x48, 0x09, 0xf5, 0x12, }, 7, 0, "", "",
"62 f3 fd 48 09 f5 12 \tvrndscalepd $0x12,%zmm5,%zmm6",},
{{0xc4, 0xe3, 0x49, 0x0a, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 49 0a d4 05 \tvroundss $0x5,%xmm4,%xmm6,%xmm2",},
{{0x62, 0xf3, 0x55, 0x0f, 0x0a, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 55 0f 0a f4 12 \tvrndscaless $0x12,%xmm4,%xmm5,%xmm6{%k7}",},
{{0xc4, 0xe3, 0x49, 0x0b, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 49 0b d4 05 \tvroundsd $0x5,%xmm4,%xmm6,%xmm2",},
{{0x62, 0xf3, 0xd5, 0x0f, 0x0b, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 d5 0f 0b f4 12 \tvrndscalesd $0x12,%xmm4,%xmm5,%xmm6{%k7}",},
{{0xc4, 0xe3, 0x5d, 0x18, 0xf4, 0x05, }, 6, 0, "", "",
"c4 e3 5d 18 f4 05 \tvinsertf128 $0x5,%xmm4,%ymm4,%ymm6",},
{{0x62, 0xf3, 0x55, 0x4f, 0x18, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 55 4f 18 f4 12 \tvinsertf32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
{{0x62, 0xf3, 0xd5, 0x4f, 0x18, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 d5 4f 18 f4 12 \tvinsertf64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
{{0xc4, 0xe3, 0x7d, 0x19, 0xe4, 0x05, }, 6, 0, "", "",
"c4 e3 7d 19 e4 05 \tvextractf128 $0x5,%ymm4,%xmm4",},
{{0x62, 0xf3, 0x7d, 0x4f, 0x19, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 7d 4f 19 ee 12 \tvextractf32x4 $0x12,%zmm5,%xmm6{%k7}",},
{{0x62, 0xf3, 0xfd, 0x4f, 0x19, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 fd 4f 19 ee 12 \tvextractf64x2 $0x12,%zmm5,%xmm6{%k7}",},
{{0x62, 0xf3, 0x4d, 0x4f, 0x1a, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 4f 1a fd 12 \tvinsertf32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
{{0x62, 0xf3, 0xcd, 0x4f, 0x1a, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 4f 1a fd 12 \tvinsertf64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
{{0x62, 0xf3, 0x7d, 0x4f, 0x1b, 0xf7, 0x12, }, 7, 0, "", "",
"62 f3 7d 4f 1b f7 12 \tvextractf32x8 $0x12,%zmm6,%ymm7{%k7}",},
{{0x62, 0xf3, 0xfd, 0x4f, 0x1b, 0xf7, 0x12, }, 7, 0, "", "",
"62 f3 fd 4f 1b f7 12 \tvextractf64x4 $0x12,%zmm6,%ymm7{%k7}",},
{{0x62, 0xf3, 0x45, 0x48, 0x1e, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 45 48 1e ee 12 \tvpcmpud $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0xc5, 0x48, 0x1e, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 c5 48 1e ee 12 \tvpcmpuq $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0x45, 0x48, 0x1f, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 45 48 1f ee 12 \tvpcmpd $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0xc5, 0x48, 0x1f, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 c5 48 1f ee 12 \tvpcmpq $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0x4d, 0x48, 0x23, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 23 fd 12 \tvshuff32x4 $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x23, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 23 fd 12 \tvshuff64x2 $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x48, 0x25, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 25 fd 12 \tvpternlogd $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x25, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 25 fd 12 \tvpternlogq $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x7d, 0x48, 0x26, 0xfe, 0x12, }, 7, 0, "", "",
"62 f3 7d 48 26 fe 12 \tvgetmantps $0x12,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xfd, 0x48, 0x26, 0xfe, 0x12, }, 7, 0, "", "",
"62 f3 fd 48 26 fe 12 \tvgetmantpd $0x12,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x0f, 0x27, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 0f 27 fd 12 \tvgetmantss $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf3, 0xcd, 0x0f, 0x27, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 0f 27 fd 12 \tvgetmantsd $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
{{0xc4, 0xe3, 0x5d, 0x38, 0xf4, 0x05, }, 6, 0, "", "",
"c4 e3 5d 38 f4 05 \tvinserti128 $0x5,%xmm4,%ymm4,%ymm6",},
{{0x62, 0xf3, 0x55, 0x4f, 0x38, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 55 4f 38 f4 12 \tvinserti32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
{{0x62, 0xf3, 0xd5, 0x4f, 0x38, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 d5 4f 38 f4 12 \tvinserti64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
{{0xc4, 0xe3, 0x7d, 0x39, 0xe6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 39 e6 05 \tvextracti128 $0x5,%ymm4,%xmm6",},
{{0x62, 0xf3, 0x7d, 0x4f, 0x39, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 7d 4f 39 ee 12 \tvextracti32x4 $0x12,%zmm5,%xmm6{%k7}",},
{{0x62, 0xf3, 0xfd, 0x4f, 0x39, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 fd 4f 39 ee 12 \tvextracti64x2 $0x12,%zmm5,%xmm6{%k7}",},
{{0x62, 0xf3, 0x4d, 0x4f, 0x3a, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 4f 3a fd 12 \tvinserti32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
{{0x62, 0xf3, 0xcd, 0x4f, 0x3a, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 4f 3a fd 12 \tvinserti64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
{{0x62, 0xf3, 0x7d, 0x4f, 0x3b, 0xf7, 0x12, }, 7, 0, "", "",
"62 f3 7d 4f 3b f7 12 \tvextracti32x8 $0x12,%zmm6,%ymm7{%k7}",},
{{0x62, 0xf3, 0xfd, 0x4f, 0x3b, 0xf7, 0x12, }, 7, 0, "", "",
"62 f3 fd 4f 3b f7 12 \tvextracti64x4 $0x12,%zmm6,%ymm7{%k7}",},
{{0x62, 0xf3, 0x45, 0x48, 0x3e, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 45 48 3e ee 12 \tvpcmpub $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0xc5, 0x48, 0x3e, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 c5 48 3e ee 12 \tvpcmpuw $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0x45, 0x48, 0x3f, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 45 48 3f ee 12 \tvpcmpb $0x12,%zmm6,%zmm7,%k5",},
{{0x62, 0xf3, 0xc5, 0x48, 0x3f, 0xee, 0x12, }, 7, 0, "", "",
"62 f3 c5 48 3f ee 12 \tvpcmpw $0x12,%zmm6,%zmm7,%k5",},
{{0xc4, 0xe3, 0x4d, 0x42, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 4d 42 d4 05 \tvmpsadbw $0x5,%ymm4,%ymm6,%ymm2",},
{{0x62, 0xf3, 0x55, 0x48, 0x42, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 55 48 42 f4 12 \tvdbpsadbw $0x12,%zmm4,%zmm5,%zmm6",},
{{0x62, 0xf3, 0x4d, 0x48, 0x43, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 43 fd 12 \tvshufi32x4 $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x43, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 43 fd 12 \tvshufi64x2 $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x48, 0x50, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 50 fd 12 \tvrangeps $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x50, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 50 fd 12 \tvrangepd $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x08, 0x51, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 08 51 fd 12 \tvrangess $0x12,%xmm5,%xmm6,%xmm7",},
{{0x62, 0xf3, 0xcd, 0x08, 0x51, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 08 51 fd 12 \tvrangesd $0x12,%xmm5,%xmm6,%xmm7",},
{{0x62, 0xf3, 0x4d, 0x48, 0x54, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 48 54 fd 12 \tvfixupimmps $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xcd, 0x48, 0x54, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 48 54 fd 12 \tvfixupimmpd $0x12,%zmm5,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x0f, 0x55, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 0f 55 fd 12 \tvfixupimmss $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf3, 0xcd, 0x0f, 0x55, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 0f 55 fd 12 \tvfixupimmsd $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
{{0x62, 0xf3, 0x7d, 0x48, 0x56, 0xfe, 0x12, }, 7, 0, "", "",
"62 f3 7d 48 56 fe 12 \tvreduceps $0x12,%zmm6,%zmm7",},
{{0x62, 0xf3, 0xfd, 0x48, 0x56, 0xfe, 0x12, }, 7, 0, "", "",
"62 f3 fd 48 56 fe 12 \tvreducepd $0x12,%zmm6,%zmm7",},
{{0x62, 0xf3, 0x4d, 0x08, 0x57, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 4d 08 57 fd 12 \tvreducess $0x12,%xmm5,%xmm6,%xmm7",},
{{0x62, 0xf3, 0xcd, 0x08, 0x57, 0xfd, 0x12, }, 7, 0, "", "",
"62 f3 cd 08 57 fd 12 \tvreducesd $0x12,%xmm5,%xmm6,%xmm7",},
{{0x62, 0xf3, 0x7d, 0x48, 0x66, 0xef, 0x12, }, 7, 0, "", "",
"62 f3 7d 48 66 ef 12 \tvfpclassps $0x12,%zmm7,%k5",},
{{0x62, 0xf3, 0xfd, 0x48, 0x66, 0xef, 0x12, }, 7, 0, "", "",
"62 f3 fd 48 66 ef 12 \tvfpclasspd $0x12,%zmm7,%k5",},
{{0x62, 0xf3, 0x7d, 0x08, 0x67, 0xef, 0x12, }, 7, 0, "", "",
"62 f3 7d 08 67 ef 12 \tvfpclassss $0x12,%xmm7,%k5",},
{{0x62, 0xf3, 0xfd, 0x08, 0x67, 0xef, 0x12, }, 7, 0, "", "",
"62 f3 fd 08 67 ef 12 \tvfpclasssd $0x12,%xmm7,%k5",},
{{0x62, 0xf1, 0x4d, 0x48, 0x72, 0xc5, 0x12, }, 7, 0, "", "",
"62 f1 4d 48 72 c5 12 \tvprord $0x12,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xcd, 0x48, 0x72, 0xc5, 0x12, }, 7, 0, "", "",
"62 f1 cd 48 72 c5 12 \tvprorq $0x12,%zmm5,%zmm6",},
{{0x62, 0xf1, 0x4d, 0x48, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
"62 f1 4d 48 72 cd 12 \tvprold $0x12,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xcd, 0x48, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
"62 f1 cd 48 72 cd 12 \tvprolq $0x12,%zmm5,%zmm6",},
{{0x0f, 0x72, 0xe6, 0x02, }, 4, 0, "", "",
"0f 72 e6 02 \tpsrad $0x2,%mm6",},
{{0xc5, 0xed, 0x72, 0xe6, 0x05, }, 5, 0, "", "",
"c5 ed 72 e6 05 \tvpsrad $0x5,%ymm6,%ymm2",},
{{0x62, 0xf1, 0x6d, 0x48, 0x72, 0xe6, 0x05, }, 7, 0, "", "",
"62 f1 6d 48 72 e6 05 \tvpsrad $0x5,%zmm6,%zmm2",},
{{0x62, 0xf1, 0xed, 0x48, 0x72, 0xe6, 0x05, }, 7, 0, "", "",
"62 f1 ed 48 72 e6 05 \tvpsraq $0x5,%zmm6,%zmm2",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c6 8c fd 7b 00 00 00 \tvgatherpf0dps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c6 8c fd 7b 00 00 00 \tvgatherpf0dpd 0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c6 94 fd 7b 00 00 00 \tvgatherpf1dps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c6 94 fd 7b 00 00 00 \tvgatherpf1dpd 0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c6 ac fd 7b 00 00 00 \tvscatterpf0dps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c6 ac fd 7b 00 00 00 \tvscatterpf0dpd 0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c6 b4 fd 7b 00 00 00 \tvscatterpf1dps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c6 b4 fd 7b 00 00 00 \tvscatterpf1dpd 0x7b(%ebp,%ymm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c7 8c fd 7b 00 00 00 \tvgatherpf0qps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c7 8c fd 7b 00 00 00 \tvgatherpf0qpd 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c7 94 fd 7b 00 00 00 \tvgatherpf1qps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c7 94 fd 7b 00 00 00 \tvgatherpf1qpd 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c7 ac fd 7b 00 00 00 \tvscatterpf0qps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c7 ac fd 7b 00 00 00 \tvscatterpf0qpd 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 7d 49 c7 b4 fd 7b 00 00 00 \tvscatterpf1qps 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 f2 fd 49 c7 b4 fd 7b 00 00 00 \tvscatterpf1qpd 0x7b(%ebp,%zmm7,8){%k1}",},
{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 48 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x4f, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 4f 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6{%k7}",},
{{0x62, 0xf1, 0xd5, 0xcf, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 cf 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6{%k7}{z}",},
{{0x62, 0xf1, 0xd5, 0x18, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 18 58 f4 \tvaddpd {rn-sae},%zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 58 58 f4 \tvaddpd {ru-sae},%zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x38, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 38 58 f4 \tvaddpd {rd-sae},%zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x78, 0x58, 0xf4, }, 6, 0, "", "",
"62 f1 d5 78 58 f4 \tvaddpd {rz-sae},%zmm4,%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0x31, }, 6, 0, "", "",
"62 f1 d5 48 58 31 \tvaddpd (%ecx),%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0xb4, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 11, 0, "", "",
"62 f1 d5 48 58 b4 c8 23 01 00 00 \tvaddpd 0x123(%eax,%ecx,8),%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0x31, }, 6, 0, "", "",
"62 f1 d5 58 58 31 \tvaddpd (%ecx){1to8},%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
"62 f1 d5 48 58 72 7f \tvaddpd 0x1fc0(%edx),%zmm5,%zmm6",},
{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
"62 f1 d5 58 58 72 7f \tvaddpd 0x3f8(%edx){1to8},%zmm5,%zmm6",},
{{0x62, 0xf1, 0x4c, 0x58, 0xc2, 0x6a, 0x7f, 0x08, }, 8, 0, "", "",
"62 f1 4c 58 c2 6a 7f 08 \tvcmpeq_uqps 0x1fc(%edx){1to16},%zmm6,%k5",},
{{0x62, 0xf1, 0xe7, 0x0f, 0xc2, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, 0x01, }, 12, 0, "", "",
"62 f1 e7 0f c2 ac c8 23 01 00 00 01 \tvcmpltsd 0x123(%eax,%ecx,8),%xmm3,%k5{%k7}",},
{{0x62, 0xf1, 0xd7, 0x1f, 0xc2, 0xec, 0x02, }, 7, 0, "", "",
"62 f1 d7 1f c2 ec 02 \tvcmplesd {sae},%xmm4,%xmm5,%k5{%k7}",},
{{0x62, 0xf3, 0x5d, 0x0f, 0x27, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, 0x5b, }, 12, 0, "", "",
"62 f3 5d 0f 27 ac c8 23 01 00 00 5b \tvgetmantss $0x5b,0x123(%eax,%ecx,8),%xmm4,%xmm5{%k7}",},
{{0xf3, 0x0f, 0x1b, 0x00, }, 4, 0, "", "",
"f3 0f 1b 00 \tbndmk (%eax),%bnd0",},
{{0xf3, 0x0f, 0x1b, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
...@@ -309,19 +1319,19 @@
{{0x0f, 0x1b, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"0f 1b 84 08 78 56 34 12 \tbndstx %bnd0,0x12345678(%eax,%ecx,1)",},
{{0xf2, 0xe8, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "call", "unconditional",
"f2 e8 fc ff ff ff \tbnd call fce <main+0xfce>",},
{{0xf2, 0xff, 0x10, }, 3, 0, "call", "indirect",
"f2 ff 10 \tbnd call *(%eax)",},
{{0xf2, 0xc3, }, 2, 0, "ret", "indirect",
"f2 c3 \tbnd ret ",},
{{0xf2, 0xe9, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "jmp", "unconditional",
"f2 e9 fc ff ff ff \tbnd jmp fd9 <main+0xfd9>",},
{{0xf2, 0xe9, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "jmp", "unconditional",
"f2 e9 fc ff ff ff \tbnd jmp fdf <main+0xfdf>",},
{{0xf2, 0xff, 0x21, }, 3, 0, "jmp", "indirect",
"f2 ff 21 \tbnd jmp *(%ecx)",},
{{0xf2, 0x0f, 0x85, 0xfc, 0xff, 0xff, 0xff, }, 7, 0xfffffffc, "jcc", "conditional",
"f2 0f 85 fc ff ff ff \tbnd jne fe9 <main+0xfe9>",},
{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
...
...@@ -6,6 +6,938 @@
{{0x0f, 0x31, }, 2, 0, "", "",
"0f 31 \trdtsc ",},
{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
{{0x48, 0x0f, 0x41, 0xd8, }, 4, 0, "", "",
"48 0f 41 d8 \tcmovno %rax,%rbx",},
{{0x48, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"48 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%rax),%rcx",},
{{0x66, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%rax),%cx",},
{{0x48, 0x0f, 0x44, 0xd8, }, 4, 0, "", "",
"48 0f 44 d8 \tcmove %rax,%rbx",},
{{0x48, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"48 0f 44 88 78 56 34 12 \tcmove 0x12345678(%rax),%rcx",},
{{0x66, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"66 0f 44 88 78 56 34 12 \tcmove 0x12345678(%rax),%cx",},
{{0x0f, 0x90, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 90 80 78 56 34 12 \tseto 0x12345678(%rax)",},
{{0x0f, 0x91, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 91 80 78 56 34 12 \tsetno 0x12345678(%rax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
{{0x0f, 0x98, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 98 80 78 56 34 12 \tsets 0x12345678(%rax)",},
{{0x0f, 0x99, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
"0f 99 80 78 56 34 12 \tsetns 0x12345678(%rax)",},
{{0xc5, 0xcc, 0x41, 0xef, }, 4, 0, "", "",
"c5 cc 41 ef \tkandw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x41, 0xef, }, 5, 0, "", "",
"c4 e1 cc 41 ef \tkandq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x41, 0xef, }, 4, 0, "", "",
"c5 cd 41 ef \tkandb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x41, 0xef, }, 5, 0, "", "",
"c4 e1 cd 41 ef \tkandd %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x42, 0xef, }, 4, 0, "", "",
"c5 cc 42 ef \tkandnw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x42, 0xef, }, 5, 0, "", "",
"c4 e1 cc 42 ef \tkandnq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x42, 0xef, }, 4, 0, "", "",
"c5 cd 42 ef \tkandnb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x42, 0xef, }, 5, 0, "", "",
"c4 e1 cd 42 ef \tkandnd %k7,%k6,%k5",},
{{0xc5, 0xf8, 0x44, 0xf7, }, 4, 0, "", "",
"c5 f8 44 f7 \tknotw %k7,%k6",},
{{0xc4, 0xe1, 0xf8, 0x44, 0xf7, }, 5, 0, "", "",
"c4 e1 f8 44 f7 \tknotq %k7,%k6",},
{{0xc5, 0xf9, 0x44, 0xf7, }, 4, 0, "", "",
"c5 f9 44 f7 \tknotb %k7,%k6",},
{{0xc4, 0xe1, 0xf9, 0x44, 0xf7, }, 5, 0, "", "",
"c4 e1 f9 44 f7 \tknotd %k7,%k6",},
{{0xc5, 0xcc, 0x45, 0xef, }, 4, 0, "", "",
"c5 cc 45 ef \tkorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x45, 0xef, }, 5, 0, "", "",
"c4 e1 cc 45 ef \tkorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x45, 0xef, }, 4, 0, "", "",
"c5 cd 45 ef \tkorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x45, 0xef, }, 5, 0, "", "",
"c4 e1 cd 45 ef \tkord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x46, 0xef, }, 4, 0, "", "",
"c5 cc 46 ef \tkxnorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x46, 0xef, }, 5, 0, "", "",
"c4 e1 cc 46 ef \tkxnorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x46, 0xef, }, 4, 0, "", "",
"c5 cd 46 ef \tkxnorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x46, 0xef, }, 5, 0, "", "",
"c4 e1 cd 46 ef \tkxnord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x47, 0xef, }, 4, 0, "", "",
"c5 cc 47 ef \tkxorw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x47, 0xef, }, 5, 0, "", "",
"c4 e1 cc 47 ef \tkxorq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x47, 0xef, }, 4, 0, "", "",
"c5 cd 47 ef \tkxorb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x47, 0xef, }, 5, 0, "", "",
"c4 e1 cd 47 ef \tkxord %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x4a, 0xef, }, 4, 0, "", "",
"c5 cc 4a ef \tkaddw %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x4a, 0xef, }, 5, 0, "", "",
"c4 e1 cc 4a ef \tkaddq %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x4a, 0xef, }, 4, 0, "", "",
"c5 cd 4a ef \tkaddb %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcd, 0x4a, 0xef, }, 5, 0, "", "",
"c4 e1 cd 4a ef \tkaddd %k7,%k6,%k5",},
{{0xc5, 0xcd, 0x4b, 0xef, }, 4, 0, "", "",
"c5 cd 4b ef \tkunpckbw %k7,%k6,%k5",},
{{0xc5, 0xcc, 0x4b, 0xef, }, 4, 0, "", "",
"c5 cc 4b ef \tkunpckwd %k7,%k6,%k5",},
{{0xc4, 0xe1, 0xcc, 0x4b, 0xef, }, 5, 0, "", "",
"c4 e1 cc 4b ef \tkunpckdq %k7,%k6,%k5",},
{{0xc5, 0xf8, 0x90, 0xee, }, 4, 0, "", "",
"c5 f8 90 ee \tkmovw %k6,%k5",},
{{0xc5, 0xf8, 0x90, 0x29, }, 4, 0, "", "",
"c5 f8 90 29 \tkmovw (%rcx),%k5",},
{{0xc4, 0xa1, 0x78, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 78 90 ac f0 23 01 00 00 \tkmovw 0x123(%rax,%r14,8),%k5",},
{{0xc5, 0xf8, 0x91, 0x29, }, 4, 0, "", "",
"c5 f8 91 29 \tkmovw %k5,(%rcx)",},
{{0xc4, 0xa1, 0x78, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 78 91 ac f0 23 01 00 00 \tkmovw %k5,0x123(%rax,%r14,8)",},
{{0xc5, 0xf8, 0x92, 0xe8, }, 4, 0, "", "",
"c5 f8 92 e8 \tkmovw %eax,%k5",},
{{0xc5, 0xf8, 0x92, 0xed, }, 4, 0, "", "",
"c5 f8 92 ed \tkmovw %ebp,%k5",},
{{0xc4, 0xc1, 0x78, 0x92, 0xed, }, 5, 0, "", "",
"c4 c1 78 92 ed \tkmovw %r13d,%k5",},
{{0xc5, 0xf8, 0x93, 0xc5, }, 4, 0, "", "",
"c5 f8 93 c5 \tkmovw %k5,%eax",},
{{0xc5, 0xf8, 0x93, 0xed, }, 4, 0, "", "",
"c5 f8 93 ed \tkmovw %k5,%ebp",},
{{0xc5, 0x78, 0x93, 0xed, }, 4, 0, "", "",
"c5 78 93 ed \tkmovw %k5,%r13d",},
{{0xc4, 0xe1, 0xf8, 0x90, 0xee, }, 5, 0, "", "",
"c4 e1 f8 90 ee \tkmovq %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x90, 0x29, }, 5, 0, "", "",
"c4 e1 f8 90 29 \tkmovq (%rcx),%k5",},
{{0xc4, 0xa1, 0xf8, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 f8 90 ac f0 23 01 00 00 \tkmovq 0x123(%rax,%r14,8),%k5",},
{{0xc4, 0xe1, 0xf8, 0x91, 0x29, }, 5, 0, "", "",
"c4 e1 f8 91 29 \tkmovq %k5,(%rcx)",},
{{0xc4, 0xa1, 0xf8, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 f8 91 ac f0 23 01 00 00 \tkmovq %k5,0x123(%rax,%r14,8)",},
{{0xc4, 0xe1, 0xfb, 0x92, 0xe8, }, 5, 0, "", "",
"c4 e1 fb 92 e8 \tkmovq %rax,%k5",},
{{0xc4, 0xe1, 0xfb, 0x92, 0xed, }, 5, 0, "", "",
"c4 e1 fb 92 ed \tkmovq %rbp,%k5",},
{{0xc4, 0xc1, 0xfb, 0x92, 0xed, }, 5, 0, "", "",
"c4 c1 fb 92 ed \tkmovq %r13,%k5",},
{{0xc4, 0xe1, 0xfb, 0x93, 0xc5, }, 5, 0, "", "",
"c4 e1 fb 93 c5 \tkmovq %k5,%rax",},
{{0xc4, 0xe1, 0xfb, 0x93, 0xed, }, 5, 0, "", "",
"c4 e1 fb 93 ed \tkmovq %k5,%rbp",},
{{0xc4, 0x61, 0xfb, 0x93, 0xed, }, 5, 0, "", "",
"c4 61 fb 93 ed \tkmovq %k5,%r13",},
{{0xc5, 0xf9, 0x90, 0xee, }, 4, 0, "", "",
"c5 f9 90 ee \tkmovb %k6,%k5",},
{{0xc5, 0xf9, 0x90, 0x29, }, 4, 0, "", "",
"c5 f9 90 29 \tkmovb (%rcx),%k5",},
{{0xc4, 0xa1, 0x79, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 79 90 ac f0 23 01 00 00 \tkmovb 0x123(%rax,%r14,8),%k5",},
{{0xc5, 0xf9, 0x91, 0x29, }, 4, 0, "", "",
"c5 f9 91 29 \tkmovb %k5,(%rcx)",},
{{0xc4, 0xa1, 0x79, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 79 91 ac f0 23 01 00 00 \tkmovb %k5,0x123(%rax,%r14,8)",},
{{0xc5, 0xf9, 0x92, 0xe8, }, 4, 0, "", "",
"c5 f9 92 e8 \tkmovb %eax,%k5",},
{{0xc5, 0xf9, 0x92, 0xed, }, 4, 0, "", "",
"c5 f9 92 ed \tkmovb %ebp,%k5",},
{{0xc4, 0xc1, 0x79, 0x92, 0xed, }, 5, 0, "", "",
"c4 c1 79 92 ed \tkmovb %r13d,%k5",},
{{0xc5, 0xf9, 0x93, 0xc5, }, 4, 0, "", "",
"c5 f9 93 c5 \tkmovb %k5,%eax",},
{{0xc5, 0xf9, 0x93, 0xed, }, 4, 0, "", "",
"c5 f9 93 ed \tkmovb %k5,%ebp",},
{{0xc5, 0x79, 0x93, 0xed, }, 4, 0, "", "",
"c5 79 93 ed \tkmovb %k5,%r13d",},
{{0xc4, 0xe1, 0xf9, 0x90, 0xee, }, 5, 0, "", "",
"c4 e1 f9 90 ee \tkmovd %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x90, 0x29, }, 5, 0, "", "",
"c4 e1 f9 90 29 \tkmovd (%rcx),%k5",},
{{0xc4, 0xa1, 0xf9, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 f9 90 ac f0 23 01 00 00 \tkmovd 0x123(%rax,%r14,8),%k5",},
{{0xc4, 0xe1, 0xf9, 0x91, 0x29, }, 5, 0, "", "",
"c4 e1 f9 91 29 \tkmovd %k5,(%rcx)",},
{{0xc4, 0xa1, 0xf9, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
"c4 a1 f9 91 ac f0 23 01 00 00 \tkmovd %k5,0x123(%rax,%r14,8)",},
{{0xc5, 0xfb, 0x92, 0xe8, }, 4, 0, "", "",
"c5 fb 92 e8 \tkmovd %eax,%k5",},
{{0xc5, 0xfb, 0x92, 0xed, }, 4, 0, "", "",
"c5 fb 92 ed \tkmovd %ebp,%k5",},
{{0xc4, 0xc1, 0x7b, 0x92, 0xed, }, 5, 0, "", "",
"c4 c1 7b 92 ed \tkmovd %r13d,%k5",},
{{0xc5, 0xfb, 0x93, 0xc5, }, 4, 0, "", "",
"c5 fb 93 c5 \tkmovd %k5,%eax",},
{{0xc5, 0xfb, 0x93, 0xed, }, 4, 0, "", "",
"c5 fb 93 ed \tkmovd %k5,%ebp",},
{{0xc5, 0x7b, 0x93, 0xed, }, 4, 0, "", "",
"c5 7b 93 ed \tkmovd %k5,%r13d",},
{{0xc5, 0xf8, 0x98, 0xee, }, 4, 0, "", "",
"c5 f8 98 ee \tkortestw %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x98, 0xee, }, 5, 0, "", "",
"c4 e1 f8 98 ee \tkortestq %k6,%k5",},
{{0xc5, 0xf9, 0x98, 0xee, }, 4, 0, "", "",
"c5 f9 98 ee \tkortestb %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x98, 0xee, }, 5, 0, "", "",
"c4 e1 f9 98 ee \tkortestd %k6,%k5",},
{{0xc5, 0xf8, 0x99, 0xee, }, 4, 0, "", "",
"c5 f8 99 ee \tktestw %k6,%k5",},
{{0xc4, 0xe1, 0xf8, 0x99, 0xee, }, 5, 0, "", "",
"c4 e1 f8 99 ee \tktestq %k6,%k5",},
{{0xc5, 0xf9, 0x99, 0xee, }, 4, 0, "", "",
"c5 f9 99 ee \tktestb %k6,%k5",},
{{0xc4, 0xe1, 0xf9, 0x99, 0xee, }, 5, 0, "", "",
"c4 e1 f9 99 ee \tktestd %k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x30, 0xee, 0x12, }, 6, 0, "", "",
"c4 e3 f9 30 ee 12 \tkshiftrw $0x12,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x31, 0xee, 0x5b, }, 6, 0, "", "",
"c4 e3 f9 31 ee 5b \tkshiftrq $0x5b,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x32, 0xee, 0x12, }, 6, 0, "", "",
"c4 e3 f9 32 ee 12 \tkshiftlw $0x12,%k6,%k5",},
{{0xc4, 0xe3, 0xf9, 0x33, 0xee, 0x5b, }, 6, 0, "", "",
"c4 e3 f9 33 ee 5b \tkshiftlq $0x5b,%k6,%k5",},
{{0xc5, 0xf8, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 f8 5b f5 \tvcvtdq2ps %xmm5,%xmm6",},
{{0x62, 0x91, 0xfc, 0x4f, 0x5b, 0xf5, }, 6, 0, "", "",
"62 91 fc 4f 5b f5 \tvcvtqq2ps %zmm29,%ymm6{%k7}",},
{{0xc5, 0xf9, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 f9 5b f5 \tvcvtps2dq %xmm5,%xmm6",},
{{0xc5, 0xfa, 0x5b, 0xf5, }, 4, 0, "", "",
"c5 fa 5b f5 \tvcvttps2dq %xmm5,%xmm6",},
{{0x0f, 0x6f, 0xe0, }, 3, 0, "", "",
"0f 6f e0 \tmovq %mm0,%mm4",},
{{0xc5, 0xfd, 0x6f, 0xf4, }, 4, 0, "", "",
"c5 fd 6f f4 \tvmovdqa %ymm4,%ymm6",},
{{0x62, 0x01, 0x7d, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
"62 01 7d 48 6f d1 \tvmovdqa32 %zmm25,%zmm26",},
{{0x62, 0x01, 0xfd, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
"62 01 fd 48 6f d1 \tvmovdqa64 %zmm25,%zmm26",},
{{0xc5, 0xfe, 0x6f, 0xf4, }, 4, 0, "", "",
"c5 fe 6f f4 \tvmovdqu %ymm4,%ymm6",},
{{0x62, 0x01, 0x7e, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 01 7e 48 6f f5 \tvmovdqu32 %zmm29,%zmm30",},
{{0x62, 0x01, 0xfe, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
"62 01 fe 48 6f d1 \tvmovdqu64 %zmm25,%zmm26",},
{{0x62, 0x01, 0x7f, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
"62 01 7f 48 6f f5 \tvmovdqu8 %zmm29,%zmm30",},
{{0x62, 0x01, 0xff, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
"62 01 ff 48 6f d1 \tvmovdqu16 %zmm25,%zmm26",},
{{0x0f, 0x78, 0xc3, }, 3, 0, "", "",
"0f 78 c3 \tvmread %rax,%rbx",},
{{0x62, 0x01, 0x7c, 0x48, 0x78, 0xd1, }, 6, 0, "", "",
"62 01 7c 48 78 d1 \tvcvttps2udq %zmm25,%zmm26",},
{{0x62, 0x91, 0xfc, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
"62 91 fc 4f 78 f5 \tvcvttpd2udq %zmm29,%ymm6{%k7}",},
{{0x62, 0xf1, 0xff, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
"62 f1 ff 08 78 c6 \tvcvttsd2usi %xmm6,%rax",},
{{0x62, 0xf1, 0xfe, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
"62 f1 fe 08 78 c6 \tvcvttss2usi %xmm6,%rax",},
{{0x62, 0x61, 0x7d, 0x4f, 0x78, 0xd5, }, 6, 0, "", "",
"62 61 7d 4f 78 d5 \tvcvttps2uqq %ymm5,%zmm26{%k7}",},
{{0x62, 0x01, 0xfd, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
"62 01 fd 48 78 f5 \tvcvttpd2uqq %zmm29,%zmm30",},
{{0x0f, 0x79, 0xd8, }, 3, 0, "", "",
"0f 79 d8 \tvmwrite %rax,%rbx",},
{{0x62, 0x01, 0x7c, 0x48, 0x79, 0xd1, }, 6, 0, "", "",
"62 01 7c 48 79 d1 \tvcvtps2udq %zmm25,%zmm26",},
{{0x62, 0x91, 0xfc, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
"62 91 fc 4f 79 f5 \tvcvtpd2udq %zmm29,%ymm6{%k7}",},
{{0x62, 0xf1, 0xff, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
"62 f1 ff 08 79 c6 \tvcvtsd2usi %xmm6,%rax",},
{{0x62, 0xf1, 0xfe, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
"62 f1 fe 08 79 c6 \tvcvtss2usi %xmm6,%rax",},
{{0x62, 0x61, 0x7d, 0x4f, 0x79, 0xd5, }, 6, 0, "", "",
"62 61 7d 4f 79 d5 \tvcvtps2uqq %ymm5,%zmm26{%k7}",},
{{0x62, 0x01, 0xfd, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
"62 01 fd 48 79 f5 \tvcvtpd2uqq %zmm29,%zmm30",},
{{0x62, 0x61, 0x7e, 0x4f, 0x7a, 0xed, }, 6, 0, "", "",
"62 61 7e 4f 7a ed \tvcvtudq2pd %ymm5,%zmm29{%k7}",},
{{0x62, 0x01, 0xfe, 0x48, 0x7a, 0xd1, }, 6, 0, "", "",
"62 01 fe 48 7a d1 \tvcvtuqq2pd %zmm25,%zmm26",},
{{0x62, 0x01, 0x7f, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
"62 01 7f 48 7a f5 \tvcvtudq2ps %zmm29,%zmm30",},
{{0x62, 0x01, 0xff, 0x4f, 0x7a, 0xd1, }, 6, 0, "", "",
"62 01 ff 4f 7a d1 \tvcvtuqq2ps %zmm25,%ymm26{%k7}",},
{{0x62, 0x01, 0x7d, 0x4f, 0x7a, 0xd1, }, 6, 0, "", "",
"62 01 7d 4f 7a d1 \tvcvttps2qq %ymm25,%zmm26{%k7}",},
{{0x62, 0x01, 0xfd, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
"62 01 fd 48 7a f5 \tvcvttpd2qq %zmm29,%zmm30",},
{{0x62, 0xf1, 0x57, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
"62 f1 57 08 7b f0 \tvcvtusi2sd %eax,%xmm5,%xmm6",},
{{0x62, 0xf1, 0x56, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
"62 f1 56 08 7b f0 \tvcvtusi2ss %eax,%xmm5,%xmm6",},
{{0x62, 0x61, 0x7d, 0x4f, 0x7b, 0xd5, }, 6, 0, "", "",
"62 61 7d 4f 7b d5 \tvcvtps2qq %ymm5,%zmm26{%k7}",},
{{0x62, 0x01, 0xfd, 0x48, 0x7b, 0xf5, }, 6, 0, "", "",
"62 01 fd 48 7b f5 \tvcvtpd2qq %zmm29,%zmm30",},
{{0x0f, 0x7f, 0xc4, }, 3, 0, "", "",
"0f 7f c4 \tmovq %mm0,%mm4",},
{{0xc5, 0x7d, 0x7f, 0xc6, }, 4, 0, "", "",
"c5 7d 7f c6 \tvmovdqa %ymm8,%ymm6",},
{{0x62, 0x01, 0x7d, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
"62 01 7d 48 7f ca \tvmovdqa32 %zmm25,%zmm26",},
{{0x62, 0x01, 0xfd, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
"62 01 fd 48 7f ca \tvmovdqa64 %zmm25,%zmm26",},
{{0xc5, 0x7e, 0x7f, 0xc6, }, 4, 0, "", "",
"c5 7e 7f c6 \tvmovdqu %ymm8,%ymm6",},
{{0x62, 0x01, 0x7e, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
"62 01 7e 48 7f ca \tvmovdqu32 %zmm25,%zmm26",},
{{0x62, 0x01, 0xfe, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
"62 01 fe 48 7f ca \tvmovdqu64 %zmm25,%zmm26",},
{{0x62, 0x61, 0x7f, 0x48, 0x7f, 0x31, }, 6, 0, "", "",
"62 61 7f 48 7f 31 \tvmovdqu8 %zmm30,(%rcx)",},
{{0x62, 0x01, 0xff, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
"62 01 ff 48 7f ca \tvmovdqu16 %zmm25,%zmm26",},
{{0x0f, 0xdb, 0xd1, }, 3, 0, "", "",
"0f db d1 \tpand %mm1,%mm2",},
{{0x66, 0x0f, 0xdb, 0xd1, }, 4, 0, "", "",
"66 0f db d1 \tpand %xmm1,%xmm2",},
{{0xc5, 0xcd, 0xdb, 0xd4, }, 4, 0, "", "",
"c5 cd db d4 \tvpand %ymm4,%ymm6,%ymm2",},
{{0x62, 0x01, 0x35, 0x40, 0xdb, 0xd0, }, 6, 0, "", "",
"62 01 35 40 db d0 \tvpandd %zmm24,%zmm25,%zmm26",},
{{0x62, 0x01, 0xb5, 0x40, 0xdb, 0xd0, }, 6, 0, "", "",
"62 01 b5 40 db d0 \tvpandq %zmm24,%zmm25,%zmm26",},
{{0x0f, 0xdf, 0xd1, }, 3, 0, "", "",
"0f df d1 \tpandn %mm1,%mm2",},
{{0x66, 0x0f, 0xdf, 0xd1, }, 4, 0, "", "",
"66 0f df d1 \tpandn %xmm1,%xmm2",},
{{0xc5, 0xcd, 0xdf, 0xd4, }, 4, 0, "", "",
"c5 cd df d4 \tvpandn %ymm4,%ymm6,%ymm2",},
{{0x62, 0x01, 0x35, 0x40, 0xdf, 0xd0, }, 6, 0, "", "",
"62 01 35 40 df d0 \tvpandnd %zmm24,%zmm25,%zmm26",},
{{0x62, 0x01, 0xb5, 0x40, 0xdf, 0xd0, }, 6, 0, "", "",
"62 01 b5 40 df d0 \tvpandnq %zmm24,%zmm25,%zmm26",},
{{0xc5, 0xf9, 0xe6, 0xd1, }, 4, 0, "", "",
"c5 f9 e6 d1 \tvcvttpd2dq %xmm1,%xmm2",},
{{0xc5, 0xfa, 0xe6, 0xf5, }, 4, 0, "", "",
"c5 fa e6 f5 \tvcvtdq2pd %xmm5,%xmm6",},
{{0x62, 0x61, 0x7e, 0x4f, 0xe6, 0xd5, }, 6, 0, "", "",
"62 61 7e 4f e6 d5 \tvcvtdq2pd %ymm5,%zmm26{%k7}",},
{{0x62, 0x01, 0xfe, 0x48, 0xe6, 0xd1, }, 6, 0, "", "",
"62 01 fe 48 e6 d1 \tvcvtqq2pd %zmm25,%zmm26",},
{{0xc5, 0xfb, 0xe6, 0xd1, }, 4, 0, "", "",
"c5 fb e6 d1 \tvcvtpd2dq %xmm1,%xmm2",},
{{0x0f, 0xeb, 0xf4, }, 3, 0, "", "",
"0f eb f4 \tpor %mm4,%mm6",},
{{0xc5, 0xcd, 0xeb, 0xd4, }, 4, 0, "", "",
"c5 cd eb d4 \tvpor %ymm4,%ymm6,%ymm2",},
{{0x62, 0x01, 0x35, 0x40, 0xeb, 0xd0, }, 6, 0, "", "",
"62 01 35 40 eb d0 \tvpord %zmm24,%zmm25,%zmm26",},
{{0x62, 0x01, 0xb5, 0x40, 0xeb, 0xd0, }, 6, 0, "", "",
"62 01 b5 40 eb d0 \tvporq %zmm24,%zmm25,%zmm26",},
{{0x0f, 0xef, 0xf4, }, 3, 0, "", "",
"0f ef f4 \tpxor %mm4,%mm6",},
{{0xc5, 0xcd, 0xef, 0xd4, }, 4, 0, "", "",
"c5 cd ef d4 \tvpxor %ymm4,%ymm6,%ymm2",},
{{0x62, 0x01, 0x35, 0x40, 0xef, 0xd0, }, 6, 0, "", "",
"62 01 35 40 ef d0 \tvpxord %zmm24,%zmm25,%zmm26",},
{{0x62, 0x01, 0xb5, 0x40, 0xef, 0xd0, }, 6, 0, "", "",
"62 01 b5 40 ef d0 \tvpxorq %zmm24,%zmm25,%zmm26",},
{{0x66, 0x0f, 0x38, 0x10, 0xc1, }, 5, 0, "", "",
"66 0f 38 10 c1 \tpblendvb %xmm0,%xmm1,%xmm0",},
{{0x62, 0x02, 0x9d, 0x40, 0x10, 0xeb, }, 6, 0, "", "",
"62 02 9d 40 10 eb \tvpsrlvw %zmm27,%zmm28,%zmm29",},
{{0x62, 0x62, 0x7e, 0x4f, 0x10, 0xe6, }, 6, 0, "", "",
"62 62 7e 4f 10 e6 \tvpmovuswb %zmm28,%ymm6{%k7}",},
{{0x62, 0x62, 0x7e, 0x4f, 0x11, 0xe6, }, 6, 0, "", "",
"62 62 7e 4f 11 e6 \tvpmovusdb %zmm28,%xmm6{%k7}",},
{{0x62, 0x02, 0x9d, 0x40, 0x11, 0xeb, }, 6, 0, "", "",
"62 02 9d 40 11 eb \tvpsravw %zmm27,%zmm28,%zmm29",},
{{0x62, 0x62, 0x7e, 0x4f, 0x12, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 12 de \tvpmovusqb %zmm27,%xmm6{%k7}",},
{{0x62, 0x02, 0x9d, 0x40, 0x12, 0xeb, }, 6, 0, "", "",
"62 02 9d 40 12 eb \tvpsllvw %zmm27,%zmm28,%zmm29",},
{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
{{0x62, 0x62, 0x7d, 0x4f, 0x13, 0xdd, }, 6, 0, "", "",
"62 62 7d 4f 13 dd \tvcvtph2ps %ymm5,%zmm27{%k7}",},
{{0x62, 0x62, 0x7e, 0x4f, 0x13, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 13 de \tvpmovusdw %zmm27,%ymm6{%k7}",},
{{0x66, 0x0f, 0x38, 0x14, 0xc1, }, 5, 0, "", "",
"66 0f 38 14 c1 \tblendvps %xmm0,%xmm1,%xmm0",},
{{0x62, 0x62, 0x7e, 0x4f, 0x14, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 14 de \tvpmovusqw %zmm27,%xmm6{%k7}",},
{{0x62, 0x02, 0x1d, 0x40, 0x14, 0xeb, }, 6, 0, "", "",
"62 02 1d 40 14 eb \tvprorvd %zmm27,%zmm28,%zmm29",},
{{0x62, 0x02, 0x9d, 0x40, 0x14, 0xeb, }, 6, 0, "", "",
"62 02 9d 40 14 eb \tvprorvq %zmm27,%zmm28,%zmm29",},
{{0x66, 0x0f, 0x38, 0x15, 0xc1, }, 5, 0, "", "",
"66 0f 38 15 c1 \tblendvpd %xmm0,%xmm1,%xmm0",},
{{0x62, 0x62, 0x7e, 0x4f, 0x15, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 15 de \tvpmovusqd %zmm27,%ymm6{%k7}",},
{{0x62, 0x02, 0x1d, 0x40, 0x15, 0xeb, }, 6, 0, "", "",
"62 02 1d 40 15 eb \tvprolvd %zmm27,%zmm28,%zmm29",},
{{0x62, 0x02, 0x9d, 0x40, 0x15, 0xeb, }, 6, 0, "", "",
"62 02 9d 40 15 eb \tvprolvq %zmm27,%zmm28,%zmm29",},
{{0xc4, 0xe2, 0x4d, 0x16, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 16 d4 \tvpermps %ymm4,%ymm6,%ymm2",},
{{0x62, 0x82, 0x2d, 0x27, 0x16, 0xf0, }, 6, 0, "", "",
"62 82 2d 27 16 f0 \tvpermps %ymm24,%ymm26,%ymm22{%k7}",},
{{0x62, 0x82, 0xad, 0x27, 0x16, 0xf0, }, 6, 0, "", "",
"62 82 ad 27 16 f0 \tvpermpd %ymm24,%ymm26,%ymm22{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x19, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 19 f4 \tvbroadcastsd %xmm4,%ymm6",},
{{0x62, 0x02, 0x7d, 0x48, 0x19, 0xd3, }, 6, 0, "", "",
"62 02 7d 48 19 d3 \tvbroadcastf32x2 %xmm27,%zmm26",},
{{0xc4, 0xe2, 0x7d, 0x1a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 1a 21 \tvbroadcastf128 (%rcx),%ymm4",},
{{0x62, 0x62, 0x7d, 0x48, 0x1a, 0x11, }, 6, 0, "", "",
"62 62 7d 48 1a 11 \tvbroadcastf32x4 (%rcx),%zmm26",},
{{0x62, 0x62, 0xfd, 0x48, 0x1a, 0x11, }, 6, 0, "", "",
"62 62 fd 48 1a 11 \tvbroadcastf64x2 (%rcx),%zmm26",},
{{0x62, 0x62, 0x7d, 0x48, 0x1b, 0x19, }, 6, 0, "", "",
"62 62 7d 48 1b 19 \tvbroadcastf32x8 (%rcx),%zmm27",},
{{0x62, 0x62, 0xfd, 0x48, 0x1b, 0x11, }, 6, 0, "", "",
"62 62 fd 48 1b 11 \tvbroadcastf64x4 (%rcx),%zmm26",},
{{0x62, 0x02, 0xfd, 0x48, 0x1f, 0xe3, }, 6, 0, "", "",
"62 02 fd 48 1f e3 \tvpabsq %zmm27,%zmm28",},
{{0xc4, 0xe2, 0x79, 0x20, 0xec, }, 5, 0, "", "",
"c4 e2 79 20 ec \tvpmovsxbw %xmm4,%xmm5",},
{{0x62, 0x62, 0x7e, 0x4f, 0x20, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 20 de \tvpmovswb %zmm27,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x21, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 21 f4 \tvpmovsxbd %xmm4,%ymm6",},
{{0x62, 0x62, 0x7e, 0x4f, 0x21, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 21 de \tvpmovsdb %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x22, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 22 e4 \tvpmovsxbq %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x22, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 22 de \tvpmovsqb %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x23, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 23 e4 \tvpmovsxwd %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x23, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 23 de \tvpmovsdw %zmm27,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x24, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 24 f4 \tvpmovsxwq %xmm4,%ymm6",},
{{0x62, 0x62, 0x7e, 0x4f, 0x24, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 24 de \tvpmovsqw %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x25, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 25 e4 \tvpmovsxdq %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x25, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 25 de \tvpmovsqd %zmm27,%ymm6{%k7}",},
{{0x62, 0x92, 0x1d, 0x40, 0x26, 0xeb, }, 6, 0, "", "",
"62 92 1d 40 26 eb \tvptestmb %zmm27,%zmm28,%k5",},
{{0x62, 0x92, 0x9d, 0x40, 0x26, 0xeb, }, 6, 0, "", "",
"62 92 9d 40 26 eb \tvptestmw %zmm27,%zmm28,%k5",},
{{0x62, 0x92, 0x26, 0x40, 0x26, 0xea, }, 6, 0, "", "",
"62 92 26 40 26 ea \tvptestnmb %zmm26,%zmm27,%k5",},
{{0x62, 0x92, 0xa6, 0x40, 0x26, 0xea, }, 6, 0, "", "",
"62 92 a6 40 26 ea \tvptestnmw %zmm26,%zmm27,%k5",},
{{0x62, 0x92, 0x1d, 0x40, 0x27, 0xeb, }, 6, 0, "", "",
"62 92 1d 40 27 eb \tvptestmd %zmm27,%zmm28,%k5",},
{{0x62, 0x92, 0x9d, 0x40, 0x27, 0xeb, }, 6, 0, "", "",
"62 92 9d 40 27 eb \tvptestmq %zmm27,%zmm28,%k5",},
{{0x62, 0x92, 0x26, 0x40, 0x27, 0xea, }, 6, 0, "", "",
"62 92 26 40 27 ea \tvptestnmd %zmm26,%zmm27,%k5",},
{{0x62, 0x92, 0xa6, 0x40, 0x27, 0xea, }, 6, 0, "", "",
"62 92 a6 40 27 ea \tvptestnmq %zmm26,%zmm27,%k5",},
{{0xc4, 0xe2, 0x4d, 0x28, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 28 d4 \tvpmuldq %ymm4,%ymm6,%ymm2",},
{{0x62, 0x62, 0x7e, 0x48, 0x28, 0xe5, }, 6, 0, "", "",
"62 62 7e 48 28 e5 \tvpmovm2b %k5,%zmm28",},
{{0x62, 0x62, 0xfe, 0x48, 0x28, 0xe5, }, 6, 0, "", "",
"62 62 fe 48 28 e5 \tvpmovm2w %k5,%zmm28",},
{{0xc4, 0xe2, 0x4d, 0x29, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 29 d4 \tvpcmpeqq %ymm4,%ymm6,%ymm2",},
{{0x62, 0x92, 0x7e, 0x48, 0x29, 0xec, }, 6, 0, "", "",
"62 92 7e 48 29 ec \tvpmovb2m %zmm28,%k5",},
{{0x62, 0x92, 0xfe, 0x48, 0x29, 0xec, }, 6, 0, "", "",
"62 92 fe 48 29 ec \tvpmovw2m %zmm28,%k5",},
{{0xc4, 0xe2, 0x7d, 0x2a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 2a 21 \tvmovntdqa (%rcx),%ymm4",},
{{0x62, 0x62, 0xfe, 0x48, 0x2a, 0xf6, }, 6, 0, "", "",
"62 62 fe 48 2a f6 \tvpbroadcastmb2q %k6,%zmm30",},
{{0xc4, 0xe2, 0x5d, 0x2c, 0x31, }, 5, 0, "", "",
"c4 e2 5d 2c 31 \tvmaskmovps (%rcx),%ymm4,%ymm6",},
{{0x62, 0x02, 0x35, 0x40, 0x2c, 0xd0, }, 6, 0, "", "",
"62 02 35 40 2c d0 \tvscalefps %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x2c, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 2c d0 \tvscalefpd %zmm24,%zmm25,%zmm26",},
{{0xc4, 0xe2, 0x5d, 0x2d, 0x31, }, 5, 0, "", "",
"c4 e2 5d 2d 31 \tvmaskmovpd (%rcx),%ymm4,%ymm6",},
{{0x62, 0x02, 0x35, 0x07, 0x2d, 0xd0, }, 6, 0, "", "",
"62 02 35 07 2d d0 \tvscalefss %xmm24,%xmm25,%xmm26{%k7}",},
{{0x62, 0x02, 0xb5, 0x07, 0x2d, 0xd0, }, 6, 0, "", "",
"62 02 b5 07 2d d0 \tvscalefsd %xmm24,%xmm25,%xmm26{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x30, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 30 e4 \tvpmovzxbw %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x30, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 30 de \tvpmovwb %zmm27,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x31, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 31 f4 \tvpmovzxbd %xmm4,%ymm6",},
{{0x62, 0x62, 0x7e, 0x4f, 0x31, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 31 de \tvpmovdb %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x32, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 32 e4 \tvpmovzxbq %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x32, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 32 de \tvpmovqb %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x33, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 33 e4 \tvpmovzxwd %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x33, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 33 de \tvpmovdw %zmm27,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x34, 0xf4, }, 5, 0, "", "",
"c4 e2 7d 34 f4 \tvpmovzxwq %xmm4,%ymm6",},
{{0x62, 0x62, 0x7e, 0x4f, 0x34, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 34 de \tvpmovqw %zmm27,%xmm6{%k7}",},
{{0xc4, 0xe2, 0x7d, 0x35, 0xe4, }, 5, 0, "", "",
"c4 e2 7d 35 e4 \tvpmovzxdq %xmm4,%ymm4",},
{{0x62, 0x62, 0x7e, 0x4f, 0x35, 0xde, }, 6, 0, "", "",
"62 62 7e 4f 35 de \tvpmovqd %zmm27,%ymm6{%k7}",},
{{0xc4, 0xe2, 0x4d, 0x36, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 36 d4 \tvpermd %ymm4,%ymm6,%ymm2",},
{{0x62, 0x82, 0x2d, 0x27, 0x36, 0xf0, }, 6, 0, "", "",
"62 82 2d 27 36 f0 \tvpermd %ymm24,%ymm26,%ymm22{%k7}",},
{{0x62, 0x82, 0xad, 0x27, 0x36, 0xf0, }, 6, 0, "", "",
"62 82 ad 27 36 f0 \tvpermq %ymm24,%ymm26,%ymm22{%k7}",},
{{0xc4, 0xe2, 0x4d, 0x38, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 38 d4 \tvpminsb %ymm4,%ymm6,%ymm2",},
{{0x62, 0x62, 0x7e, 0x48, 0x38, 0xe5, }, 6, 0, "", "",
"62 62 7e 48 38 e5 \tvpmovm2d %k5,%zmm28",},
{{0x62, 0x62, 0xfe, 0x48, 0x38, 0xe5, }, 6, 0, "", "",
"62 62 fe 48 38 e5 \tvpmovm2q %k5,%zmm28",},
{{0xc4, 0xe2, 0x69, 0x39, 0xd9, }, 5, 0, "", "",
"c4 e2 69 39 d9 \tvpminsd %xmm1,%xmm2,%xmm3",},
{{0x62, 0x02, 0x35, 0x40, 0x39, 0xd0, }, 6, 0, "", "",
"62 02 35 40 39 d0 \tvpminsd %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x39, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 39 d0 \tvpminsq %zmm24,%zmm25,%zmm26",},
{{0x62, 0x92, 0x7e, 0x48, 0x39, 0xec, }, 6, 0, "", "",
"62 92 7e 48 39 ec \tvpmovd2m %zmm28,%k5",},
{{0x62, 0x92, 0xfe, 0x48, 0x39, 0xec, }, 6, 0, "", "",
"62 92 fe 48 39 ec \tvpmovq2m %zmm28,%k5",},
{{0xc4, 0xe2, 0x4d, 0x3a, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3a d4 \tvpminuw %ymm4,%ymm6,%ymm2",},
{{0x62, 0x62, 0x7e, 0x48, 0x3a, 0xe6, }, 6, 0, "", "",
"62 62 7e 48 3a e6 \tvpbroadcastmw2d %k6,%zmm28",},
{{0xc4, 0xe2, 0x4d, 0x3b, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3b d4 \tvpminud %ymm4,%ymm6,%ymm2",},
{{0x62, 0x02, 0x35, 0x40, 0x3b, 0xd0, }, 6, 0, "", "",
"62 02 35 40 3b d0 \tvpminud %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x3b, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 3b d0 \tvpminuq %zmm24,%zmm25,%zmm26",},
{{0xc4, 0xe2, 0x4d, 0x3d, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3d d4 \tvpmaxsd %ymm4,%ymm6,%ymm2",},
{{0x62, 0x02, 0x35, 0x40, 0x3d, 0xd0, }, 6, 0, "", "",
"62 02 35 40 3d d0 \tvpmaxsd %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x3d, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 3d d0 \tvpmaxsq %zmm24,%zmm25,%zmm26",},
{{0xc4, 0xe2, 0x4d, 0x3f, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 3f d4 \tvpmaxud %ymm4,%ymm6,%ymm2",},
{{0x62, 0x02, 0x35, 0x40, 0x3f, 0xd0, }, 6, 0, "", "",
"62 02 35 40 3f d0 \tvpmaxud %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x3f, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 3f d0 \tvpmaxuq %zmm24,%zmm25,%zmm26",},
{{0xc4, 0xe2, 0x4d, 0x40, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 40 d4 \tvpmulld %ymm4,%ymm6,%ymm2",},
{{0x62, 0x02, 0x35, 0x40, 0x40, 0xd0, }, 6, 0, "", "",
"62 02 35 40 40 d0 \tvpmulld %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x40, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 40 d0 \tvpmullq %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0x7d, 0x48, 0x42, 0xd1, }, 6, 0, "", "",
"62 02 7d 48 42 d1 \tvgetexpps %zmm25,%zmm26",},
{{0x62, 0x02, 0xfd, 0x48, 0x42, 0xe3, }, 6, 0, "", "",
"62 02 fd 48 42 e3 \tvgetexppd %zmm27,%zmm28",},
{{0x62, 0x02, 0x35, 0x07, 0x43, 0xd0, }, 6, 0, "", "",
"62 02 35 07 43 d0 \tvgetexpss %xmm24,%xmm25,%xmm26{%k7}",},
{{0x62, 0x02, 0x95, 0x07, 0x43, 0xf4, }, 6, 0, "", "",
"62 02 95 07 43 f4 \tvgetexpsd %xmm28,%xmm29,%xmm30{%k7}",},
{{0x62, 0x02, 0x7d, 0x48, 0x44, 0xe3, }, 6, 0, "", "",
"62 02 7d 48 44 e3 \tvplzcntd %zmm27,%zmm28",},
{{0x62, 0x02, 0xfd, 0x48, 0x44, 0xe3, }, 6, 0, "", "",
"62 02 fd 48 44 e3 \tvplzcntq %zmm27,%zmm28",},
{{0xc4, 0xe2, 0x4d, 0x46, 0xd4, }, 5, 0, "", "",
"c4 e2 4d 46 d4 \tvpsravd %ymm4,%ymm6,%ymm2",},
{{0x62, 0x02, 0x35, 0x40, 0x46, 0xd0, }, 6, 0, "", "",
"62 02 35 40 46 d0 \tvpsravd %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xb5, 0x40, 0x46, 0xd0, }, 6, 0, "", "",
"62 02 b5 40 46 d0 \tvpsravq %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0x7d, 0x48, 0x4c, 0xd1, }, 6, 0, "", "",
"62 02 7d 48 4c d1 \tvrcp14ps %zmm25,%zmm26",},
{{0x62, 0x02, 0xfd, 0x48, 0x4c, 0xe3, }, 6, 0, "", "",
"62 02 fd 48 4c e3 \tvrcp14pd %zmm27,%zmm28",},
{{0x62, 0x02, 0x35, 0x07, 0x4d, 0xd0, }, 6, 0, "", "",
"62 02 35 07 4d d0 \tvrcp14ss %xmm24,%xmm25,%xmm26{%k7}",},
{{0x62, 0x02, 0xb5, 0x07, 0x4d, 0xd0, }, 6, 0, "", "",
"62 02 b5 07 4d d0 \tvrcp14sd %xmm24,%xmm25,%xmm26{%k7}",},
{{0x62, 0x02, 0x7d, 0x48, 0x4e, 0xd1, }, 6, 0, "", "",
"62 02 7d 48 4e d1 \tvrsqrt14ps %zmm25,%zmm26",},
{{0x62, 0x02, 0xfd, 0x48, 0x4e, 0xe3, }, 6, 0, "", "",
"62 02 fd 48 4e e3 \tvrsqrt14pd %zmm27,%zmm28",},
{{0x62, 0x02, 0x35, 0x07, 0x4f, 0xd0, }, 6, 0, "", "",
"62 02 35 07 4f d0 \tvrsqrt14ss %xmm24,%xmm25,%xmm26{%k7}",},
{{0x62, 0x02, 0xb5, 0x07, 0x4f, 0xd0, }, 6, 0, "", "",
"62 02 b5 07 4f d0 \tvrsqrt14sd %xmm24,%xmm25,%xmm26{%k7}",},
{{0xc4, 0xe2, 0x79, 0x59, 0xf4, }, 5, 0, "", "",
"c4 e2 79 59 f4 \tvpbroadcastq %xmm4,%xmm6",},
{{0x62, 0x02, 0x7d, 0x48, 0x59, 0xd3, }, 6, 0, "", "",
"62 02 7d 48 59 d3 \tvbroadcasti32x2 %xmm27,%zmm26",},
{{0xc4, 0xe2, 0x7d, 0x5a, 0x21, }, 5, 0, "", "",
"c4 e2 7d 5a 21 \tvbroadcasti128 (%rcx),%ymm4",},
{{0x62, 0x62, 0x7d, 0x48, 0x5a, 0x11, }, 6, 0, "", "",
"62 62 7d 48 5a 11 \tvbroadcasti32x4 (%rcx),%zmm26",},
{{0x62, 0x62, 0xfd, 0x48, 0x5a, 0x11, }, 6, 0, "", "",
"62 62 fd 48 5a 11 \tvbroadcasti64x2 (%rcx),%zmm26",},
{{0x62, 0x62, 0x7d, 0x48, 0x5b, 0x21, }, 6, 0, "", "",
"62 62 7d 48 5b 21 \tvbroadcasti32x8 (%rcx),%zmm28",},
{{0x62, 0x62, 0xfd, 0x48, 0x5b, 0x11, }, 6, 0, "", "",
"62 62 fd 48 5b 11 \tvbroadcasti64x4 (%rcx),%zmm26",},
{{0x62, 0x02, 0x25, 0x40, 0x64, 0xe2, }, 6, 0, "", "",
"62 02 25 40 64 e2 \tvpblendmd %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x64, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 64 e2 \tvpblendmq %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x35, 0x40, 0x65, 0xd0, }, 6, 0, "", "",
"62 02 35 40 65 d0 \tvblendmps %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xa5, 0x40, 0x65, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 65 e2 \tvblendmpd %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x25, 0x40, 0x66, 0xe2, }, 6, 0, "", "",
"62 02 25 40 66 e2 \tvpblendmb %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x66, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 66 e2 \tvpblendmw %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x35, 0x40, 0x75, 0xd0, }, 6, 0, "", "",
"62 02 35 40 75 d0 \tvpermi2b %zmm24,%zmm25,%zmm26",},
{{0x62, 0x02, 0xa5, 0x40, 0x75, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 75 e2 \tvpermi2w %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x25, 0x40, 0x76, 0xe2, }, 6, 0, "", "",
"62 02 25 40 76 e2 \tvpermi2d %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x76, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 76 e2 \tvpermi2q %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x25, 0x40, 0x77, 0xe2, }, 6, 0, "", "",
"62 02 25 40 77 e2 \tvpermi2ps %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x77, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 77 e2 \tvpermi2pd %zmm26,%zmm27,%zmm28",},
{{0x62, 0x62, 0x7d, 0x08, 0x7a, 0xf0, }, 6, 0, "", "",
"62 62 7d 08 7a f0 \tvpbroadcastb %eax,%xmm30",},
{{0x62, 0x62, 0x7d, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
"62 62 7d 08 7b f0 \tvpbroadcastw %eax,%xmm30",},
{{0x62, 0x62, 0x7d, 0x08, 0x7c, 0xf0, }, 6, 0, "", "",
"62 62 7d 08 7c f0 \tvpbroadcastd %eax,%xmm30",},
{{0x62, 0x62, 0xfd, 0x48, 0x7c, 0xf0, }, 6, 0, "", "",
"62 62 fd 48 7c f0 \tvpbroadcastq %rax,%zmm30",},
{{0x62, 0x02, 0x25, 0x40, 0x7d, 0xe2, }, 6, 0, "", "",
"62 02 25 40 7d e2 \tvpermt2b %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x7d, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 7d e2 \tvpermt2w %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x25, 0x40, 0x7e, 0xe2, }, 6, 0, "", "",
"62 02 25 40 7e e2 \tvpermt2d %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x7e, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 7e e2 \tvpermt2q %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x25, 0x40, 0x7f, 0xe2, }, 6, 0, "", "",
"62 02 25 40 7f e2 \tvpermt2ps %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x7f, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 7f e2 \tvpermt2pd %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x83, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 83 e2 \tvpmultishiftqb %zmm26,%zmm27,%zmm28",},
{{0x62, 0x62, 0x7d, 0x48, 0x88, 0x11, }, 6, 0, "", "",
"62 62 7d 48 88 11 \tvexpandps (%rcx),%zmm26",},
{{0x62, 0x62, 0xfd, 0x48, 0x88, 0x21, }, 6, 0, "", "",
"62 62 fd 48 88 21 \tvexpandpd (%rcx),%zmm28",},
{{0x62, 0x62, 0x7d, 0x48, 0x89, 0x21, }, 6, 0, "", "",
"62 62 7d 48 89 21 \tvpexpandd (%rcx),%zmm28",},
{{0x62, 0x62, 0xfd, 0x48, 0x89, 0x11, }, 6, 0, "", "",
"62 62 fd 48 89 11 \tvpexpandq (%rcx),%zmm26",},
{{0x62, 0x62, 0x7d, 0x48, 0x8a, 0x21, }, 6, 0, "", "",
"62 62 7d 48 8a 21 \tvcompressps %zmm28,(%rcx)",},
{{0x62, 0x62, 0xfd, 0x48, 0x8a, 0x21, }, 6, 0, "", "",
"62 62 fd 48 8a 21 \tvcompresspd %zmm28,(%rcx)",},
{{0x62, 0x62, 0x7d, 0x48, 0x8b, 0x21, }, 6, 0, "", "",
"62 62 7d 48 8b 21 \tvpcompressd %zmm28,(%rcx)",},
{{0x62, 0x62, 0xfd, 0x48, 0x8b, 0x11, }, 6, 0, "", "",
"62 62 fd 48 8b 11 \tvpcompressq %zmm26,(%rcx)",},
{{0x62, 0x02, 0x25, 0x40, 0x8d, 0xe2, }, 6, 0, "", "",
"62 02 25 40 8d e2 \tvpermb %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0x8d, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 8d e2 \tvpermw %zmm26,%zmm27,%zmm28",},
{{0xc4, 0xe2, 0x69, 0x90, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 69 90 4c 7d 02 \tvpgatherdd %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
{{0xc4, 0xe2, 0xe9, 0x90, 0x4c, 0x7d, 0x04, }, 7, 0, "", "",
"c4 e2 e9 90 4c 7d 04 \tvpgatherdq %xmm2,0x4(%rbp,%xmm7,2),%xmm1",},
{{0x62, 0x22, 0x7d, 0x41, 0x90, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 7d 41 90 94 dd 7b 00 00 00 \tvpgatherdd 0x7b(%rbp,%zmm27,8),%zmm26{%k1}",},
{{0x62, 0x22, 0xfd, 0x41, 0x90, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 fd 41 90 94 dd 7b 00 00 00 \tvpgatherdq 0x7b(%rbp,%ymm27,8),%zmm26{%k1}",},
{{0xc4, 0xe2, 0x69, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 69 91 4c 7d 02 \tvpgatherqd %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
{{0xc4, 0xe2, 0xe9, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
"c4 e2 e9 91 4c 7d 02 \tvpgatherqq %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
{{0x62, 0x22, 0x7d, 0x41, 0x91, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 7d 41 91 94 dd 7b 00 00 00 \tvpgatherqd 0x7b(%rbp,%zmm27,8),%ymm26{%k1}",},
{{0x62, 0x22, 0xfd, 0x41, 0x91, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 fd 41 91 94 dd 7b 00 00 00 \tvpgatherqq 0x7b(%rbp,%zmm27,8),%zmm26{%k1}",},
{{0x62, 0x22, 0x7d, 0x41, 0xa0, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 7d 41 a0 a4 ed 7b 00 00 00 \tvpscatterdd %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
{{0x62, 0x22, 0xfd, 0x41, 0xa0, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 fd 41 a0 94 dd 7b 00 00 00 \tvpscatterdq %zmm26,0x7b(%rbp,%ymm27,8){%k1}",},
{{0x62, 0xb2, 0x7d, 0x41, 0xa1, 0xb4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 b2 7d 41 a1 b4 ed 7b 00 00 00 \tvpscatterqd %ymm6,0x7b(%rbp,%zmm29,8){%k1}",},
{{0x62, 0xb2, 0xfd, 0x21, 0xa1, 0xb4, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 b2 fd 21 a1 b4 dd 7b 00 00 00 \tvpscatterqq %ymm6,0x7b(%rbp,%ymm27,8){%k1}",},
{{0x62, 0x22, 0x7d, 0x41, 0xa2, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 7d 41 a2 a4 ed 7b 00 00 00 \tvscatterdps %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
{{0x62, 0x22, 0xfd, 0x41, 0xa2, 0xa4, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 fd 41 a2 a4 dd 7b 00 00 00 \tvscatterdpd %zmm28,0x7b(%rbp,%ymm27,8){%k1}",},
{{0x62, 0xb2, 0x7d, 0x41, 0xa3, 0xb4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 b2 7d 41 a3 b4 ed 7b 00 00 00 \tvscatterqps %ymm6,0x7b(%rbp,%zmm29,8){%k1}",},
{{0x62, 0x22, 0xfd, 0x41, 0xa3, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 22 fd 41 a3 a4 ed 7b 00 00 00 \tvscatterqpd %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
{{0x62, 0x02, 0xa5, 0x40, 0xb4, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 b4 e2 \tvpmadd52luq %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0xa5, 0x40, 0xb5, 0xe2, }, 6, 0, "", "",
"62 02 a5 40 b5 e2 \tvpmadd52huq %zmm26,%zmm27,%zmm28",},
{{0x62, 0x02, 0x7d, 0x48, 0xc4, 0xda, }, 6, 0, "", "",
"62 02 7d 48 c4 da \tvpconflictd %zmm26,%zmm27",},
{{0x62, 0x02, 0xfd, 0x48, 0xc4, 0xda, }, 6, 0, "", "",
"62 02 fd 48 c4 da \tvpconflictq %zmm26,%zmm27",},
{{0x62, 0x02, 0x7d, 0x48, 0xc8, 0xf5, }, 6, 0, "", "",
"62 02 7d 48 c8 f5 \tvexp2ps %zmm29,%zmm30",},
{{0x62, 0x02, 0xfd, 0x48, 0xc8, 0xda, }, 6, 0, "", "",
"62 02 fd 48 c8 da \tvexp2pd %zmm26,%zmm27",},
{{0x62, 0x02, 0x7d, 0x48, 0xca, 0xf5, }, 6, 0, "", "",
"62 02 7d 48 ca f5 \tvrcp28ps %zmm29,%zmm30",},
{{0x62, 0x02, 0xfd, 0x48, 0xca, 0xda, }, 6, 0, "", "",
"62 02 fd 48 ca da \tvrcp28pd %zmm26,%zmm27",},
{{0x62, 0x02, 0x15, 0x07, 0xcb, 0xf4, }, 6, 0, "", "",
"62 02 15 07 cb f4 \tvrcp28ss %xmm28,%xmm29,%xmm30{%k7}",},
{{0x62, 0x02, 0xad, 0x07, 0xcb, 0xd9, }, 6, 0, "", "",
"62 02 ad 07 cb d9 \tvrcp28sd %xmm25,%xmm26,%xmm27{%k7}",},
{{0x62, 0x02, 0x7d, 0x48, 0xcc, 0xf5, }, 6, 0, "", "",
"62 02 7d 48 cc f5 \tvrsqrt28ps %zmm29,%zmm30",},
{{0x62, 0x02, 0xfd, 0x48, 0xcc, 0xda, }, 6, 0, "", "",
"62 02 fd 48 cc da \tvrsqrt28pd %zmm26,%zmm27",},
{{0x62, 0x02, 0x15, 0x07, 0xcd, 0xf4, }, 6, 0, "", "",
"62 02 15 07 cd f4 \tvrsqrt28ss %xmm28,%xmm29,%xmm30{%k7}",},
{{0x62, 0x02, 0xad, 0x07, 0xcd, 0xd9, }, 6, 0, "", "",
"62 02 ad 07 cd d9 \tvrsqrt28sd %xmm25,%xmm26,%xmm27{%k7}",},
{{0x62, 0x03, 0x15, 0x40, 0x03, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 40 03 f4 12 \tvalignd $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0xad, 0x40, 0x03, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 ad 40 03 d9 12 \tvalignq $0x12,%zmm25,%zmm26,%zmm27",},
{{0xc4, 0xe3, 0x7d, 0x08, 0xd6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 08 d6 05 \tvroundps $0x5,%ymm6,%ymm2",},
{{0x62, 0x03, 0x7d, 0x48, 0x08, 0xd1, 0x12, }, 7, 0, "", "",
"62 03 7d 48 08 d1 12 \tvrndscaleps $0x12,%zmm25,%zmm26",},
{{0xc4, 0xe3, 0x7d, 0x09, 0xd6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 09 d6 05 \tvroundpd $0x5,%ymm6,%ymm2",},
{{0x62, 0x03, 0xfd, 0x48, 0x09, 0xd1, 0x12, }, 7, 0, "", "",
"62 03 fd 48 09 d1 12 \tvrndscalepd $0x12,%zmm25,%zmm26",},
{{0xc4, 0xe3, 0x49, 0x0a, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 49 0a d4 05 \tvroundss $0x5,%xmm4,%xmm6,%xmm2",},
{{0x62, 0x03, 0x35, 0x07, 0x0a, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 35 07 0a d0 12 \tvrndscaless $0x12,%xmm24,%xmm25,%xmm26{%k7}",},
{{0xc4, 0xe3, 0x49, 0x0b, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 49 0b d4 05 \tvroundsd $0x5,%xmm4,%xmm6,%xmm2",},
{{0x62, 0x03, 0xb5, 0x07, 0x0b, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 b5 07 0b d0 12 \tvrndscalesd $0x12,%xmm24,%xmm25,%xmm26{%k7}",},
{{0xc4, 0xe3, 0x5d, 0x18, 0xf4, 0x05, }, 6, 0, "", "",
"c4 e3 5d 18 f4 05 \tvinsertf128 $0x5,%xmm4,%ymm4,%ymm6",},
{{0x62, 0x03, 0x35, 0x47, 0x18, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 35 47 18 d0 12 \tvinsertf32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
{{0x62, 0x03, 0xb5, 0x47, 0x18, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 b5 47 18 d0 12 \tvinsertf64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
{{0xc4, 0xe3, 0x7d, 0x19, 0xe4, 0x05, }, 6, 0, "", "",
"c4 e3 7d 19 e4 05 \tvextractf128 $0x5,%ymm4,%xmm4",},
{{0x62, 0x03, 0x7d, 0x4f, 0x19, 0xca, 0x12, }, 7, 0, "", "",
"62 03 7d 4f 19 ca 12 \tvextractf32x4 $0x12,%zmm25,%xmm26{%k7}",},
{{0x62, 0x03, 0xfd, 0x4f, 0x19, 0xca, 0x12, }, 7, 0, "", "",
"62 03 fd 4f 19 ca 12 \tvextractf64x2 $0x12,%zmm25,%xmm26{%k7}",},
{{0x62, 0x03, 0x2d, 0x47, 0x1a, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 47 1a d9 12 \tvinsertf32x8 $0x12,%ymm25,%zmm26,%zmm27{%k7}",},
{{0x62, 0x03, 0x95, 0x47, 0x1a, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 47 1a f4 12 \tvinsertf64x4 $0x12,%ymm28,%zmm29,%zmm30{%k7}",},
{{0x62, 0x03, 0x7d, 0x4f, 0x1b, 0xee, 0x12, }, 7, 0, "", "",
"62 03 7d 4f 1b ee 12 \tvextractf32x8 $0x12,%zmm29,%ymm30{%k7}",},
{{0x62, 0x03, 0xfd, 0x4f, 0x1b, 0xd3, 0x12, }, 7, 0, "", "",
"62 03 fd 4f 1b d3 12 \tvextractf64x4 $0x12,%zmm26,%ymm27{%k7}",},
{{0x62, 0x93, 0x0d, 0x40, 0x1e, 0xed, 0x12, }, 7, 0, "", "",
"62 93 0d 40 1e ed 12 \tvpcmpud $0x12,%zmm29,%zmm30,%k5",},
{{0x62, 0x93, 0xa5, 0x40, 0x1e, 0xea, 0x12, }, 7, 0, "", "",
"62 93 a5 40 1e ea 12 \tvpcmpuq $0x12,%zmm26,%zmm27,%k5",},
{{0x62, 0x93, 0x0d, 0x40, 0x1f, 0xed, 0x12, }, 7, 0, "", "",
"62 93 0d 40 1f ed 12 \tvpcmpd $0x12,%zmm29,%zmm30,%k5",},
{{0x62, 0x93, 0xa5, 0x40, 0x1f, 0xea, 0x12, }, 7, 0, "", "",
"62 93 a5 40 1f ea 12 \tvpcmpq $0x12,%zmm26,%zmm27,%k5",},
{{0x62, 0x03, 0x15, 0x40, 0x23, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 40 23 f4 12 \tvshuff32x4 $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0xad, 0x40, 0x23, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 ad 40 23 d9 12 \tvshuff64x2 $0x12,%zmm25,%zmm26,%zmm27",},
{{0x62, 0x03, 0x15, 0x40, 0x25, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 40 25 f4 12 \tvpternlogd $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0x95, 0x40, 0x25, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 40 25 f4 12 \tvpternlogq $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0x7d, 0x48, 0x26, 0xda, 0x12, }, 7, 0, "", "",
"62 03 7d 48 26 da 12 \tvgetmantps $0x12,%zmm26,%zmm27",},
{{0x62, 0x03, 0xfd, 0x48, 0x26, 0xf5, 0x12, }, 7, 0, "", "",
"62 03 fd 48 26 f5 12 \tvgetmantpd $0x12,%zmm29,%zmm30",},
{{0x62, 0x03, 0x2d, 0x07, 0x27, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 07 27 d9 12 \tvgetmantss $0x12,%xmm25,%xmm26,%xmm27{%k7}",},
{{0x62, 0x03, 0x95, 0x07, 0x27, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 07 27 f4 12 \tvgetmantsd $0x12,%xmm28,%xmm29,%xmm30{%k7}",},
{{0xc4, 0xe3, 0x5d, 0x38, 0xf4, 0x05, }, 6, 0, "", "",
"c4 e3 5d 38 f4 05 \tvinserti128 $0x5,%xmm4,%ymm4,%ymm6",},
{{0x62, 0x03, 0x35, 0x47, 0x38, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 35 47 38 d0 12 \tvinserti32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
{{0x62, 0x03, 0xb5, 0x47, 0x38, 0xd0, 0x12, }, 7, 0, "", "",
"62 03 b5 47 38 d0 12 \tvinserti64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
{{0xc4, 0xe3, 0x7d, 0x39, 0xe6, 0x05, }, 6, 0, "", "",
"c4 e3 7d 39 e6 05 \tvextracti128 $0x5,%ymm4,%xmm6",},
{{0x62, 0x03, 0x7d, 0x4f, 0x39, 0xca, 0x12, }, 7, 0, "", "",
"62 03 7d 4f 39 ca 12 \tvextracti32x4 $0x12,%zmm25,%xmm26{%k7}",},
{{0x62, 0x03, 0xfd, 0x4f, 0x39, 0xca, 0x12, }, 7, 0, "", "",
"62 03 fd 4f 39 ca 12 \tvextracti64x2 $0x12,%zmm25,%xmm26{%k7}",},
{{0x62, 0x03, 0x15, 0x47, 0x3a, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 47 3a f4 12 \tvinserti32x8 $0x12,%ymm28,%zmm29,%zmm30{%k7}",},
{{0x62, 0x03, 0xad, 0x47, 0x3a, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 ad 47 3a d9 12 \tvinserti64x4 $0x12,%ymm25,%zmm26,%zmm27{%k7}",},
{{0x62, 0x03, 0x7d, 0x4f, 0x3b, 0xee, 0x12, }, 7, 0, "", "",
"62 03 7d 4f 3b ee 12 \tvextracti32x8 $0x12,%zmm29,%ymm30{%k7}",},
{{0x62, 0x03, 0xfd, 0x4f, 0x3b, 0xd3, 0x12, }, 7, 0, "", "",
"62 03 fd 4f 3b d3 12 \tvextracti64x4 $0x12,%zmm26,%ymm27{%k7}",},
{{0x62, 0x93, 0x0d, 0x40, 0x3e, 0xed, 0x12, }, 7, 0, "", "",
"62 93 0d 40 3e ed 12 \tvpcmpub $0x12,%zmm29,%zmm30,%k5",},
{{0x62, 0x93, 0xa5, 0x40, 0x3e, 0xea, 0x12, }, 7, 0, "", "",
"62 93 a5 40 3e ea 12 \tvpcmpuw $0x12,%zmm26,%zmm27,%k5",},
{{0x62, 0x93, 0x0d, 0x40, 0x3f, 0xed, 0x12, }, 7, 0, "", "",
"62 93 0d 40 3f ed 12 \tvpcmpb $0x12,%zmm29,%zmm30,%k5",},
{{0x62, 0x93, 0xa5, 0x40, 0x3f, 0xea, 0x12, }, 7, 0, "", "",
"62 93 a5 40 3f ea 12 \tvpcmpw $0x12,%zmm26,%zmm27,%k5",},
{{0xc4, 0xe3, 0x4d, 0x42, 0xd4, 0x05, }, 6, 0, "", "",
"c4 e3 4d 42 d4 05 \tvmpsadbw $0x5,%ymm4,%ymm6,%ymm2",},
{{0x62, 0xf3, 0x55, 0x48, 0x42, 0xf4, 0x12, }, 7, 0, "", "",
"62 f3 55 48 42 f4 12 \tvdbpsadbw $0x12,%zmm4,%zmm5,%zmm6",},
{{0x62, 0x03, 0x2d, 0x40, 0x43, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 40 43 d9 12 \tvshufi32x4 $0x12,%zmm25,%zmm26,%zmm27",},
{{0x62, 0x03, 0x95, 0x40, 0x43, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 40 43 f4 12 \tvshufi64x2 $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0x2d, 0x40, 0x50, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 40 50 d9 12 \tvrangeps $0x12,%zmm25,%zmm26,%zmm27",},
{{0x62, 0x03, 0x95, 0x40, 0x50, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 40 50 f4 12 \tvrangepd $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0x2d, 0x00, 0x51, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 00 51 d9 12 \tvrangess $0x12,%xmm25,%xmm26,%xmm27",},
{{0x62, 0x03, 0x95, 0x00, 0x51, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 00 51 f4 12 \tvrangesd $0x12,%xmm28,%xmm29,%xmm30",},
{{0x62, 0x03, 0x15, 0x40, 0x54, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 40 54 f4 12 \tvfixupimmps $0x12,%zmm28,%zmm29,%zmm30",},
{{0x62, 0x03, 0xad, 0x40, 0x54, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 ad 40 54 d9 12 \tvfixupimmpd $0x12,%zmm25,%zmm26,%zmm27",},
{{0x62, 0x03, 0x15, 0x07, 0x55, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 15 07 55 f4 12 \tvfixupimmss $0x12,%xmm28,%xmm29,%xmm30{%k7}",},
{{0x62, 0x03, 0xad, 0x07, 0x55, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 ad 07 55 d9 12 \tvfixupimmsd $0x12,%xmm25,%xmm26,%xmm27{%k7}",},
{{0x62, 0x03, 0x7d, 0x48, 0x56, 0xda, 0x12, }, 7, 0, "", "",
"62 03 7d 48 56 da 12 \tvreduceps $0x12,%zmm26,%zmm27",},
{{0x62, 0x03, 0xfd, 0x48, 0x56, 0xf5, 0x12, }, 7, 0, "", "",
"62 03 fd 48 56 f5 12 \tvreducepd $0x12,%zmm29,%zmm30",},
{{0x62, 0x03, 0x2d, 0x00, 0x57, 0xd9, 0x12, }, 7, 0, "", "",
"62 03 2d 00 57 d9 12 \tvreducess $0x12,%xmm25,%xmm26,%xmm27",},
{{0x62, 0x03, 0x95, 0x00, 0x57, 0xf4, 0x12, }, 7, 0, "", "",
"62 03 95 00 57 f4 12 \tvreducesd $0x12,%xmm28,%xmm29,%xmm30",},
{{0x62, 0x93, 0x7d, 0x48, 0x66, 0xeb, 0x12, }, 7, 0, "", "",
"62 93 7d 48 66 eb 12 \tvfpclassps $0x12,%zmm27,%k5",},
{{0x62, 0x93, 0xfd, 0x48, 0x66, 0xee, 0x12, }, 7, 0, "", "",
"62 93 fd 48 66 ee 12 \tvfpclasspd $0x12,%zmm30,%k5",},
{{0x62, 0x93, 0x7d, 0x08, 0x67, 0xeb, 0x12, }, 7, 0, "", "",
"62 93 7d 08 67 eb 12 \tvfpclassss $0x12,%xmm27,%k5",},
{{0x62, 0x93, 0xfd, 0x08, 0x67, 0xee, 0x12, }, 7, 0, "", "",
"62 93 fd 08 67 ee 12 \tvfpclasssd $0x12,%xmm30,%k5",},
{{0x62, 0x91, 0x2d, 0x40, 0x72, 0xc1, 0x12, }, 7, 0, "", "",
"62 91 2d 40 72 c1 12 \tvprord $0x12,%zmm25,%zmm26",},
{{0x62, 0x91, 0xad, 0x40, 0x72, 0xc1, 0x12, }, 7, 0, "", "",
"62 91 ad 40 72 c1 12 \tvprorq $0x12,%zmm25,%zmm26",},
{{0x62, 0x91, 0x0d, 0x40, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
"62 91 0d 40 72 cd 12 \tvprold $0x12,%zmm29,%zmm30",},
{{0x62, 0x91, 0x8d, 0x40, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
"62 91 8d 40 72 cd 12 \tvprolq $0x12,%zmm29,%zmm30",},
{{0x0f, 0x72, 0xe6, 0x02, }, 4, 0, "", "",
"0f 72 e6 02 \tpsrad $0x2,%mm6",},
{{0xc5, 0xed, 0x72, 0xe6, 0x05, }, 5, 0, "", "",
"c5 ed 72 e6 05 \tvpsrad $0x5,%ymm6,%ymm2",},
{{0x62, 0x91, 0x4d, 0x40, 0x72, 0xe2, 0x05, }, 7, 0, "", "",
"62 91 4d 40 72 e2 05 \tvpsrad $0x5,%zmm26,%zmm22",},
{{0x62, 0x91, 0xcd, 0x40, 0x72, 0xe2, 0x05, }, 7, 0, "", "",
"62 91 cd 40 72 e2 05 \tvpsraq $0x5,%zmm26,%zmm22",},
{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c6 8c fe 7b 00 00 00 \tvgatherpf0dps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c6 8c fe 7b 00 00 00 \tvgatherpf0dpd 0x7b(%r14,%ymm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c6 94 fe 7b 00 00 00 \tvgatherpf1dps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c6 94 fe 7b 00 00 00 \tvgatherpf1dpd 0x7b(%r14,%ymm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c6 ac fe 7b 00 00 00 \tvscatterpf0dps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c6 ac fe 7b 00 00 00 \tvscatterpf0dpd 0x7b(%r14,%ymm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c6 b4 fe 7b 00 00 00 \tvscatterpf1dps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c6 b4 fe 7b 00 00 00 \tvscatterpf1dpd 0x7b(%r14,%ymm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c7 8c fe 7b 00 00 00 \tvgatherpf0qps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c7 8c fe 7b 00 00 00 \tvgatherpf0qpd 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c7 94 fe 7b 00 00 00 \tvgatherpf1qps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c7 94 fe 7b 00 00 00 \tvgatherpf1qpd 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c7 ac fe 7b 00 00 00 \tvscatterpf0qps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c7 ac fe 7b 00 00 00 \tvscatterpf0qpd 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 7d 41 c7 b4 fe 7b 00 00 00 \tvscatterpf1qps 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
"62 92 fd 41 c7 b4 fe 7b 00 00 00 \tvscatterpf1qpd 0x7b(%r14,%zmm31,8){%k1}",},
{{0x62, 0x01, 0x95, 0x40, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 40 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30",},
{{0x62, 0x01, 0x95, 0x47, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 47 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30{%k7}",},
{{0x62, 0x01, 0x95, 0xc7, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 c7 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30{%k7}{z}",},
{{0x62, 0x01, 0x95, 0x10, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 10 58 f4 \tvaddpd {rn-sae},%zmm28,%zmm29,%zmm30",},
{{0x62, 0x01, 0x95, 0x50, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 50 58 f4 \tvaddpd {ru-sae},%zmm28,%zmm29,%zmm30",},
{{0x62, 0x01, 0x95, 0x30, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 30 58 f4 \tvaddpd {rd-sae},%zmm28,%zmm29,%zmm30",},
{{0x62, 0x01, 0x95, 0x70, 0x58, 0xf4, }, 6, 0, "", "",
"62 01 95 70 58 f4 \tvaddpd {rz-sae},%zmm28,%zmm29,%zmm30",},
{{0x62, 0x61, 0x95, 0x40, 0x58, 0x31, }, 6, 0, "", "",
"62 61 95 40 58 31 \tvaddpd (%rcx),%zmm29,%zmm30",},
{{0x62, 0x21, 0x95, 0x40, 0x58, 0xb4, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 11, 0, "", "",
"62 21 95 40 58 b4 f0 23 01 00 00 \tvaddpd 0x123(%rax,%r14,8),%zmm29,%zmm30",},
{{0x62, 0x61, 0x95, 0x50, 0x58, 0x31, }, 6, 0, "", "",
"62 61 95 50 58 31 \tvaddpd (%rcx){1to8},%zmm29,%zmm30",},
{{0x62, 0x61, 0x95, 0x40, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
"62 61 95 40 58 72 7f \tvaddpd 0x1fc0(%rdx),%zmm29,%zmm30",},
{{0x62, 0x61, 0x95, 0x50, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
"62 61 95 50 58 72 7f \tvaddpd 0x3f8(%rdx){1to8},%zmm29,%zmm30",},
{{0x62, 0xf1, 0x0c, 0x50, 0xc2, 0x6a, 0x7f, 0x08, }, 8, 0, "", "",
"62 f1 0c 50 c2 6a 7f 08 \tvcmpeq_uqps 0x1fc(%rdx){1to16},%zmm30,%k5",},
{{0x62, 0xb1, 0x97, 0x07, 0xc2, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, 0x01, }, 12, 0, "", "",
"62 b1 97 07 c2 ac f0 23 01 00 00 01 \tvcmpltsd 0x123(%rax,%r14,8),%xmm29,%k5{%k7}",},
{{0x62, 0x91, 0x97, 0x17, 0xc2, 0xec, 0x02, }, 7, 0, "", "",
"62 91 97 17 c2 ec 02 \tvcmplesd {sae},%xmm28,%xmm29,%k5{%k7}",},
{{0x62, 0x23, 0x15, 0x07, 0x27, 0xb4, 0xf0, 0x23, 0x01, 0x00, 0x00, 0x5b, }, 12, 0, "", "",
"62 23 15 07 27 b4 f0 23 01 00 00 5b \tvgetmantss $0x5b,0x123(%rax,%r14,8),%xmm29,%xmm30{%k7}",},
{{0xf3, 0x0f, 0x1b, 0x00, }, 4, 0, "", "",
"f3 0f 1b 00 \tbndmk (%rax),%bnd0",},
{{0xf3, 0x41, 0x0f, 0x1b, 0x00, }, 5, 0, "", "",
...@@ -325,19 +1257,19 @@
{{0x0f, 0x1b, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
"0f 1b 84 08 78 56 34 12 \tbndstx %bnd0,0x12345678(%rax,%rcx,1)",},
{{0xf2, 0xe8, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "call", "unconditional",
"f2 e8 00 00 00 00 \tbnd callq f22 <main+0xf22>",},
{{0x67, 0xf2, 0xff, 0x10, }, 4, 0, "call", "indirect",
"67 f2 ff 10 \tbnd callq *(%eax)",},
{{0xf2, 0xc3, }, 2, 0, "ret", "indirect",
"f2 c3 \tbnd retq ",},
{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional",
"f2 e9 00 00 00 00 \tbnd jmpq f2e <main+0xf2e>",},
{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional",
"f2 e9 00 00 00 00 \tbnd jmpq f34 <main+0xf34>",},
{{0x67, 0xf2, 0xff, 0x21, }, 4, 0, "jmp", "indirect",
"67 f2 ff 21 \tbnd jmpq *(%ecx)",},
{{0xf2, 0x0f, 0x85, 0x00, 0x00, 0x00, 0x00, }, 7, 0, "jcc", "conditional",
"f2 0f 85 00 00 00 00 \tbnd jne f3f <main+0xf3f>",},
{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
...
...@@ -19,8 +19,882 @@ int main(void)
/* Following line is a marker for the awk script - do not change */
asm volatile("rdtsc"); /* Start here */
/* Test fix for vcvtph2ps in x86-opcode-map.txt */
asm volatile("vcvtph2ps %xmm3,%ymm5");
#ifdef __x86_64__
/* AVX-512: Instructions with the same op codes as Mask Instructions */
asm volatile("cmovno %rax,%rbx");
asm volatile("cmovno 0x12345678(%rax),%rcx");
asm volatile("cmovno 0x12345678(%rax),%cx");
asm volatile("cmove %rax,%rbx");
asm volatile("cmove 0x12345678(%rax),%rcx");
asm volatile("cmove 0x12345678(%rax),%cx");
asm volatile("seto 0x12345678(%rax)");
asm volatile("setno 0x12345678(%rax)");
asm volatile("setb 0x12345678(%rax)");
asm volatile("setc 0x12345678(%rax)");
asm volatile("setnae 0x12345678(%rax)");
asm volatile("setae 0x12345678(%rax)");
asm volatile("setnb 0x12345678(%rax)");
asm volatile("setnc 0x12345678(%rax)");
asm volatile("sets 0x12345678(%rax)");
asm volatile("setns 0x12345678(%rax)");
/* AVX-512: Mask Instructions */
asm volatile("kandw %k7,%k6,%k5");
asm volatile("kandq %k7,%k6,%k5");
asm volatile("kandb %k7,%k6,%k5");
asm volatile("kandd %k7,%k6,%k5");
asm volatile("kandnw %k7,%k6,%k5");
asm volatile("kandnq %k7,%k6,%k5");
asm volatile("kandnb %k7,%k6,%k5");
asm volatile("kandnd %k7,%k6,%k5");
asm volatile("knotw %k7,%k6");
asm volatile("knotq %k7,%k6");
asm volatile("knotb %k7,%k6");
asm volatile("knotd %k7,%k6");
asm volatile("korw %k7,%k6,%k5");
asm volatile("korq %k7,%k6,%k5");
asm volatile("korb %k7,%k6,%k5");
asm volatile("kord %k7,%k6,%k5");
asm volatile("kxnorw %k7,%k6,%k5");
asm volatile("kxnorq %k7,%k6,%k5");
asm volatile("kxnorb %k7,%k6,%k5");
asm volatile("kxnord %k7,%k6,%k5");
asm volatile("kxorw %k7,%k6,%k5");
asm volatile("kxorq %k7,%k6,%k5");
asm volatile("kxorb %k7,%k6,%k5");
asm volatile("kxord %k7,%k6,%k5");
asm volatile("kaddw %k7,%k6,%k5");
asm volatile("kaddq %k7,%k6,%k5");
asm volatile("kaddb %k7,%k6,%k5");
asm volatile("kaddd %k7,%k6,%k5");
asm volatile("kunpckbw %k7,%k6,%k5");
asm volatile("kunpckwd %k7,%k6,%k5");
asm volatile("kunpckdq %k7,%k6,%k5");
asm volatile("kmovw %k6,%k5");
asm volatile("kmovw (%rcx),%k5");
asm volatile("kmovw 0x123(%rax,%r14,8),%k5");
asm volatile("kmovw %k5,(%rcx)");
asm volatile("kmovw %k5,0x123(%rax,%r14,8)");
asm volatile("kmovw %eax,%k5");
asm volatile("kmovw %ebp,%k5");
asm volatile("kmovw %r13d,%k5");
asm volatile("kmovw %k5,%eax");
asm volatile("kmovw %k5,%ebp");
asm volatile("kmovw %k5,%r13d");
asm volatile("kmovq %k6,%k5");
asm volatile("kmovq (%rcx),%k5");
asm volatile("kmovq 0x123(%rax,%r14,8),%k5");
asm volatile("kmovq %k5,(%rcx)");
asm volatile("kmovq %k5,0x123(%rax,%r14,8)");
asm volatile("kmovq %rax,%k5");
asm volatile("kmovq %rbp,%k5");
asm volatile("kmovq %r13,%k5");
asm volatile("kmovq %k5,%rax");
asm volatile("kmovq %k5,%rbp");
asm volatile("kmovq %k5,%r13");
asm volatile("kmovb %k6,%k5");
asm volatile("kmovb (%rcx),%k5");
asm volatile("kmovb 0x123(%rax,%r14,8),%k5");
asm volatile("kmovb %k5,(%rcx)");
asm volatile("kmovb %k5,0x123(%rax,%r14,8)");
asm volatile("kmovb %eax,%k5");
asm volatile("kmovb %ebp,%k5");
asm volatile("kmovb %r13d,%k5");
asm volatile("kmovb %k5,%eax");
asm volatile("kmovb %k5,%ebp");
asm volatile("kmovb %k5,%r13d");
asm volatile("kmovd %k6,%k5");
asm volatile("kmovd (%rcx),%k5");
asm volatile("kmovd 0x123(%rax,%r14,8),%k5");
asm volatile("kmovd %k5,(%rcx)");
asm volatile("kmovd %k5,0x123(%rax,%r14,8)");
asm volatile("kmovd %eax,%k5");
asm volatile("kmovd %ebp,%k5");
asm volatile("kmovd %r13d,%k5");
asm volatile("kmovd %k5,%eax");
asm volatile("kmovd %k5,%ebp");
asm volatile("kmovd %k5,%r13d");
asm volatile("kortestw %k6,%k5");
asm volatile("kortestq %k6,%k5");
asm volatile("kortestb %k6,%k5");
asm volatile("kortestd %k6,%k5");
asm volatile("ktestw %k6,%k5");
asm volatile("ktestq %k6,%k5");
asm volatile("ktestb %k6,%k5");
asm volatile("ktestd %k6,%k5");
asm volatile("kshiftrw $0x12,%k6,%k5");
asm volatile("kshiftrq $0x5b,%k6,%k5");
asm volatile("kshiftlw $0x12,%k6,%k5");
asm volatile("kshiftlq $0x5b,%k6,%k5");
/* AVX-512: Op code 0f 5b */
asm volatile("vcvtdq2ps %xmm5,%xmm6");
asm volatile("vcvtqq2ps %zmm29,%ymm6{%k7}");
asm volatile("vcvtps2dq %xmm5,%xmm6");
asm volatile("vcvttps2dq %xmm5,%xmm6");
/* AVX-512: Op code 0f 6f */
asm volatile("movq %mm0,%mm4");
asm volatile("vmovdqa %ymm4,%ymm6");
asm volatile("vmovdqa32 %zmm25,%zmm26");
asm volatile("vmovdqa64 %zmm25,%zmm26");
asm volatile("vmovdqu %ymm4,%ymm6");
asm volatile("vmovdqu32 %zmm29,%zmm30");
asm volatile("vmovdqu64 %zmm25,%zmm26");
asm volatile("vmovdqu8 %zmm29,%zmm30");
asm volatile("vmovdqu16 %zmm25,%zmm26");
/* AVX-512: Op code 0f 78 */
asm volatile("vmread %rax,%rbx");
asm volatile("vcvttps2udq %zmm25,%zmm26");
asm volatile("vcvttpd2udq %zmm29,%ymm6{%k7}");
asm volatile("vcvttsd2usi %xmm6,%rax");
asm volatile("vcvttss2usi %xmm6,%rax");
asm volatile("vcvttps2uqq %ymm5,%zmm26{%k7}");
asm volatile("vcvttpd2uqq %zmm29,%zmm30");
/* AVX-512: Op code 0f 79 */
asm volatile("vmwrite %rax,%rbx");
asm volatile("vcvtps2udq %zmm25,%zmm26");
asm volatile("vcvtpd2udq %zmm29,%ymm6{%k7}");
asm volatile("vcvtsd2usi %xmm6,%rax");
asm volatile("vcvtss2usi %xmm6,%rax");
asm volatile("vcvtps2uqq %ymm5,%zmm26{%k7}");
asm volatile("vcvtpd2uqq %zmm29,%zmm30");
/* AVX-512: Op code 0f 7a */
asm volatile("vcvtudq2pd %ymm5,%zmm29{%k7}");
asm volatile("vcvtuqq2pd %zmm25,%zmm26");
asm volatile("vcvtudq2ps %zmm29,%zmm30");
asm volatile("vcvtuqq2ps %zmm25,%ymm26{%k7}");
asm volatile("vcvttps2qq %ymm25,%zmm26{%k7}");
asm volatile("vcvttpd2qq %zmm29,%zmm30");
/* AVX-512: Op code 0f 7b */
asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
asm volatile("vcvtps2qq %ymm5,%zmm26{%k7}");
asm volatile("vcvtpd2qq %zmm29,%zmm30");
/* AVX-512: Op code 0f 7f */
asm volatile("movq.s %mm0,%mm4");
asm volatile("vmovdqa %ymm8,%ymm6");
asm volatile("vmovdqa32.s %zmm25,%zmm26");
asm volatile("vmovdqa64.s %zmm25,%zmm26");
asm volatile("vmovdqu %ymm8,%ymm6");
asm volatile("vmovdqu32.s %zmm25,%zmm26");
asm volatile("vmovdqu64.s %zmm25,%zmm26");
asm volatile("vmovdqu8.s %zmm30,(%rcx)");
asm volatile("vmovdqu16.s %zmm25,%zmm26");
/* AVX-512: Op code 0f db */
asm volatile("pand %mm1,%mm2");
asm volatile("pand %xmm1,%xmm2");
asm volatile("vpand %ymm4,%ymm6,%ymm2");
asm volatile("vpandd %zmm24,%zmm25,%zmm26");
asm volatile("vpandq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f df */
asm volatile("pandn %mm1,%mm2");
asm volatile("pandn %xmm1,%xmm2");
asm volatile("vpandn %ymm4,%ymm6,%ymm2");
asm volatile("vpandnd %zmm24,%zmm25,%zmm26");
asm volatile("vpandnq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f e6 */
asm volatile("vcvttpd2dq %xmm1,%xmm2");
asm volatile("vcvtdq2pd %xmm5,%xmm6");
asm volatile("vcvtdq2pd %ymm5,%zmm26{%k7}");
asm volatile("vcvtqq2pd %zmm25,%zmm26");
asm volatile("vcvtpd2dq %xmm1,%xmm2");
/* AVX-512: Op code 0f eb */
asm volatile("por %mm4,%mm6");
asm volatile("vpor %ymm4,%ymm6,%ymm2");
asm volatile("vpord %zmm24,%zmm25,%zmm26");
asm volatile("vporq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f ef */
asm volatile("pxor %mm4,%mm6");
asm volatile("vpxor %ymm4,%ymm6,%ymm2");
asm volatile("vpxord %zmm24,%zmm25,%zmm26");
asm volatile("vpxorq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 10 */
asm volatile("pblendvb %xmm1,%xmm0");
asm volatile("vpsrlvw %zmm27,%zmm28,%zmm29");
asm volatile("vpmovuswb %zmm28,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 11 */
asm volatile("vpmovusdb %zmm28,%xmm6{%k7}");
asm volatile("vpsravw %zmm27,%zmm28,%zmm29");
/* AVX-512: Op code 0f 38 12 */
asm volatile("vpmovusqb %zmm27,%xmm6{%k7}");
asm volatile("vpsllvw %zmm27,%zmm28,%zmm29");
/* AVX-512: Op code 0f 38 13 */
asm volatile("vcvtph2ps %xmm3,%ymm5");
asm volatile("vcvtph2ps %ymm5,%zmm27{%k7}");
asm volatile("vpmovusdw %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 14 */
asm volatile("blendvps %xmm1,%xmm0");
asm volatile("vpmovusqw %zmm27,%xmm6{%k7}");
asm volatile("vprorvd %zmm27,%zmm28,%zmm29");
asm volatile("vprorvq %zmm27,%zmm28,%zmm29");
/* AVX-512: Op code 0f 38 15 */
asm volatile("blendvpd %xmm1,%xmm0");
asm volatile("vpmovusqd %zmm27,%ymm6{%k7}");
asm volatile("vprolvd %zmm27,%zmm28,%zmm29");
asm volatile("vprolvq %zmm27,%zmm28,%zmm29");
/* AVX-512: Op code 0f 38 16 */
asm volatile("vpermps %ymm4,%ymm6,%ymm2");
asm volatile("vpermps %ymm24,%ymm26,%ymm22{%k7}");
asm volatile("vpermpd %ymm24,%ymm26,%ymm22{%k7}");
/* AVX-512: Op code 0f 38 19 */
asm volatile("vbroadcastsd %xmm4,%ymm6");
asm volatile("vbroadcastf32x2 %xmm27,%zmm26");
/* AVX-512: Op code 0f 38 1a */
asm volatile("vbroadcastf128 (%rcx),%ymm4");
asm volatile("vbroadcastf32x4 (%rcx),%zmm26");
asm volatile("vbroadcastf64x2 (%rcx),%zmm26");
/* AVX-512: Op code 0f 38 1b */
asm volatile("vbroadcastf32x8 (%rcx),%zmm27");
asm volatile("vbroadcastf64x4 (%rcx),%zmm26");
/* AVX-512: Op code 0f 38 1f */
asm volatile("vpabsq %zmm27,%zmm28");
/* AVX-512: Op code 0f 38 20 */
asm volatile("vpmovsxbw %xmm4,%xmm5");
asm volatile("vpmovswb %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 21 */
asm volatile("vpmovsxbd %xmm4,%ymm6");
asm volatile("vpmovsdb %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 22 */
asm volatile("vpmovsxbq %xmm4,%ymm4");
asm volatile("vpmovsqb %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 23 */
asm volatile("vpmovsxwd %xmm4,%ymm4");
asm volatile("vpmovsdw %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 24 */
asm volatile("vpmovsxwq %xmm4,%ymm6");
asm volatile("vpmovsqw %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 25 */
asm volatile("vpmovsxdq %xmm4,%ymm4");
asm volatile("vpmovsqd %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 26 */
asm volatile("vptestmb %zmm27,%zmm28,%k5");
asm volatile("vptestmw %zmm27,%zmm28,%k5");
asm volatile("vptestnmb %zmm26,%zmm27,%k5");
asm volatile("vptestnmw %zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 38 27 */
asm volatile("vptestmd %zmm27,%zmm28,%k5");
asm volatile("vptestmq %zmm27,%zmm28,%k5");
asm volatile("vptestnmd %zmm26,%zmm27,%k5");
asm volatile("vptestnmq %zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 38 28 */
asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
asm volatile("vpmovm2b %k5,%zmm28");
asm volatile("vpmovm2w %k5,%zmm28");
/* AVX-512: Op code 0f 38 29 */
asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
asm volatile("vpmovb2m %zmm28,%k5");
asm volatile("vpmovw2m %zmm28,%k5");
/* AVX-512: Op code 0f 38 2a */
asm volatile("vmovntdqa (%rcx),%ymm4");
asm volatile("vpbroadcastmb2q %k6,%zmm30");
/* AVX-512: Op code 0f 38 2c */
asm volatile("vmaskmovps (%rcx),%ymm4,%ymm6");
asm volatile("vscalefps %zmm24,%zmm25,%zmm26");
asm volatile("vscalefpd %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 2d */
asm volatile("vmaskmovpd (%rcx),%ymm4,%ymm6");
asm volatile("vscalefss %xmm24,%xmm25,%xmm26{%k7}");
asm volatile("vscalefsd %xmm24,%xmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 38 30 */
asm volatile("vpmovzxbw %xmm4,%ymm4");
asm volatile("vpmovwb %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 31 */
asm volatile("vpmovzxbd %xmm4,%ymm6");
asm volatile("vpmovdb %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 32 */
asm volatile("vpmovzxbq %xmm4,%ymm4");
asm volatile("vpmovqb %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 33 */
asm volatile("vpmovzxwd %xmm4,%ymm4");
asm volatile("vpmovdw %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 34 */
asm volatile("vpmovzxwq %xmm4,%ymm6");
asm volatile("vpmovqw %zmm27,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 35 */
asm volatile("vpmovzxdq %xmm4,%ymm4");
asm volatile("vpmovqd %zmm27,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 36 */
asm volatile("vpermd %ymm4,%ymm6,%ymm2");
asm volatile("vpermd %ymm24,%ymm26,%ymm22{%k7}");
asm volatile("vpermq %ymm24,%ymm26,%ymm22{%k7}");
/* AVX-512: Op code 0f 38 38 */
asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
asm volatile("vpmovm2d %k5,%zmm28");
asm volatile("vpmovm2q %k5,%zmm28");
/* AVX-512: Op code 0f 38 39 */
asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
asm volatile("vpminsd %zmm24,%zmm25,%zmm26");
asm volatile("vpminsq %zmm24,%zmm25,%zmm26");
asm volatile("vpmovd2m %zmm28,%k5");
asm volatile("vpmovq2m %zmm28,%k5");
/* AVX-512: Op code 0f 38 3a */
asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
asm volatile("vpbroadcastmw2d %k6,%zmm28");
/* AVX-512: Op code 0f 38 3b */
asm volatile("vpminud %ymm4,%ymm6,%ymm2");
asm volatile("vpminud %zmm24,%zmm25,%zmm26");
asm volatile("vpminuq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 3d */
asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
asm volatile("vpmaxsd %zmm24,%zmm25,%zmm26");
asm volatile("vpmaxsq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 3f */
asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
asm volatile("vpmaxud %zmm24,%zmm25,%zmm26");
asm volatile("vpmaxuq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 40 */
asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
asm volatile("vpmulld %zmm24,%zmm25,%zmm26");
asm volatile("vpmullq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 42 */
asm volatile("vgetexpps %zmm25,%zmm26");
asm volatile("vgetexppd %zmm27,%zmm28");
/* AVX-512: Op code 0f 38 43 */
asm volatile("vgetexpss %xmm24,%xmm25,%xmm26{%k7}");
asm volatile("vgetexpsd %xmm28,%xmm29,%xmm30{%k7}");
/* AVX-512: Op code 0f 38 44 */
asm volatile("vplzcntd %zmm27,%zmm28");
asm volatile("vplzcntq %zmm27,%zmm28");
/* AVX-512: Op code 0f 38 46 */
asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
asm volatile("vpsravd %zmm24,%zmm25,%zmm26");
asm volatile("vpsravq %zmm24,%zmm25,%zmm26");
/* AVX-512: Op code 0f 38 4c */
asm volatile("vrcp14ps %zmm25,%zmm26");
asm volatile("vrcp14pd %zmm27,%zmm28");
/* AVX-512: Op code 0f 38 4d */
asm volatile("vrcp14ss %xmm24,%xmm25,%xmm26{%k7}");
asm volatile("vrcp14sd %xmm24,%xmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 38 4e */
asm volatile("vrsqrt14ps %zmm25,%zmm26");
asm volatile("vrsqrt14pd %zmm27,%zmm28");
/* AVX-512: Op code 0f 38 4f */
asm volatile("vrsqrt14ss %xmm24,%xmm25,%xmm26{%k7}");
asm volatile("vrsqrt14sd %xmm24,%xmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 38 59 */
asm volatile("vpbroadcastq %xmm4,%xmm6");
asm volatile("vbroadcasti32x2 %xmm27,%zmm26");
/* AVX-512: Op code 0f 38 5a */
asm volatile("vbroadcasti128 (%rcx),%ymm4");
asm volatile("vbroadcasti32x4 (%rcx),%zmm26");
asm volatile("vbroadcasti64x2 (%rcx),%zmm26");
/* AVX-512: Op code 0f 38 5b */
asm volatile("vbroadcasti32x8 (%rcx),%zmm28");
asm volatile("vbroadcasti64x4 (%rcx),%zmm26");
/* AVX-512: Op code 0f 38 64 */
asm volatile("vpblendmd %zmm26,%zmm27,%zmm28");
asm volatile("vpblendmq %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 65 */
asm volatile("vblendmps %zmm24,%zmm25,%zmm26");
asm volatile("vblendmpd %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 66 */
asm volatile("vpblendmb %zmm26,%zmm27,%zmm28");
asm volatile("vpblendmw %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 75 */
asm volatile("vpermi2b %zmm24,%zmm25,%zmm26");
asm volatile("vpermi2w %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 76 */
asm volatile("vpermi2d %zmm26,%zmm27,%zmm28");
asm volatile("vpermi2q %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 77 */
asm volatile("vpermi2ps %zmm26,%zmm27,%zmm28");
asm volatile("vpermi2pd %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 7a */
asm volatile("vpbroadcastb %eax,%xmm30");
/* AVX-512: Op code 0f 38 7b */
asm volatile("vpbroadcastw %eax,%xmm30");
/* AVX-512: Op code 0f 38 7c */
asm volatile("vpbroadcastd %eax,%xmm30");
asm volatile("vpbroadcastq %rax,%zmm30");
/* AVX-512: Op code 0f 38 7d */
asm volatile("vpermt2b %zmm26,%zmm27,%zmm28");
asm volatile("vpermt2w %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 7e */
asm volatile("vpermt2d %zmm26,%zmm27,%zmm28");
asm volatile("vpermt2q %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 7f */
asm volatile("vpermt2ps %zmm26,%zmm27,%zmm28");
asm volatile("vpermt2pd %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 83 */
asm volatile("vpmultishiftqb %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 88 */
asm volatile("vexpandps (%rcx),%zmm26");
asm volatile("vexpandpd (%rcx),%zmm28");
/* AVX-512: Op code 0f 38 89 */
asm volatile("vpexpandd (%rcx),%zmm28");
asm volatile("vpexpandq (%rcx),%zmm26");
/* AVX-512: Op code 0f 38 8a */
asm volatile("vcompressps %zmm28,(%rcx)");
asm volatile("vcompresspd %zmm28,(%rcx)");
/* AVX-512: Op code 0f 38 8b */
asm volatile("vpcompressd %zmm28,(%rcx)");
asm volatile("vpcompressq %zmm26,(%rcx)");
/* AVX-512: Op code 0f 38 8d */
asm volatile("vpermb %zmm26,%zmm27,%zmm28");
asm volatile("vpermw %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 90 */
asm volatile("vpgatherdd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
asm volatile("vpgatherdq %xmm2,0x04(%rbp,%xmm7,2),%xmm1");
asm volatile("vpgatherdd 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
asm volatile("vpgatherdq 0x7b(%rbp,%ymm27,8),%zmm26{%k1}");
/* AVX-512: Op code 0f 38 91 */
asm volatile("vpgatherqd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
asm volatile("vpgatherqq %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
asm volatile("vpgatherqd 0x7b(%rbp,%zmm27,8),%ymm26{%k1}");
asm volatile("vpgatherqq 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
/* AVX-512: Op code 0f 38 a0 */
asm volatile("vpscatterdd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
asm volatile("vpscatterdq %zmm26,0x7b(%rbp,%ymm27,8){%k1}");
/* AVX-512: Op code 0f 38 a1 */
asm volatile("vpscatterqd %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
asm volatile("vpscatterqq %ymm6,0x7b(%rbp,%ymm27,8){%k1}");
/* AVX-512: Op code 0f 38 a2 */
asm volatile("vscatterdps %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
asm volatile("vscatterdpd %zmm28,0x7b(%rbp,%ymm27,8){%k1}");
/* AVX-512: Op code 0f 38 a3 */
asm volatile("vscatterqps %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
asm volatile("vscatterqpd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
/* AVX-512: Op code 0f 38 b4 */
asm volatile("vpmadd52luq %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 b5 */
asm volatile("vpmadd52huq %zmm26,%zmm27,%zmm28");
/* AVX-512: Op code 0f 38 c4 */
asm volatile("vpconflictd %zmm26,%zmm27");
asm volatile("vpconflictq %zmm26,%zmm27");
/* AVX-512: Op code 0f 38 c8 */
asm volatile("vexp2ps %zmm29,%zmm30");
asm volatile("vexp2pd %zmm26,%zmm27");
/* AVX-512: Op code 0f 38 ca */
asm volatile("vrcp28ps %zmm29,%zmm30");
asm volatile("vrcp28pd %zmm26,%zmm27");
/* AVX-512: Op code 0f 38 cb */
asm volatile("vrcp28ss %xmm28,%xmm29,%xmm30{%k7}");
asm volatile("vrcp28sd %xmm25,%xmm26,%xmm27{%k7}");
/* AVX-512: Op code 0f 38 cc */
asm volatile("vrsqrt28ps %zmm29,%zmm30");
asm volatile("vrsqrt28pd %zmm26,%zmm27");
/* AVX-512: Op code 0f 38 cd */
asm volatile("vrsqrt28ss %xmm28,%xmm29,%xmm30{%k7}");
asm volatile("vrsqrt28sd %xmm25,%xmm26,%xmm27{%k7}");
/* AVX-512: Op code 0f 3a 03 */
asm volatile("valignd $0x12,%zmm28,%zmm29,%zmm30");
asm volatile("valignq $0x12,%zmm25,%zmm26,%zmm27");
/* AVX-512: Op code 0f 3a 08 */
asm volatile("vroundps $0x5,%ymm6,%ymm2");
asm volatile("vrndscaleps $0x12,%zmm25,%zmm26");
/* AVX-512: Op code 0f 3a 09 */
asm volatile("vroundpd $0x5,%ymm6,%ymm2");
asm volatile("vrndscalepd $0x12,%zmm25,%zmm26");
/* AVX-512: Op code 0f 3a 0a */
asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
asm volatile("vrndscaless $0x12,%xmm24,%xmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 3a 0b */
asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
asm volatile("vrndscalesd $0x12,%xmm24,%xmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 3a 18 */
asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
asm volatile("vinsertf32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
asm volatile("vinsertf64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
/* AVX-512: Op code 0f 3a 19 */
asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
asm volatile("vextractf32x4 $0x12,%zmm25,%xmm26{%k7}");
asm volatile("vextractf64x2 $0x12,%zmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 3a 1a */
asm volatile("vinsertf32x8 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
asm volatile("vinsertf64x4 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
/* AVX-512: Op code 0f 3a 1b */
asm volatile("vextractf32x8 $0x12,%zmm29,%ymm30{%k7}");
asm volatile("vextractf64x4 $0x12,%zmm26,%ymm27{%k7}");
/* AVX-512: Op code 0f 3a 1e */
asm volatile("vpcmpud $0x12,%zmm29,%zmm30,%k5");
asm volatile("vpcmpuq $0x12,%zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 3a 1f */
asm volatile("vpcmpd $0x12,%zmm29,%zmm30,%k5");
asm volatile("vpcmpq $0x12,%zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 3a 23 */
asm volatile("vshuff32x4 $0x12,%zmm28,%zmm29,%zmm30");
asm volatile("vshuff64x2 $0x12,%zmm25,%zmm26,%zmm27");
/* AVX-512: Op code 0f 3a 25 */
asm volatile("vpternlogd $0x12,%zmm28,%zmm29,%zmm30");
asm volatile("vpternlogq $0x12,%zmm28,%zmm29,%zmm30");
/* AVX-512: Op code 0f 3a 26 */
asm volatile("vgetmantps $0x12,%zmm26,%zmm27");
asm volatile("vgetmantpd $0x12,%zmm29,%zmm30");
/* AVX-512: Op code 0f 3a 27 */
asm volatile("vgetmantss $0x12,%xmm25,%xmm26,%xmm27{%k7}");
asm volatile("vgetmantsd $0x12,%xmm28,%xmm29,%xmm30{%k7}");
/* AVX-512: Op code 0f 3a 38 */
asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
asm volatile("vinserti32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
asm volatile("vinserti64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
/* AVX-512: Op code 0f 3a 39 */
asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
asm volatile("vextracti32x4 $0x12,%zmm25,%xmm26{%k7}");
asm volatile("vextracti64x2 $0x12,%zmm25,%xmm26{%k7}");
/* AVX-512: Op code 0f 3a 3a */
asm volatile("vinserti32x8 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
asm volatile("vinserti64x4 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
/* AVX-512: Op code 0f 3a 3b */
asm volatile("vextracti32x8 $0x12,%zmm29,%ymm30{%k7}");
asm volatile("vextracti64x4 $0x12,%zmm26,%ymm27{%k7}");
/* AVX-512: Op code 0f 3a 3e */
asm volatile("vpcmpub $0x12,%zmm29,%zmm30,%k5");
asm volatile("vpcmpuw $0x12,%zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 3a 3f */
asm volatile("vpcmpb $0x12,%zmm29,%zmm30,%k5");
asm volatile("vpcmpw $0x12,%zmm26,%zmm27,%k5");
/* AVX-512: Op code 0f 3a 42 */
asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 3a 43 */
asm volatile("vshufi32x4 $0x12,%zmm25,%zmm26,%zmm27");
asm volatile("vshufi64x2 $0x12,%zmm28,%zmm29,%zmm30");
/* AVX-512: Op code 0f 3a 50 */
asm volatile("vrangeps $0x12,%zmm25,%zmm26,%zmm27");
asm volatile("vrangepd $0x12,%zmm28,%zmm29,%zmm30");
/* AVX-512: Op code 0f 3a 51 */
asm volatile("vrangess $0x12,%xmm25,%xmm26,%xmm27");
asm volatile("vrangesd $0x12,%xmm28,%xmm29,%xmm30");
/* AVX-512: Op code 0f 3a 54 */
asm volatile("vfixupimmps $0x12,%zmm28,%zmm29,%zmm30");
asm volatile("vfixupimmpd $0x12,%zmm25,%zmm26,%zmm27");
/* AVX-512: Op code 0f 3a 55 */
asm volatile("vfixupimmss $0x12,%xmm28,%xmm29,%xmm30{%k7}");
asm volatile("vfixupimmsd $0x12,%xmm25,%xmm26,%xmm27{%k7}");
/* AVX-512: Op code 0f 3a 56 */
asm volatile("vreduceps $0x12,%zmm26,%zmm27");
asm volatile("vreducepd $0x12,%zmm29,%zmm30");
/* AVX-512: Op code 0f 3a 57 */
asm volatile("vreducess $0x12,%xmm25,%xmm26,%xmm27");
asm volatile("vreducesd $0x12,%xmm28,%xmm29,%xmm30");
/* AVX-512: Op code 0f 3a 66 */
asm volatile("vfpclassps $0x12,%zmm27,%k5");
asm volatile("vfpclasspd $0x12,%zmm30,%k5");
/* AVX-512: Op code 0f 3a 67 */
asm volatile("vfpclassss $0x12,%xmm27,%k5");
asm volatile("vfpclasssd $0x12,%xmm30,%k5");
/* AVX-512: Op code 0f 72 (Grp13) */
asm volatile("vprord $0x12,%zmm25,%zmm26");
asm volatile("vprorq $0x12,%zmm25,%zmm26");
asm volatile("vprold $0x12,%zmm29,%zmm30");
asm volatile("vprolq $0x12,%zmm29,%zmm30");
asm volatile("psrad $0x2,%mm6");
asm volatile("vpsrad $0x5,%ymm6,%ymm2");
asm volatile("vpsrad $0x5,%zmm26,%zmm22");
asm volatile("vpsraq $0x5,%zmm26,%zmm22");
/* AVX-512: Op code 0f 38 c6 (Grp18) */
asm volatile("vgatherpf0dps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vgatherpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
asm volatile("vgatherpf1dps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vgatherpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
asm volatile("vscatterpf0dps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
asm volatile("vscatterpf1dps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
/* AVX-512: Op code 0f 38 c7 (Grp19) */
asm volatile("vgatherpf0qps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vgatherpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vgatherpf1qps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vgatherpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf0qps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf1qps 0x7b(%r14,%zmm31,8){%k1}");
asm volatile("vscatterpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
/* AVX-512: Examples */
asm volatile("vaddpd %zmm28,%zmm29,%zmm30");
asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}");
asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}{z}");
asm volatile("vaddpd {rn-sae},%zmm28,%zmm29,%zmm30");
asm volatile("vaddpd {ru-sae},%zmm28,%zmm29,%zmm30");
asm volatile("vaddpd {rd-sae},%zmm28,%zmm29,%zmm30");
asm volatile("vaddpd {rz-sae},%zmm28,%zmm29,%zmm30");
asm volatile("vaddpd (%rcx),%zmm29,%zmm30");
asm volatile("vaddpd 0x123(%rax,%r14,8),%zmm29,%zmm30");
asm volatile("vaddpd (%rcx){1to8},%zmm29,%zmm30");
asm volatile("vaddpd 0x1fc0(%rdx),%zmm29,%zmm30");
asm volatile("vaddpd 0x3f8(%rdx){1to8},%zmm29,%zmm30");
asm volatile("vcmpeq_uqps 0x1fc(%rdx){1to16},%zmm30,%k5");
asm volatile("vcmpltsd 0x123(%rax,%r14,8),%xmm29,%k5{%k7}");
asm volatile("vcmplesd {sae},%xmm28,%xmm29,%k5{%k7}");
asm volatile("vgetmantss $0x5b,0x123(%rax,%r14,8),%xmm29,%xmm30{%k7}");
	/* bndmk m64, bnd */
	asm volatile("bndmk (%rax), %bnd0");
@@ -471,6 +1345,921 @@ int main(void)
#else /* #ifdef __x86_64__ */
/* bound r32, mem (same op code as EVEX prefix) */
asm volatile("bound %eax, 0x12345678(%ecx)");
asm volatile("bound %ecx, 0x12345678(%eax)");
asm volatile("bound %edx, 0x12345678(%eax)");
asm volatile("bound %ebx, 0x12345678(%eax)");
asm volatile("bound %esp, 0x12345678(%eax)");
asm volatile("bound %ebp, 0x12345678(%eax)");
asm volatile("bound %esi, 0x12345678(%eax)");
asm volatile("bound %edi, 0x12345678(%eax)");
asm volatile("bound %ecx, (%eax)");
asm volatile("bound %eax, (0x12345678)");
asm volatile("bound %edx, (%ecx,%eax,1)");
asm volatile("bound %edx, 0x12345678(,%eax,1)");
asm volatile("bound %edx, (%eax,%ecx,1)");
asm volatile("bound %edx, (%eax,%ecx,8)");
asm volatile("bound %edx, 0x12(%eax)");
asm volatile("bound %edx, 0x12(%ebp)");
asm volatile("bound %edx, 0x12(%ecx,%eax,1)");
asm volatile("bound %edx, 0x12(%ebp,%eax,1)");
asm volatile("bound %edx, 0x12(%eax,%ecx,1)");
asm volatile("bound %edx, 0x12(%eax,%ecx,8)");
asm volatile("bound %edx, 0x12345678(%eax)");
asm volatile("bound %edx, 0x12345678(%ebp)");
asm volatile("bound %edx, 0x12345678(%ecx,%eax,1)");
asm volatile("bound %edx, 0x12345678(%ebp,%eax,1)");
asm volatile("bound %edx, 0x12345678(%eax,%ecx,1)");
asm volatile("bound %edx, 0x12345678(%eax,%ecx,8)");
/* bound r16, mem (same op code as EVEX prefix) */
asm volatile("bound %ax, 0x12345678(%ecx)");
asm volatile("bound %cx, 0x12345678(%eax)");
asm volatile("bound %dx, 0x12345678(%eax)");
asm volatile("bound %bx, 0x12345678(%eax)");
asm volatile("bound %sp, 0x12345678(%eax)");
asm volatile("bound %bp, 0x12345678(%eax)");
asm volatile("bound %si, 0x12345678(%eax)");
asm volatile("bound %di, 0x12345678(%eax)");
asm volatile("bound %cx, (%eax)");
asm volatile("bound %ax, (0x12345678)");
asm volatile("bound %dx, (%ecx,%eax,1)");
asm volatile("bound %dx, 0x12345678(,%eax,1)");
asm volatile("bound %dx, (%eax,%ecx,1)");
asm volatile("bound %dx, (%eax,%ecx,8)");
asm volatile("bound %dx, 0x12(%eax)");
asm volatile("bound %dx, 0x12(%ebp)");
asm volatile("bound %dx, 0x12(%ecx,%eax,1)");
asm volatile("bound %dx, 0x12(%ebp,%eax,1)");
asm volatile("bound %dx, 0x12(%eax,%ecx,1)");
asm volatile("bound %dx, 0x12(%eax,%ecx,8)");
asm volatile("bound %dx, 0x12345678(%eax)");
asm volatile("bound %dx, 0x12345678(%ebp)");
asm volatile("bound %dx, 0x12345678(%ecx,%eax,1)");
asm volatile("bound %dx, 0x12345678(%ebp,%eax,1)");
asm volatile("bound %dx, 0x12345678(%eax,%ecx,1)");
asm volatile("bound %dx, 0x12345678(%eax,%ecx,8)");
/* AVX-512: Instructions with the same op codes as Mask Instructions */
asm volatile("cmovno %eax,%ebx");
asm volatile("cmovno 0x12345678(%eax),%ecx");
asm volatile("cmovno 0x12345678(%eax),%cx");
asm volatile("cmove %eax,%ebx");
asm volatile("cmove 0x12345678(%eax),%ecx");
asm volatile("cmove 0x12345678(%eax),%cx");
asm volatile("seto 0x12345678(%eax)");
asm volatile("setno 0x12345678(%eax)");
asm volatile("setb 0x12345678(%eax)");
asm volatile("setc 0x12345678(%eax)");
asm volatile("setnae 0x12345678(%eax)");
asm volatile("setae 0x12345678(%eax)");
asm volatile("setnb 0x12345678(%eax)");
asm volatile("setnc 0x12345678(%eax)");
asm volatile("sets 0x12345678(%eax)");
asm volatile("setns 0x12345678(%eax)");
/* AVX-512: Mask Instructions */
asm volatile("kandw %k7,%k6,%k5");
asm volatile("kandq %k7,%k6,%k5");
asm volatile("kandb %k7,%k6,%k5");
asm volatile("kandd %k7,%k6,%k5");
asm volatile("kandnw %k7,%k6,%k5");
asm volatile("kandnq %k7,%k6,%k5");
asm volatile("kandnb %k7,%k6,%k5");
asm volatile("kandnd %k7,%k6,%k5");
asm volatile("knotw %k7,%k6");
asm volatile("knotq %k7,%k6");
asm volatile("knotb %k7,%k6");
asm volatile("knotd %k7,%k6");
asm volatile("korw %k7,%k6,%k5");
asm volatile("korq %k7,%k6,%k5");
asm volatile("korb %k7,%k6,%k5");
asm volatile("kord %k7,%k6,%k5");
asm volatile("kxnorw %k7,%k6,%k5");
asm volatile("kxnorq %k7,%k6,%k5");
asm volatile("kxnorb %k7,%k6,%k5");
asm volatile("kxnord %k7,%k6,%k5");
asm volatile("kxorw %k7,%k6,%k5");
asm volatile("kxorq %k7,%k6,%k5");
asm volatile("kxorb %k7,%k6,%k5");
asm volatile("kxord %k7,%k6,%k5");
asm volatile("kaddw %k7,%k6,%k5");
asm volatile("kaddq %k7,%k6,%k5");
asm volatile("kaddb %k7,%k6,%k5");
asm volatile("kaddd %k7,%k6,%k5");
asm volatile("kunpckbw %k7,%k6,%k5");
asm volatile("kunpckwd %k7,%k6,%k5");
asm volatile("kunpckdq %k7,%k6,%k5");
asm volatile("kmovw %k6,%k5");
asm volatile("kmovw (%ecx),%k5");
asm volatile("kmovw 0x123(%eax,%ecx,8),%k5");
asm volatile("kmovw %k5,(%ecx)");
asm volatile("kmovw %k5,0x123(%eax,%ecx,8)");
asm volatile("kmovw %eax,%k5");
asm volatile("kmovw %ebp,%k5");
asm volatile("kmovw %k5,%eax");
asm volatile("kmovw %k5,%ebp");
asm volatile("kmovq %k6,%k5");
asm volatile("kmovq (%ecx),%k5");
asm volatile("kmovq 0x123(%eax,%ecx,8),%k5");
asm volatile("kmovq %k5,(%ecx)");
asm volatile("kmovq %k5,0x123(%eax,%ecx,8)");
asm volatile("kmovb %k6,%k5");
asm volatile("kmovb (%ecx),%k5");
asm volatile("kmovb 0x123(%eax,%ecx,8),%k5");
asm volatile("kmovb %k5,(%ecx)");
asm volatile("kmovb %k5,0x123(%eax,%ecx,8)");
asm volatile("kmovb %eax,%k5");
asm volatile("kmovb %ebp,%k5");
asm volatile("kmovb %k5,%eax");
asm volatile("kmovb %k5,%ebp");
asm volatile("kmovd %k6,%k5");
asm volatile("kmovd (%ecx),%k5");
asm volatile("kmovd 0x123(%eax,%ecx,8),%k5");
asm volatile("kmovd %k5,(%ecx)");
asm volatile("kmovd %k5,0x123(%eax,%ecx,8)");
asm volatile("kmovd %eax,%k5");
asm volatile("kmovd %ebp,%k5");
asm volatile("kmovd %k5,%eax");
asm volatile("kmovd %k5,%ebp");
asm volatile("kortestw %k6,%k5");
asm volatile("kortestq %k6,%k5");
asm volatile("kortestb %k6,%k5");
asm volatile("kortestd %k6,%k5");
asm volatile("ktestw %k6,%k5");
asm volatile("ktestq %k6,%k5");
asm volatile("ktestb %k6,%k5");
asm volatile("ktestd %k6,%k5");
asm volatile("kshiftrw $0x12,%k6,%k5");
asm volatile("kshiftrq $0x5b,%k6,%k5");
asm volatile("kshiftlw $0x12,%k6,%k5");
asm volatile("kshiftlq $0x5b,%k6,%k5");
/* AVX-512: Op code 0f 5b */
asm volatile("vcvtdq2ps %xmm5,%xmm6");
asm volatile("vcvtqq2ps %zmm5,%ymm6{%k7}");
asm volatile("vcvtps2dq %xmm5,%xmm6");
asm volatile("vcvttps2dq %xmm5,%xmm6");
/* AVX-512: Op code 0f 6f */
asm volatile("movq %mm0,%mm4");
asm volatile("vmovdqa %ymm4,%ymm6");
asm volatile("vmovdqa32 %zmm5,%zmm6");
asm volatile("vmovdqa64 %zmm5,%zmm6");
asm volatile("vmovdqu %ymm4,%ymm6");
asm volatile("vmovdqu32 %zmm5,%zmm6");
asm volatile("vmovdqu64 %zmm5,%zmm6");
asm volatile("vmovdqu8 %zmm5,%zmm6");
asm volatile("vmovdqu16 %zmm5,%zmm6");
/* AVX-512: Op code 0f 78 */
asm volatile("vmread %eax,%ebx");
asm volatile("vcvttps2udq %zmm5,%zmm6");
asm volatile("vcvttpd2udq %zmm5,%ymm6{%k7}");
asm volatile("vcvttsd2usi %xmm6,%eax");
asm volatile("vcvttss2usi %xmm6,%eax");
asm volatile("vcvttps2uqq %ymm5,%zmm6{%k7}");
asm volatile("vcvttpd2uqq %zmm5,%zmm6");
/* AVX-512: Op code 0f 79 */
asm volatile("vmwrite %eax,%ebx");
asm volatile("vcvtps2udq %zmm5,%zmm6");
asm volatile("vcvtpd2udq %zmm5,%ymm6{%k7}");
asm volatile("vcvtsd2usi %xmm6,%eax");
asm volatile("vcvtss2usi %xmm6,%eax");
asm volatile("vcvtps2uqq %ymm5,%zmm6{%k7}");
asm volatile("vcvtpd2uqq %zmm5,%zmm6");
/* AVX-512: Op code 0f 7a */
asm volatile("vcvtudq2pd %ymm5,%zmm6{%k7}");
asm volatile("vcvtuqq2pd %zmm5,%zmm6");
asm volatile("vcvtudq2ps %zmm5,%zmm6");
asm volatile("vcvtuqq2ps %zmm5,%ymm6{%k7}");
asm volatile("vcvttps2qq %ymm5,%zmm6{%k7}");
asm volatile("vcvttpd2qq %zmm5,%zmm6");
/* AVX-512: Op code 0f 7b */
asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
asm volatile("vcvtps2qq %ymm5,%zmm6{%k7}");
asm volatile("vcvtpd2qq %zmm5,%zmm6");
/* AVX-512: Op code 0f 7f */
asm volatile("movq.s %mm0,%mm4");
asm volatile("vmovdqa.s %ymm5,%ymm6");
asm volatile("vmovdqa32.s %zmm5,%zmm6");
asm volatile("vmovdqa64.s %zmm5,%zmm6");
asm volatile("vmovdqu.s %ymm5,%ymm6");
asm volatile("vmovdqu32.s %zmm5,%zmm6");
asm volatile("vmovdqu64.s %zmm5,%zmm6");
asm volatile("vmovdqu8.s %zmm5,%zmm6");
asm volatile("vmovdqu16.s %zmm5,%zmm6");
/* AVX-512: Op code 0f db */
asm volatile("pand %mm1,%mm2");
asm volatile("pand %xmm1,%xmm2");
asm volatile("vpand %ymm4,%ymm6,%ymm2");
asm volatile("vpandd %zmm4,%zmm5,%zmm6");
asm volatile("vpandq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f df */
asm volatile("pandn %mm1,%mm2");
asm volatile("pandn %xmm1,%xmm2");
asm volatile("vpandn %ymm4,%ymm6,%ymm2");
asm volatile("vpandnd %zmm4,%zmm5,%zmm6");
asm volatile("vpandnq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f e6 */
asm volatile("vcvttpd2dq %xmm1,%xmm2");
asm volatile("vcvtdq2pd %xmm5,%xmm6");
asm volatile("vcvtdq2pd %ymm5,%zmm6{%k7}");
asm volatile("vcvtqq2pd %zmm5,%zmm6");
asm volatile("vcvtpd2dq %xmm1,%xmm2");
/* AVX-512: Op code 0f eb */
asm volatile("por %mm4,%mm6");
asm volatile("vpor %ymm4,%ymm6,%ymm2");
asm volatile("vpord %zmm4,%zmm5,%zmm6");
asm volatile("vporq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f ef */
asm volatile("pxor %mm4,%mm6");
asm volatile("vpxor %ymm4,%ymm6,%ymm2");
asm volatile("vpxord %zmm4,%zmm5,%zmm6");
asm volatile("vpxorq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 10 */
asm volatile("pblendvb %xmm1,%xmm0");
asm volatile("vpsrlvw %zmm4,%zmm5,%zmm6");
asm volatile("vpmovuswb %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 11 */
asm volatile("vpmovusdb %zmm5,%xmm6{%k7}");
asm volatile("vpsravw %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 12 */
asm volatile("vpmovusqb %zmm5,%xmm6{%k7}");
asm volatile("vpsllvw %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 13 */
asm volatile("vcvtph2ps %xmm3,%ymm5");
asm volatile("vcvtph2ps %ymm5,%zmm6{%k7}");
asm volatile("vpmovusdw %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 14 */
asm volatile("blendvps %xmm1,%xmm0");
asm volatile("vpmovusqw %zmm5,%xmm6{%k7}");
asm volatile("vprorvd %zmm4,%zmm5,%zmm6");
asm volatile("vprorvq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 15 */
asm volatile("blendvpd %xmm1,%xmm0");
asm volatile("vpmovusqd %zmm5,%ymm6{%k7}");
asm volatile("vprolvd %zmm4,%zmm5,%zmm6");
asm volatile("vprolvq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 16 */
asm volatile("vpermps %ymm4,%ymm6,%ymm2");
asm volatile("vpermps %ymm4,%ymm6,%ymm2{%k7}");
asm volatile("vpermpd %ymm4,%ymm6,%ymm2{%k7}");
/* AVX-512: Op code 0f 38 19 */
asm volatile("vbroadcastsd %xmm4,%ymm6");
asm volatile("vbroadcastf32x2 %xmm7,%zmm6");
/* AVX-512: Op code 0f 38 1a */
asm volatile("vbroadcastf128 (%ecx),%ymm4");
asm volatile("vbroadcastf32x4 (%ecx),%zmm6");
asm volatile("vbroadcastf64x2 (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 1b */
asm volatile("vbroadcastf32x8 (%ecx),%zmm6");
asm volatile("vbroadcastf64x4 (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 1f */
asm volatile("vpabsq %zmm4,%zmm6");
/* AVX-512: Op code 0f 38 20 */
asm volatile("vpmovsxbw %xmm4,%xmm5");
asm volatile("vpmovswb %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 21 */
asm volatile("vpmovsxbd %xmm4,%ymm6");
asm volatile("vpmovsdb %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 22 */
asm volatile("vpmovsxbq %xmm4,%ymm4");
asm volatile("vpmovsqb %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 23 */
asm volatile("vpmovsxwd %xmm4,%ymm4");
asm volatile("vpmovsdw %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 24 */
asm volatile("vpmovsxwq %xmm4,%ymm6");
asm volatile("vpmovsqw %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 25 */
asm volatile("vpmovsxdq %xmm4,%ymm4");
asm volatile("vpmovsqd %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 26 */
asm volatile("vptestmb %zmm5,%zmm6,%k5");
asm volatile("vptestmw %zmm5,%zmm6,%k5");
asm volatile("vptestnmb %zmm4,%zmm5,%k5");
asm volatile("vptestnmw %zmm4,%zmm5,%k5");
/* AVX-512: Op code 0f 38 27 */
asm volatile("vptestmd %zmm5,%zmm6,%k5");
asm volatile("vptestmq %zmm5,%zmm6,%k5");
asm volatile("vptestnmd %zmm4,%zmm5,%k5");
asm volatile("vptestnmq %zmm4,%zmm5,%k5");
/* AVX-512: Op code 0f 38 28 */
asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
asm volatile("vpmovm2b %k5,%zmm6");
asm volatile("vpmovm2w %k5,%zmm6");
/* AVX-512: Op code 0f 38 29 */
asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
asm volatile("vpmovb2m %zmm6,%k5");
asm volatile("vpmovw2m %zmm6,%k5");
/* AVX-512: Op code 0f 38 2a */
asm volatile("vmovntdqa (%ecx),%ymm4");
asm volatile("vpbroadcastmb2q %k6,%zmm1");
/* AVX-512: Op code 0f 38 2c */
asm volatile("vmaskmovps (%ecx),%ymm4,%ymm6");
asm volatile("vscalefps %zmm4,%zmm5,%zmm6");
asm volatile("vscalefpd %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 2d */
asm volatile("vmaskmovpd (%ecx),%ymm4,%ymm6");
asm volatile("vscalefss %xmm4,%xmm5,%xmm6{%k7}");
asm volatile("vscalefsd %xmm4,%xmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 30 */
asm volatile("vpmovzxbw %xmm4,%ymm4");
asm volatile("vpmovwb %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 31 */
asm volatile("vpmovzxbd %xmm4,%ymm6");
asm volatile("vpmovdb %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 32 */
asm volatile("vpmovzxbq %xmm4,%ymm4");
asm volatile("vpmovqb %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 33 */
asm volatile("vpmovzxwd %xmm4,%ymm4");
asm volatile("vpmovdw %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 34 */
asm volatile("vpmovzxwq %xmm4,%ymm6");
asm volatile("vpmovqw %zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 35 */
asm volatile("vpmovzxdq %xmm4,%ymm4");
asm volatile("vpmovqd %zmm5,%ymm6{%k7}");
/* AVX-512: Op code 0f 38 36 */
asm volatile("vpermd %ymm4,%ymm6,%ymm2");
asm volatile("vpermd %ymm4,%ymm6,%ymm2{%k7}");
asm volatile("vpermq %ymm4,%ymm6,%ymm2{%k7}");
/* AVX-512: Op code 0f 38 38 */
asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
asm volatile("vpmovm2d %k5,%zmm6");
asm volatile("vpmovm2q %k5,%zmm6");
/* AVX-512: Op code 0f 38 39 */
asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
asm volatile("vpminsd %zmm4,%zmm5,%zmm6");
asm volatile("vpminsq %zmm4,%zmm5,%zmm6");
asm volatile("vpmovd2m %zmm6,%k5");
asm volatile("vpmovq2m %zmm6,%k5");
/* AVX-512: Op code 0f 38 3a */
asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
asm volatile("vpbroadcastmw2d %k6,%zmm6");
/* AVX-512: Op code 0f 38 3b */
asm volatile("vpminud %ymm4,%ymm6,%ymm2");
asm volatile("vpminud %zmm4,%zmm5,%zmm6");
asm volatile("vpminuq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 3d */
asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
asm volatile("vpmaxsd %zmm4,%zmm5,%zmm6");
asm volatile("vpmaxsq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 3f */
asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
asm volatile("vpmaxud %zmm4,%zmm5,%zmm6");
asm volatile("vpmaxuq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 40 */
asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
asm volatile("vpmulld %zmm4,%zmm5,%zmm6");
asm volatile("vpmullq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 42 */
asm volatile("vgetexpps %zmm5,%zmm6");
asm volatile("vgetexppd %zmm5,%zmm6");
/* AVX-512: Op code 0f 38 43 */
asm volatile("vgetexpss %xmm4,%xmm5,%xmm6{%k7}");
asm volatile("vgetexpsd %xmm2,%xmm3,%xmm4{%k7}");
/* AVX-512: Op code 0f 38 44 */
asm volatile("vplzcntd %zmm5,%zmm6");
asm volatile("vplzcntq %zmm5,%zmm6");
/* AVX-512: Op code 0f 38 46 */
asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
asm volatile("vpsravd %zmm4,%zmm5,%zmm6");
asm volatile("vpsravq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 4c */
asm volatile("vrcp14ps %zmm5,%zmm6");
asm volatile("vrcp14pd %zmm5,%zmm6");
/* AVX-512: Op code 0f 38 4d */
asm volatile("vrcp14ss %xmm4,%xmm5,%xmm6{%k7}");
asm volatile("vrcp14sd %xmm4,%xmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 4e */
asm volatile("vrsqrt14ps %zmm5,%zmm6");
asm volatile("vrsqrt14pd %zmm5,%zmm6");
/* AVX-512: Op code 0f 38 4f */
asm volatile("vrsqrt14ss %xmm4,%xmm5,%xmm6{%k7}");
asm volatile("vrsqrt14sd %xmm4,%xmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 38 59 */
asm volatile("vpbroadcastq %xmm4,%xmm6");
asm volatile("vbroadcasti32x2 %xmm7,%zmm6");
/* AVX-512: Op code 0f 38 5a */
asm volatile("vbroadcasti128 (%ecx),%ymm4");
asm volatile("vbroadcasti32x4 (%ecx),%zmm6");
asm volatile("vbroadcasti64x2 (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 5b */
asm volatile("vbroadcasti32x8 (%ecx),%zmm6");
asm volatile("vbroadcasti64x4 (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 64 */
asm volatile("vpblendmd %zmm4,%zmm5,%zmm6");
asm volatile("vpblendmq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 65 */
asm volatile("vblendmps %zmm4,%zmm5,%zmm6");
asm volatile("vblendmpd %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 66 */
asm volatile("vpblendmb %zmm4,%zmm5,%zmm6");
asm volatile("vpblendmw %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 75 */
asm volatile("vpermi2b %zmm4,%zmm5,%zmm6");
asm volatile("vpermi2w %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 76 */
asm volatile("vpermi2d %zmm4,%zmm5,%zmm6");
asm volatile("vpermi2q %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 77 */
asm volatile("vpermi2ps %zmm4,%zmm5,%zmm6");
asm volatile("vpermi2pd %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 7a */
asm volatile("vpbroadcastb %eax,%xmm3");
/* AVX-512: Op code 0f 38 7b */
asm volatile("vpbroadcastw %eax,%xmm3");
/* AVX-512: Op code 0f 38 7c */
asm volatile("vpbroadcastd %eax,%xmm3");
/* AVX-512: Op code 0f 38 7d */
asm volatile("vpermt2b %zmm4,%zmm5,%zmm6");
asm volatile("vpermt2w %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 7e */
asm volatile("vpermt2d %zmm4,%zmm5,%zmm6");
asm volatile("vpermt2q %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 7f */
asm volatile("vpermt2ps %zmm4,%zmm5,%zmm6");
asm volatile("vpermt2pd %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 83 */
asm volatile("vpmultishiftqb %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 88 */
asm volatile("vexpandps (%ecx),%zmm6");
asm volatile("vexpandpd (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 89 */
asm volatile("vpexpandd (%ecx),%zmm6");
asm volatile("vpexpandq (%ecx),%zmm6");
/* AVX-512: Op code 0f 38 8a */
asm volatile("vcompressps %zmm6,(%ecx)");
asm volatile("vcompresspd %zmm6,(%ecx)");
/* AVX-512: Op code 0f 38 8b */
asm volatile("vpcompressd %zmm6,(%ecx)");
asm volatile("vpcompressq %zmm6,(%ecx)");
/* AVX-512: Op code 0f 38 8d */
asm volatile("vpermb %zmm4,%zmm5,%zmm6");
asm volatile("vpermw %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 90 */
asm volatile("vpgatherdd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
asm volatile("vpgatherdq %xmm2,0x04(%ebp,%xmm7,2),%xmm1");
asm volatile("vpgatherdd 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
asm volatile("vpgatherdq 0x7b(%ebp,%ymm7,8),%zmm6{%k1}");
/* AVX-512: Op code 0f 38 91 */
asm volatile("vpgatherqd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
asm volatile("vpgatherqq %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
asm volatile("vpgatherqd 0x7b(%ebp,%zmm7,8),%ymm6{%k1}");
asm volatile("vpgatherqq 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
/* AVX-512: Op code 0f 38 a0 */
asm volatile("vpscatterdd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vpscatterdq %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
/* AVX-512: Op code 0f 38 a1 */
asm volatile("vpscatterqd %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vpscatterqq %ymm6,0x7b(%ebp,%ymm7,8){%k1}");
/* AVX-512: Op code 0f 38 a2 */
asm volatile("vscatterdps %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterdpd %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
/* AVX-512: Op code 0f 38 a3 */
asm volatile("vscatterqps %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterqpd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
/* AVX-512: Op code 0f 38 b4 */
asm volatile("vpmadd52luq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 b5 */
asm volatile("vpmadd52huq %zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 38 c4 */
asm volatile("vpconflictd %zmm5,%zmm6");
asm volatile("vpconflictq %zmm5,%zmm6");
/* AVX-512: Op code 0f 38 c8 */
asm volatile("vexp2ps %zmm6,%zmm7");
asm volatile("vexp2pd %zmm6,%zmm7");
/* AVX-512: Op code 0f 38 ca */
asm volatile("vrcp28ps %zmm6,%zmm7");
asm volatile("vrcp28pd %zmm6,%zmm7");
/* AVX-512: Op code 0f 38 cb */
asm volatile("vrcp28ss %xmm5,%xmm6,%xmm7{%k7}");
asm volatile("vrcp28sd %xmm5,%xmm6,%xmm7{%k7}");
/* AVX-512: Op code 0f 38 cc */
asm volatile("vrsqrt28ps %zmm6,%zmm7");
asm volatile("vrsqrt28pd %zmm6,%zmm7");
/* AVX-512: Op code 0f 38 cd */
asm volatile("vrsqrt28ss %xmm5,%xmm6,%xmm7{%k7}");
asm volatile("vrsqrt28sd %xmm5,%xmm6,%xmm7{%k7}");
/* AVX-512: Op code 0f 3a 03 */
asm volatile("valignd $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("valignq $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 08 */
asm volatile("vroundps $0x5,%ymm6,%ymm2");
asm volatile("vrndscaleps $0x12,%zmm5,%zmm6");
/* AVX-512: Op code 0f 3a 09 */
asm volatile("vroundpd $0x5,%ymm6,%ymm2");
asm volatile("vrndscalepd $0x12,%zmm5,%zmm6");
/* AVX-512: Op code 0f 3a 0a */
asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
asm volatile("vrndscaless $0x12,%xmm4,%xmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 3a 0b */
asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
asm volatile("vrndscalesd $0x12,%xmm4,%xmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 3a 18 */
asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
asm volatile("vinsertf32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
asm volatile("vinsertf64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
/* AVX-512: Op code 0f 3a 19 */
asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
asm volatile("vextractf32x4 $0x12,%zmm5,%xmm6{%k7}");
asm volatile("vextractf64x2 $0x12,%zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 3a 1a */
asm volatile("vinsertf32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
asm volatile("vinsertf64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
/* AVX-512: Op code 0f 3a 1b */
asm volatile("vextractf32x8 $0x12,%zmm6,%ymm7{%k7}");
asm volatile("vextractf64x4 $0x12,%zmm6,%ymm7{%k7}");
/* AVX-512: Op code 0f 3a 1e */
asm volatile("vpcmpud $0x12,%zmm6,%zmm7,%k5");
asm volatile("vpcmpuq $0x12,%zmm6,%zmm7,%k5");
/* AVX-512: Op code 0f 3a 1f */
asm volatile("vpcmpd $0x12,%zmm6,%zmm7,%k5");
asm volatile("vpcmpq $0x12,%zmm6,%zmm7,%k5");
/* AVX-512: Op code 0f 3a 23 */
asm volatile("vshuff32x4 $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("vshuff64x2 $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 25 */
asm volatile("vpternlogd $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("vpternlogq $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 26 */
asm volatile("vgetmantps $0x12,%zmm6,%zmm7");
asm volatile("vgetmantpd $0x12,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 27 */
asm volatile("vgetmantss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
asm volatile("vgetmantsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
/* AVX-512: Op code 0f 3a 38 */
asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
asm volatile("vinserti32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
asm volatile("vinserti64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
/* AVX-512: Op code 0f 3a 39 */
asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
asm volatile("vextracti32x4 $0x12,%zmm5,%xmm6{%k7}");
asm volatile("vextracti64x2 $0x12,%zmm5,%xmm6{%k7}");
/* AVX-512: Op code 0f 3a 3a */
asm volatile("vinserti32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
asm volatile("vinserti64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
/* AVX-512: Op code 0f 3a 3b */
asm volatile("vextracti32x8 $0x12,%zmm6,%ymm7{%k7}");
asm volatile("vextracti64x4 $0x12,%zmm6,%ymm7{%k7}");
/* AVX-512: Op code 0f 3a 3e */
asm volatile("vpcmpub $0x12,%zmm6,%zmm7,%k5");
asm volatile("vpcmpuw $0x12,%zmm6,%zmm7,%k5");
/* AVX-512: Op code 0f 3a 3f */
asm volatile("vpcmpb $0x12,%zmm6,%zmm7,%k5");
asm volatile("vpcmpw $0x12,%zmm6,%zmm7,%k5");
/* AVX-512: Op code 0f 3a 42 */
asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
/* AVX-512: Op code 0f 3a 43 */
asm volatile("vshufi32x4 $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("vshufi64x2 $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 50 */
asm volatile("vrangeps $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("vrangepd $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 51 */
asm volatile("vrangess $0x12,%xmm5,%xmm6,%xmm7");
asm volatile("vrangesd $0x12,%xmm5,%xmm6,%xmm7");
/* AVX-512: Op code 0f 3a 54 */
asm volatile("vfixupimmps $0x12,%zmm5,%zmm6,%zmm7");
asm volatile("vfixupimmpd $0x12,%zmm5,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 55 */
asm volatile("vfixupimmss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
asm volatile("vfixupimmsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
/* AVX-512: Op code 0f 3a 56 */
asm volatile("vreduceps $0x12,%zmm6,%zmm7");
asm volatile("vreducepd $0x12,%zmm6,%zmm7");
/* AVX-512: Op code 0f 3a 57 */
asm volatile("vreducess $0x12,%xmm5,%xmm6,%xmm7");
asm volatile("vreducesd $0x12,%xmm5,%xmm6,%xmm7");
/* AVX-512: Op code 0f 3a 66 */
asm volatile("vfpclassps $0x12,%zmm7,%k5");
asm volatile("vfpclasspd $0x12,%zmm7,%k5");
/* AVX-512: Op code 0f 3a 67 */
asm volatile("vfpclassss $0x12,%xmm7,%k5");
asm volatile("vfpclasssd $0x12,%xmm7,%k5");
/* AVX-512: Op code 0f 72 (Grp13) */
asm volatile("vprord $0x12,%zmm5,%zmm6");
asm volatile("vprorq $0x12,%zmm5,%zmm6");
asm volatile("vprold $0x12,%zmm5,%zmm6");
asm volatile("vprolq $0x12,%zmm5,%zmm6");
asm volatile("psrad $0x2,%mm6");
asm volatile("vpsrad $0x5,%ymm6,%ymm2");
asm volatile("vpsrad $0x5,%zmm6,%zmm2");
asm volatile("vpsraq $0x5,%zmm6,%zmm2");
/* AVX-512: Op code 0f 38 c6 (Grp18) */
asm volatile("vgatherpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vgatherpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
asm volatile("vgatherpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vgatherpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
asm volatile("vscatterpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
asm volatile("vscatterpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
/* AVX-512: Op code 0f 38 c7 (Grp19) */
asm volatile("vgatherpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vgatherpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vgatherpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vgatherpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
asm volatile("vscatterpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
/* AVX-512: Examples */
asm volatile("vaddpd %zmm4,%zmm5,%zmm6");
asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}");
asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}{z}");
asm volatile("vaddpd {rn-sae},%zmm4,%zmm5,%zmm6");
asm volatile("vaddpd {ru-sae},%zmm4,%zmm5,%zmm6");
asm volatile("vaddpd {rd-sae},%zmm4,%zmm5,%zmm6");
asm volatile("vaddpd {rz-sae},%zmm4,%zmm5,%zmm6");
asm volatile("vaddpd (%ecx),%zmm5,%zmm6");
asm volatile("vaddpd 0x123(%eax,%ecx,8),%zmm5,%zmm6");
asm volatile("vaddpd (%ecx){1to8},%zmm5,%zmm6");
asm volatile("vaddpd 0x1fc0(%edx),%zmm5,%zmm6");
asm volatile("vaddpd 0x3f8(%edx){1to8},%zmm5,%zmm6");
asm volatile("vcmpeq_uqps 0x1fc(%edx){1to16},%zmm6,%k5");
asm volatile("vcmpltsd 0x123(%eax,%ecx,8),%xmm3,%k5{%k7}");
asm volatile("vcmplesd {sae},%xmm4,%xmm5,%k5{%k7}");
asm volatile("vgetmantss $0x5b,0x123(%eax,%ecx,8),%xmm4,%xmm5{%k7}");
	/* bndmk m32, bnd */
	asm volatile("bndmk (%eax), %bnd0");
...
#include <stdbool.h>
+#include <stdlib.h>
#include "tests.h"
#include "dso.h"
#include "debug.h"
...
@@ -8,7 +8,6 @@
#include "map.h"
#include "build-id.h"
#include "perf_regs.h"
-#include <asm/perf_regs.h>
struct mmap_event {
	struct perf_event_header header;
...
@@ -72,12 +72,14 @@ BEGIN {
	lprefix_expr = "\\((66|F2|F3)\\)"
	max_lprefix = 4

-	# All opcodes starting with lower-case 'v' or with (v1) superscript
+	# All opcodes starting with lower-case 'v', 'k' or with (v1) superscript
	# accepts VEX prefix
-	vexok_opcode_expr = "^v.*"
+	vexok_opcode_expr = "^[vk].*"
	vexok_expr = "\\(v1\\)"
	# All opcodes with (v) superscript supports *only* VEX prefix
	vexonly_expr = "\\(v\\)"
+	# All opcodes with (ev) superscript supports *only* EVEX prefix
+	evexonly_expr = "\\(ev\\)"

	prefix_expr = "\\(Prefix\\)"
	prefix_num["Operand-Size"] = "INAT_PFX_OPNDSZ"
@@ -95,6 +97,7 @@ BEGIN {
	prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ"
	prefix_num["VEX+1byte"] = "INAT_PFX_VEX2"
	prefix_num["VEX+2byte"] = "INAT_PFX_VEX3"
+	prefix_num["EVEX"] = "INAT_PFX_EVEX"

	clear_vars()
}
@@ -319,7 +322,9 @@ function convert_operands(count,opnd, i,j,imm,mod)
		flags = add_flags(flags, "INAT_MODRM")

	# check VEX codes
-	if (match(ext, vexonly_expr))
+	if (match(ext, evexonly_expr))
+		flags = add_flags(flags, "INAT_VEXOK | INAT_EVEXONLY")
+	else if (match(ext, vexonly_expr))
		flags = add_flags(flags, "INAT_VEXOK | INAT_VEXONLY")
	else if (match(ext, vexok_expr) || match(opcode, vexok_opcode_expr))
		flags = add_flags(flags, "INAT_VEXOK")
...
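Run through this script, an opcode tagged (ev) picks up both flags in the generated attribute tables. A sketch of what such a generated inat-tables.c entry is shaped like (hand-written illustration, not copied from the generated file; real entries may also carry variant flags for a (66) last-prefix):

	[0x4c] = INAT_MODRM | INAT_VEXOK | INAT_EVEXONLY,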
@@ -48,6 +48,7 @@
/* AVX VEX prefixes */
#define INAT_PFX_VEX2	13	/* 2-bytes VEX prefix */
#define INAT_PFX_VEX3	14	/* 3-bytes VEX prefix */
+#define INAT_PFX_EVEX	15	/* EVEX prefix */
#define INAT_LSTPFX_MAX	3
#define INAT_LGCPFX_MAX	11
@@ -89,6 +90,7 @@
#define INAT_VARIANT	(1 << (INAT_FLAG_OFFS + 4))
#define INAT_VEXOK	(1 << (INAT_FLAG_OFFS + 5))
#define INAT_VEXONLY	(1 << (INAT_FLAG_OFFS + 6))
+#define INAT_EVEXONLY	(1 << (INAT_FLAG_OFFS + 7))
/* Attribute making macros for attribute tables */
#define INAT_MAKE_PREFIX(pfx)	(pfx << INAT_PFX_OFFS)
#define INAT_MAKE_ESCAPE(esc)	(esc << INAT_ESC_OFFS)
@@ -141,7 +143,13 @@ static inline int inat_last_prefix_id(insn_attr_t attr)
static inline int inat_is_vex_prefix(insn_attr_t attr)
{
	attr &= INAT_PFX_MASK;
-	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3;
+	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3 ||
+	       attr == INAT_PFX_EVEX;
+}
+
+static inline int inat_is_evex_prefix(insn_attr_t attr)
+{
+	return (attr & INAT_PFX_MASK) == INAT_PFX_EVEX;
}

static inline int inat_is_vex3_prefix(insn_attr_t attr)
@@ -216,6 +224,11 @@ static inline int inat_accept_vex(insn_attr_t attr)
static inline int inat_must_vex(insn_attr_t attr)
{
-	return attr & INAT_VEXONLY;
+	return attr & (INAT_VEXONLY | INAT_EVEXONLY);
+}
+
+static inline int inat_must_evex(insn_attr_t attr)
+{
+	return attr & INAT_EVEXONLY;
}
#endif
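For reference, a minimal sketch of how the new flag and helpers compose (illustrative only, not part of the diff; the attribute is hand-built here, whereas real attributes come from the generated tables):

	/* An opcode tagged (ev) in the opcode map gets INAT_VEXOK | INAT_EVEXONLY: */
	insn_attr_t attr = INAT_MODRM | INAT_VEXOK | INAT_EVEXONLY;

	/* inat_accept_vex(attr) is non-zero: a VEX-family prefix is legal.   */
	/* inat_must_vex(attr) is non-zero: some VEX/EVEX prefix is required. */
	/* inat_must_evex(attr) is non-zero: specifically EVEX is required.   */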
@@ -155,14 +155,24 @@ void insn_get_prefixes(struct insn *insn)
			/*
			 * In 32-bits mode, if the [7:6] bits (mod bits of
			 * ModRM) on the second byte are not 11b, it is
-			 * LDS or LES.
+			 * LDS or LES or BOUND.
			 */
			if (X86_MODRM_MOD(b2) != 3)
				goto vex_end;
		}
		insn->vex_prefix.bytes[0] = b;
		insn->vex_prefix.bytes[1] = b2;
-		if (inat_is_vex3_prefix(attr)) {
+		if (inat_is_evex_prefix(attr)) {
+			b2 = peek_nbyte_next(insn_byte_t, insn, 2);
+			insn->vex_prefix.bytes[2] = b2;
+			b2 = peek_nbyte_next(insn_byte_t, insn, 3);
+			insn->vex_prefix.bytes[3] = b2;
+			insn->vex_prefix.nbytes = 4;
+			insn->next_byte += 4;
+			if (insn->x86_64 && X86_VEX_W(b2))
+				/* VEX.W overrides opnd_size */
+				insn->opnd_bytes = 8;
+		} else if (inat_is_vex3_prefix(attr)) {
			b2 = peek_nbyte_next(insn_byte_t, insn, 2);
			insn->vex_prefix.bytes[2] = b2;
			insn->vex_prefix.nbytes = 3;
@@ -221,7 +231,9 @@ void insn_get_opcode(struct insn *insn)
		m = insn_vex_m_bits(insn);
		p = insn_vex_p_bits(insn);
		insn->attr = inat_get_avx_attribute(op, m, p);
-		if (!inat_accept_vex(insn->attr) && !inat_is_group(insn->attr))
+		if ((inat_must_evex(insn->attr) && !insn_is_evex(insn)) ||
+		    (!inat_accept_vex(insn->attr) &&
+		     !inat_is_group(insn->attr)))
			insn->attr = 0;	/* This instruction is bad */
		goto end;	/* VEX has only 1 byte for opcode */
	}
...
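As a decode-side sketch, pushing one EVEX-encoded instruction through the updated decoder could look like this (illustrative; the byte sequence is a hand-assembled EVEX.512.66.0F.W1 58 /r encoding of the vaddpd %zmm28,%zmm29,%zmm30 test above, not bytes taken from the diff):

	struct insn insn;
	const unsigned char buf[] = { 0x62, 0x01, 0x95, 0x40, 0x58, 0xf4 };

	insn_init(&insn, buf, sizeof(buf), /* x86_64 */ 1);
	insn_get_length(&insn);
	/* On success insn.length == 6 and insn.vex_prefix.nbytes == 4,
	 * i.e. the leading 0x62 was consumed as a 4-byte EVEX prefix. */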
@@ -91,6 +91,7 @@ struct insn {
#define X86_VEX_B(vex)	((vex) & 0x20)	/* VEX3 Byte1 */
#define X86_VEX_L(vex)	((vex) & 0x04)	/* VEX3 Byte2, VEX2 Byte1 */
/* VEX bit fields */
+#define X86_EVEX_M(vex)	((vex) & 0x03)		/* EVEX Byte1 */
#define X86_VEX3_M(vex)	((vex) & 0x1f)		/* VEX3 Byte1 */
#define X86_VEX2_M	1			/* VEX2.M always 1 */
#define X86_VEX_V(vex)	(((vex) & 0x78) >> 3)	/* VEX3 Byte2, VEX2 Byte1 */
@@ -133,6 +134,13 @@ static inline int insn_is_avx(struct insn *insn)
	return (insn->vex_prefix.value != 0);
}

+static inline int insn_is_evex(struct insn *insn)
+{
+	if (!insn->prefixes.got)
+		insn_get_prefixes(insn);
+	return (insn->vex_prefix.nbytes == 4);
+}
+
/* Ensure this instruction is decoded completely */
static inline int insn_complete(struct insn *insn)
{
@@ -144,8 +152,10 @@ static inline insn_byte_t insn_vex_m_bits(struct insn *insn)
{
	if (insn->vex_prefix.nbytes == 2)	/* 2 bytes VEX */
		return X86_VEX2_M;
-	else
+	else if (insn->vex_prefix.nbytes == 3)	/* 3 bytes VEX */
		return X86_VEX3_M(insn->vex_prefix.bytes[1]);
+	else	/* EVEX */
+		return X86_EVEX_M(insn->vex_prefix.bytes[1]);
}

static inline insn_byte_t insn_vex_p_bits(struct insn *insn)
...
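A caller keeps using insn_vex_m_bits(); the three cases it now distinguishes, summarized as a sketch (assuming the struct layout above):

	/* vex_prefix.nbytes == 2: X86_VEX2_M, always map 1 (0F)
	 * vex_prefix.nbytes == 3: X86_VEX3_M(bytes[1])
	 * vex_prefix.nbytes == 4: X86_EVEX_M(bytes[1]); mm = 1:0F, 2:0F38, 3:0F3A
	 */
	insn_byte_t m = insn_vex_m_bits(&insn);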
@@ -13,12 +13,17 @@
# opcode: escape # escaped-name
# EndTable
#
+# mnemonics that begin with lowercase 'v' accept a VEX or EVEX prefix
+# mnemonics that begin with lowercase 'k' accept a VEX prefix
+#
#<group maps>
# GrpTable: GrpXXX
# reg: mnemonic [operand1[,operand2...]] [(extra1)[,(extra2)...] [| 2nd-mnemonic ...]
# EndTable
#
# AVX Superscripts
+# (ev): this opcode requires EVEX prefix.
+# (evo): this opcode is changed by EVEX prefix (EVEX opcode)
# (v): this opcode requires VEX prefix.
# (v1): this opcode only supports 128bit VEX.
#
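For example, an EVEX-only entry in the tables below combines a last-prefix group with the new superscript like this (entry reproduced from the 0f 38 table further down; per gen-insn-attr-x86.awk above, (ev) turns into INAT_VEXOK | INAT_EVEXONLY):

4c: vrcp14ps/d Vpd,Wpd (66),(ev)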
@@ -137,7 +142,7 @@ AVXcode:
# 0x60 - 0x6f
60: PUSHA/PUSHAD (i64)
61: POPA/POPAD (i64)
-62: BOUND Gv,Ma (i64)
+62: BOUND Gv,Ma (i64) | EVEX (Prefix)
63: ARPL Ew,Gw (i64) | MOVSXD Gv,Ev (o64)
64: SEG=FS (Prefix)
65: SEG=GS (Prefix)
@@ -399,17 +404,17 @@ AVXcode: 1
3f:
# 0x0f 0x40-0x4f
40: CMOVO Gv,Ev
-41: CMOVNO Gv,Ev
+41: CMOVNO Gv,Ev | kandw/q Vk,Hk,Uk | kandb/d Vk,Hk,Uk (66)
-42: CMOVB/C/NAE Gv,Ev
+42: CMOVB/C/NAE Gv,Ev | kandnw/q Vk,Hk,Uk | kandnb/d Vk,Hk,Uk (66)
43: CMOVAE/NB/NC Gv,Ev
-44: CMOVE/Z Gv,Ev
+44: CMOVE/Z Gv,Ev | knotw/q Vk,Uk | knotb/d Vk,Uk (66)
-45: CMOVNE/NZ Gv,Ev
+45: CMOVNE/NZ Gv,Ev | korw/q Vk,Hk,Uk | korb/d Vk,Hk,Uk (66)
-46: CMOVBE/NA Gv,Ev
+46: CMOVBE/NA Gv,Ev | kxnorw/q Vk,Hk,Uk | kxnorb/d Vk,Hk,Uk (66)
-47: CMOVA/NBE Gv,Ev
+47: CMOVA/NBE Gv,Ev | kxorw/q Vk,Hk,Uk | kxorb/d Vk,Hk,Uk (66)
48: CMOVS Gv,Ev
49: CMOVNS Gv,Ev
-4a: CMOVP/PE Gv,Ev
+4a: CMOVP/PE Gv,Ev | kaddw/q Vk,Hk,Uk | kaddb/d Vk,Hk,Uk (66)
-4b: CMOVNP/PO Gv,Ev
+4b: CMOVNP/PO Gv,Ev | kunpckbw Vk,Hk,Uk (66) | kunpckwd/dq Vk,Hk,Uk
4c: CMOVL/NGE Gv,Ev
4d: CMOVNL/GE Gv,Ev
4e: CMOVLE/NG Gv,Ev
@@ -426,7 +431,7 @@ AVXcode: 1
58: vaddps Vps,Hps,Wps | vaddpd Vpd,Hpd,Wpd (66) | vaddss Vss,Hss,Wss (F3),(v1) | vaddsd Vsd,Hsd,Wsd (F2),(v1)
59: vmulps Vps,Hps,Wps | vmulpd Vpd,Hpd,Wpd (66) | vmulss Vss,Hss,Wss (F3),(v1) | vmulsd Vsd,Hsd,Wsd (F2),(v1)
5a: vcvtps2pd Vpd,Wps | vcvtpd2ps Vps,Wpd (66) | vcvtss2sd Vsd,Hx,Wss (F3),(v1) | vcvtsd2ss Vss,Hx,Wsd (F2),(v1)
-5b: vcvtdq2ps Vps,Wdq | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
+5b: vcvtdq2ps Vps,Wdq | vcvtqq2ps Vps,Wqq (evo) | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
5c: vsubps Vps,Hps,Wps | vsubpd Vpd,Hpd,Wpd (66) | vsubss Vss,Hss,Wss (F3),(v1) | vsubsd Vsd,Hsd,Wsd (F2),(v1)
5d: vminps Vps,Hps,Wps | vminpd Vpd,Hpd,Wpd (66) | vminss Vss,Hss,Wss (F3),(v1) | vminsd Vsd,Hsd,Wsd (F2),(v1)
5e: vdivps Vps,Hps,Wps | vdivpd Vpd,Hpd,Wpd (66) | vdivss Vss,Hss,Wss (F3),(v1) | vdivsd Vsd,Hsd,Wsd (F2),(v1)
@@ -447,7 +452,7 @@ AVXcode: 1
6c: vpunpcklqdq Vx,Hx,Wx (66),(v1)
6d: vpunpckhqdq Vx,Hx,Wx (66),(v1)
6e: movd/q Pd,Ey | vmovd/q Vy,Ey (66),(v1)
-6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqu Vx,Wx (F3)
+6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqa32/64 Vx,Wx (66),(evo) | vmovdqu Vx,Wx (F3) | vmovdqu32/64 Vx,Wx (F3),(evo) | vmovdqu8/16 Vx,Wx (F2),(ev)
# 0x0f 0x70-0x7f
70: pshufw Pq,Qq,Ib | vpshufd Vx,Wx,Ib (66),(v1) | vpshufhw Vx,Wx,Ib (F3),(v1) | vpshuflw Vx,Wx,Ib (F2),(v1)
71: Grp12 (1A)
@@ -458,14 +463,14 @@ AVXcode: 1
76: pcmpeqd Pq,Qq | vpcmpeqd Vx,Hx,Wx (66),(v1)
# Note: Remove (v), because vzeroall and vzeroupper becomes emms without VEX.
77: emms | vzeroupper | vzeroall
-78: VMREAD Ey,Gy
+78: VMREAD Ey,Gy | vcvttps2udq/pd2udq Vx,Wpd (evo) | vcvttsd2usi Gv,Wx (F2),(ev) | vcvttss2usi Gv,Wx (F3),(ev) | vcvttps2uqq/pd2uqq Vx,Wx (66),(ev)
-79: VMWRITE Gy,Ey
+79: VMWRITE Gy,Ey | vcvtps2udq/pd2udq Vx,Wpd (evo) | vcvtsd2usi Gv,Wx (F2),(ev) | vcvtss2usi Gv,Wx (F3),(ev) | vcvtps2uqq/pd2uqq Vx,Wx (66),(ev)
-7a:
+7a: vcvtudq2pd/uqq2pd Vpd,Wx (F3),(ev) | vcvtudq2ps/uqq2ps Vpd,Wx (F2),(ev) | vcvttps2qq/pd2qq Vx,Wx (66),(ev)
-7b:
+7b: vcvtusi2sd Vpd,Hpd,Ev (F2),(ev) | vcvtusi2ss Vps,Hps,Ev (F3),(ev) | vcvtps2qq/pd2qq Vx,Wx (66),(ev)
7c: vhaddpd Vpd,Hpd,Wpd (66) | vhaddps Vps,Hps,Wps (F2)
7d: vhsubpd Vpd,Hpd,Wpd (66) | vhsubps Vps,Hps,Wps (F2)
7e: movd/q Ey,Pd | vmovd/q Ey,Vy (66),(v1) | vmovq Vq,Wq (F3),(v1)
-7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqu Wx,Vx (F3)
+7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqa32/64 Wx,Vx (66),(evo) | vmovdqu Wx,Vx (F3) | vmovdqu32/64 Wx,Vx (F3),(evo) | vmovdqu8/16 Wx,Vx (F2),(ev)
# 0x0f 0x80-0x8f
# Note: "forced64" is Intel CPU behavior (see comment about CALL insn).
80: JO Jz (f64)
@@ -485,16 +490,16 @@ AVXcode: 1
8e: JLE/JNG Jz (f64)
8f: JNLE/JG Jz (f64)
# 0x0f 0x90-0x9f
-90: SETO Eb
+90: SETO Eb | kmovw/q Vk,Wk | kmovb/d Vk,Wk (66)
-91: SETNO Eb
+91: SETNO Eb | kmovw/q Mv,Vk | kmovb/d Mv,Vk (66)
-92: SETB/C/NAE Eb
+92: SETB/C/NAE Eb | kmovw Vk,Rv | kmovb Vk,Rv (66) | kmovq/d Vk,Rv (F2)
-93: SETAE/NB/NC Eb
+93: SETAE/NB/NC Eb | kmovw Gv,Uk | kmovb Gv,Uk (66) | kmovq/d Gv,Uk (F2)
94: SETE/Z Eb
95: SETNE/NZ Eb
96: SETBE/NA Eb
97: SETA/NBE Eb
-98: SETS Eb
+98: SETS Eb | kortestw/q Vk,Uk | kortestb/d Vk,Uk (66)
-99: SETNS Eb
+99: SETNS Eb | ktestw/q Vk,Uk | ktestb/d Vk,Uk (66)
9a: SETP/PE Eb
9b: SETNP/PO Eb
9c: SETL/NGE Eb
@@ -564,11 +569,11 @@ d7: pmovmskb Gd,Nq | vpmovmskb Gd,Ux (66),(v1)
d8: psubusb Pq,Qq | vpsubusb Vx,Hx,Wx (66),(v1)
d9: psubusw Pq,Qq | vpsubusw Vx,Hx,Wx (66),(v1)
da: pminub Pq,Qq | vpminub Vx,Hx,Wx (66),(v1)
-db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1)
+db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1) | vpandd/q Vx,Hx,Wx (66),(evo)
dc: paddusb Pq,Qq | vpaddusb Vx,Hx,Wx (66),(v1)
dd: paddusw Pq,Qq | vpaddusw Vx,Hx,Wx (66),(v1)
de: pmaxub Pq,Qq | vpmaxub Vx,Hx,Wx (66),(v1)
-df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1)
+df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1) | vpandnd/q Vx,Hx,Wx (66),(evo)
# 0x0f 0xe0-0xef
e0: pavgb Pq,Qq | vpavgb Vx,Hx,Wx (66),(v1)
e1: psraw Pq,Qq | vpsraw Vx,Hx,Wx (66),(v1)
@@ -576,16 +581,16 @@ e2: psrad Pq,Qq | vpsrad Vx,Hx,Wx (66),(v1)
e3: pavgw Pq,Qq | vpavgw Vx,Hx,Wx (66),(v1)
e4: pmulhuw Pq,Qq | vpmulhuw Vx,Hx,Wx (66),(v1)
e5: pmulhw Pq,Qq | vpmulhw Vx,Hx,Wx (66),(v1)
-e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtpd2dq Vx,Wpd (F2)
+e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtdq2pd/qq2pd Vx,Wdq (F3),(evo) | vcvtpd2dq Vx,Wpd (F2)
e7: movntq Mq,Pq | vmovntdq Mx,Vx (66)
e8: psubsb Pq,Qq | vpsubsb Vx,Hx,Wx (66),(v1)
e9: psubsw Pq,Qq | vpsubsw Vx,Hx,Wx (66),(v1)
ea: pminsw Pq,Qq | vpminsw Vx,Hx,Wx (66),(v1)
-eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1)
+eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1) | vpord/q Vx,Hx,Wx (66),(evo)
ec: paddsb Pq,Qq | vpaddsb Vx,Hx,Wx (66),(v1)
ed: paddsw Pq,Qq | vpaddsw Vx,Hx,Wx (66),(v1)
ee: pmaxsw Pq,Qq | vpmaxsw Vx,Hx,Wx (66),(v1)
-ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1)
+ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1) | vpxord/q Vx,Hx,Wx (66),(evo)
# 0x0f 0xf0-0xff
f0: vlddqu Vx,Mx (F2)
f1: psllw Pq,Qq | vpsllw Vx,Hx,Wx (66),(v1)
@@ -626,81 +631,105 @@ AVXcode: 2
0e: vtestps Vx,Wx (66),(v)
0f: vtestpd Vx,Wx (66),(v)
# 0x0f 0x38 0x10-0x1f
-10: pblendvb Vdq,Wdq (66)
+10: pblendvb Vdq,Wdq (66) | vpsrlvw Vx,Hx,Wx (66),(evo) | vpmovuswb Wx,Vx (F3),(ev)
-11:
+11: vpmovusdb Wx,Vd (F3),(ev) | vpsravw Vx,Hx,Wx (66),(ev)
-12:
+12: vpmovusqb Wx,Vq (F3),(ev) | vpsllvw Vx,Hx,Wx (66),(ev)
-13: vcvtph2ps Vx,Wx,Ib (66),(v)
+13: vcvtph2ps Vx,Wx (66),(v) | vpmovusdw Wx,Vd (F3),(ev)
-14: blendvps Vdq,Wdq (66)
+14: blendvps Vdq,Wdq (66) | vpmovusqw Wx,Vq (F3),(ev) | vprorvd/q Vx,Hx,Wx (66),(evo)
-15: blendvpd Vdq,Wdq (66)
+15: blendvpd Vdq,Wdq (66) | vpmovusqd Wx,Vq (F3),(ev) | vprolvd/q Vx,Hx,Wx (66),(evo)
-16: vpermps Vqq,Hqq,Wqq (66),(v)
+16: vpermps Vqq,Hqq,Wqq (66),(v) | vpermps/d Vqq,Hqq,Wqq (66),(evo)
17: vptest Vx,Wx (66)
18: vbroadcastss Vx,Wd (66),(v)
-19: vbroadcastsd Vqq,Wq (66),(v)
+19: vbroadcastsd Vqq,Wq (66),(v) | vbroadcastf32x2 Vqq,Wq (66),(evo)
-1a: vbroadcastf128 Vqq,Mdq (66),(v)
+1a: vbroadcastf128 Vqq,Mdq (66),(v) | vbroadcastf32x4/64x2 Vqq,Wq (66),(evo)
-1b:
+1b: vbroadcastf32x8/64x4 Vqq,Mdq (66),(ev)
1c: pabsb Pq,Qq | vpabsb Vx,Wx (66),(v1)
1d: pabsw Pq,Qq | vpabsw Vx,Wx (66),(v1)
1e: pabsd Pq,Qq | vpabsd Vx,Wx (66),(v1)
-1f:
+1f: vpabsq Vx,Wx (66),(ev)
# 0x0f 0x38 0x20-0x2f
-20: vpmovsxbw Vx,Ux/Mq (66),(v1)
+20: vpmovsxbw Vx,Ux/Mq (66),(v1) | vpmovswb Wx,Vx (F3),(ev)
-21: vpmovsxbd Vx,Ux/Md (66),(v1)
+21: vpmovsxbd Vx,Ux/Md (66),(v1) | vpmovsdb Wx,Vd (F3),(ev)
-22: vpmovsxbq Vx,Ux/Mw (66),(v1)
+22: vpmovsxbq Vx,Ux/Mw (66),(v1) | vpmovsqb Wx,Vq (F3),(ev)
-23: vpmovsxwd Vx,Ux/Mq (66),(v1)
+23: vpmovsxwd Vx,Ux/Mq (66),(v1) | vpmovsdw Wx,Vd (F3),(ev)
-24: vpmovsxwq Vx,Ux/Md (66),(v1)
+24: vpmovsxwq Vx,Ux/Md (66),(v1) | vpmovsqw Wx,Vq (F3),(ev)
-25: vpmovsxdq Vx,Ux/Mq (66),(v1)
+25: vpmovsxdq Vx,Ux/Mq (66),(v1) | vpmovsqd Wx,Vq (F3),(ev)
-26:
+26: vptestmb/w Vk,Hx,Wx (66),(ev) | vptestnmb/w Vk,Hx,Wx (F3),(ev)
-27:
+27: vptestmd/q Vk,Hx,Wx (66),(ev) | vptestnmd/q Vk,Hx,Wx (F3),(ev)
-28: vpmuldq Vx,Hx,Wx (66),(v1)
+28: vpmuldq Vx,Hx,Wx (66),(v1) | vpmovm2b/w Vx,Uk (F3),(ev)
-29: vpcmpeqq Vx,Hx,Wx (66),(v1)
+29: vpcmpeqq Vx,Hx,Wx (66),(v1) | vpmovb2m/w2m Vk,Ux (F3),(ev)
-2a: vmovntdqa Vx,Mx (66),(v1)
+2a: vmovntdqa Vx,Mx (66),(v1) | vpbroadcastmb2q Vx,Uk (F3),(ev)
2b: vpackusdw Vx,Hx,Wx (66),(v1)
-2c: vmaskmovps Vx,Hx,Mx (66),(v)
+2c: vmaskmovps Vx,Hx,Mx (66),(v) | vscalefps/d Vx,Hx,Wx (66),(evo)
-2d: vmaskmovpd Vx,Hx,Mx (66),(v)
+2d: vmaskmovpd Vx,Hx,Mx (66),(v) | vscalefss/d Vx,Hx,Wx (66),(evo)
2e: vmaskmovps Mx,Hx,Vx (66),(v)
2f: vmaskmovpd Mx,Hx,Vx (66),(v)
# 0x0f 0x38 0x30-0x3f
-30: vpmovzxbw Vx,Ux/Mq (66),(v1)
+30: vpmovzxbw Vx,Ux/Mq (66),(v1) | vpmovwb Wx,Vx (F3),(ev)
-31: vpmovzxbd Vx,Ux/Md (66),(v1)
+31: vpmovzxbd Vx,Ux/Md (66),(v1) | vpmovdb Wx,Vd (F3),(ev)
-32: vpmovzxbq Vx,Ux/Mw (66),(v1)
+32: vpmovzxbq Vx,Ux/Mw (66),(v1) | vpmovqb Wx,Vq (F3),(ev)
-33: vpmovzxwd Vx,Ux/Mq (66),(v1)
+33: vpmovzxwd Vx,Ux/Mq (66),(v1) | vpmovdw Wx,Vd (F3),(ev)
-34: vpmovzxwq Vx,Ux/Md (66),(v1)
+34: vpmovzxwq Vx,Ux/Md (66),(v1) | vpmovqw Wx,Vq (F3),(ev)
-35: vpmovzxdq Vx,Ux/Mq (66),(v1)
+35: vpmovzxdq Vx,Ux/Mq (66),(v1) | vpmovqd Wx,Vq (F3),(ev)
-36: vpermd Vqq,Hqq,Wqq (66),(v)
+36: vpermd Vqq,Hqq,Wqq (66),(v) | vpermd/q Vqq,Hqq,Wqq (66),(evo)
37: vpcmpgtq Vx,Hx,Wx (66),(v1)
-38: vpminsb Vx,Hx,Wx (66),(v1)
+38: vpminsb Vx,Hx,Wx (66),(v1) | vpmovm2d/q Vx,Uk (F3),(ev)
-39: vpminsd Vx,Hx,Wx (66),(v1)
+39: vpminsd Vx,Hx,Wx (66),(v1) | vpminsd/q Vx,Hx,Wx (66),(evo) | vpmovd2m/q2m Vk,Ux (F3),(ev)
-3a: vpminuw Vx,Hx,Wx (66),(v1)
+3a: vpminuw Vx,Hx,Wx (66),(v1) | vpbroadcastmw2d Vx,Uk (F3),(ev)
-3b: vpminud Vx,Hx,Wx (66),(v1)
+3b: vpminud Vx,Hx,Wx (66),(v1) | vpminud/q Vx,Hx,Wx (66),(evo)
3c: vpmaxsb Vx,Hx,Wx (66),(v1)
-3d: vpmaxsd Vx,Hx,Wx (66),(v1)
+3d: vpmaxsd Vx,Hx,Wx (66),(v1) | vpmaxsd/q Vx,Hx,Wx (66),(evo)
3e: vpmaxuw Vx,Hx,Wx (66),(v1)
-3f: vpmaxud Vx,Hx,Wx (66),(v1)
+3f: vpmaxud Vx,Hx,Wx (66),(v1) | vpmaxud/q Vx,Hx,Wx (66),(evo)
# 0x0f 0x38 0x40-0x8f
-40: vpmulld Vx,Hx,Wx (66),(v1)
+40: vpmulld Vx,Hx,Wx (66),(v1) | vpmulld/q Vx,Hx,Wx (66),(evo)
41: vphminposuw Vdq,Wdq (66),(v1)
-42:
+42: vgetexpps/d Vx,Wx (66),(ev)
-43:
+43: vgetexpss/d Vx,Hx,Wx (66),(ev)
-44:
+44: vplzcntd/q Vx,Wx (66),(ev)
45: vpsrlvd/q Vx,Hx,Wx (66),(v)
-46: vpsravd Vx,Hx,Wx (66),(v)
+46: vpsravd Vx,Hx,Wx (66),(v) | vpsravd/q Vx,Hx,Wx (66),(evo)
47: vpsllvd/q Vx,Hx,Wx (66),(v)
-# Skip 0x48-0x57
+# Skip 0x48-0x4b
+4c: vrcp14ps/d Vpd,Wpd (66),(ev)
+4d: vrcp14ss/d Vsd,Hpd,Wsd (66),(ev)
+4e: vrsqrt14ps/d Vpd,Wpd (66),(ev)
+4f: vrsqrt14ss/d Vsd,Hsd,Wsd (66),(ev)
+# Skip 0x50-0x57
58: vpbroadcastd Vx,Wx (66),(v)
-59: vpbroadcastq Vx,Wx (66),(v)
+59: vpbroadcastq Vx,Wx (66),(v) | vbroadcasti32x2 Vx,Wx (66),(evo)
-5a: vbroadcasti128 Vqq,Mdq (66),(v)
+5a: vbroadcasti128 Vqq,Mdq (66),(v) | vbroadcasti32x4/64x2 Vx,Wx (66),(evo)
-# Skip 0x5b-0x77
+5b: vbroadcasti32x8/64x4 Vqq,Mdq (66),(ev)
+# Skip 0x5c-0x63
+64: vpblendmd/q Vx,Hx,Wx (66),(ev)
+65: vblendmps/d Vx,Hx,Wx (66),(ev)
+66: vpblendmb/w Vx,Hx,Wx (66),(ev)
+# Skip 0x67-0x74
+75: vpermi2b/w Vx,Hx,Wx (66),(ev)
+76: vpermi2d/q Vx,Hx,Wx (66),(ev)
+77: vpermi2ps/d Vx,Hx,Wx (66),(ev)
78: vpbroadcastb Vx,Wx (66),(v)
79: vpbroadcastw Vx,Wx (66),(v)
-# Skip 0x7a-0x7f
+7a: vpbroadcastb Vx,Rv (66),(ev)
+7b: vpbroadcastw Vx,Rv (66),(ev)
+7c: vpbroadcastd/q Vx,Rv (66),(ev)
+7d: vpermt2b/w Vx,Hx,Wx (66),(ev)
+7e: vpermt2d/q Vx,Hx,Wx (66),(ev)
+7f: vpermt2ps/d Vx,Hx,Wx (66),(ev)
80: INVEPT Gy,Mdq (66)
81: INVPID Gy,Mdq (66)
82: INVPCID Gy,Mdq (66)
+83: vpmultishiftqb Vx,Hx,Wx (66),(ev)
+88: vexpandps/d Vpd,Wpd (66),(ev)
+89: vpexpandd/q Vx,Wx (66),(ev)
+8a: vcompressps/d Wx,Vx (66),(ev)
+8b: vpcompressd/q Wx,Vx (66),(ev)
8c: vpmaskmovd/q Vx,Hx,Mx (66),(v)
+8d: vpermb/w Vx,Hx,Wx (66),(ev)
8e: vpmaskmovd/q Mx,Vx,Hx (66),(v)
# 0x0f 0x38 0x90-0xbf (FMA)
-90: vgatherdd/q Vx,Hx,Wx (66),(v)
+90: vgatherdd/q Vx,Hx,Wx (66),(v) | vpgatherdd/q Vx,Wx (66),(evo)
-91: vgatherqd/q Vx,Hx,Wx (66),(v)
+91: vgatherqd/q Vx,Hx,Wx (66),(v) | vpgatherqd/q Vx,Wx (66),(evo)
92: vgatherdps/d Vx,Hx,Wx (66),(v)
93: vgatherqps/d Vx,Hx,Wx (66),(v)
94:
@@ -715,6 +744,10 @@ AVXcode: 2
9d: vfnmadd132ss/d Vx,Hx,Wx (66),(v),(v1)
9e: vfnmsub132ps/d Vx,Hx,Wx (66),(v)
9f: vfnmsub132ss/d Vx,Hx,Wx (66),(v),(v1)
+a0: vpscatterdd/q Wx,Vx (66),(ev)
+a1: vpscatterqd/q Wx,Vx (66),(ev)
+a2: vscatterdps/d Wx,Vx (66),(ev)
+a3: vscatterqps/d Wx,Vx (66),(ev)
a6: vfmaddsub213ps/d Vx,Hx,Wx (66),(v)
a7: vfmsubadd213ps/d Vx,Hx,Wx (66),(v)
a8: vfmadd213ps/d Vx,Hx,Wx (66),(v)
@@ -725,6 +758,8 @@ ac: vfnmadd213ps/d Vx,Hx,Wx (66),(v)
ad: vfnmadd213ss/d Vx,Hx,Wx (66),(v),(v1)
ae: vfnmsub213ps/d Vx,Hx,Wx (66),(v)
af: vfnmsub213ss/d Vx,Hx,Wx (66),(v),(v1)
+b4: vpmadd52luq Vx,Hx,Wx (66),(ev)
+b5: vpmadd52huq Vx,Hx,Wx (66),(ev)
b6: vfmaddsub231ps/d Vx,Hx,Wx (66),(v)
b7: vfmsubadd231ps/d Vx,Hx,Wx (66),(v)
b8: vfmadd231ps/d Vx,Hx,Wx (66),(v)
@@ -736,12 +771,15 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
# 0x0f 0x38 0xc0-0xff
-c8: sha1nexte Vdq,Wdq
+c4: vpconflictd/q Vx,Wx (66),(ev)
+c6: Grp18 (1A)
+c7: Grp19 (1A)
+c8: sha1nexte Vdq,Wdq | vexp2ps/d Vx,Wx (66),(ev)
c9: sha1msg1 Vdq,Wdq
-ca: sha1msg2 Vdq,Wdq
+ca: sha1msg2 Vdq,Wdq | vrcp28ps/d Vx,Wx (66),(ev)
-cb: sha256rnds2 Vdq,Wdq
+cb: sha256rnds2 Vdq,Wdq | vrcp28ss/d Vx,Hx,Wx (66),(ev)
-cc: sha256msg1 Vdq,Wdq
+cc: sha256msg1 Vdq,Wdq | vrsqrt28ps/d Vx,Wx (66),(ev)
-cd: sha256msg2 Vdq,Wdq
+cd: sha256msg2 Vdq,Wdq | vrsqrt28ss/d Vx,Hx,Wx (66),(ev)
db: VAESIMC Vdq,Wdq (66),(v1)
dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
@@ -763,15 +801,15 @@ AVXcode: 3
00: vpermq Vqq,Wqq,Ib (66),(v)
01: vpermpd Vqq,Wqq,Ib (66),(v)
02: vpblendd Vx,Hx,Wx,Ib (66),(v)
-03:
+03: valignd/q Vx,Hx,Wx,Ib (66),(ev)
04: vpermilps Vx,Wx,Ib (66),(v)
05: vpermilpd Vx,Wx,Ib (66),(v)
06: vperm2f128 Vqq,Hqq,Wqq,Ib (66),(v)
07:
-08: vroundps Vx,Wx,Ib (66)
+08: vroundps Vx,Wx,Ib (66) | vrndscaleps Vx,Wx,Ib (66),(evo)
-09: vroundpd Vx,Wx,Ib (66)
+09: vroundpd Vx,Wx,Ib (66) | vrndscalepd Vx,Wx,Ib (66),(evo)
-0a: vroundss Vss,Wss,Ib (66),(v1)
+0a: vroundss Vss,Wss,Ib (66),(v1) | vrndscaless Vx,Hx,Wx,Ib (66),(evo)
-0b: vroundsd Vsd,Wsd,Ib (66),(v1)
+0b: vroundsd Vsd,Wsd,Ib (66),(v1) | vrndscalesd Vx,Hx,Wx,Ib (66),(evo)
0c: vblendps Vx,Hx,Wx,Ib (66)
0d: vblendpd Vx,Hx,Wx,Ib (66)
0e: vpblendw Vx,Hx,Wx,Ib (66),(v1)
@@ -780,26 +818,51 @@ AVXcode: 3
15: vpextrw Rd/Mw,Vdq,Ib (66),(v1)
16: vpextrd/q Ey,Vdq,Ib (66),(v1)
17: vextractps Ed,Vdq,Ib (66),(v1)
-18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v)
+18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v) | vinsertf32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
-19: vextractf128 Wdq,Vqq,Ib (66),(v)
+19: vextractf128 Wdq,Vqq,Ib (66),(v) | vextractf32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+1a: vinsertf32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+1b: vextractf32x8/64x4 Wdq,Vqq,Ib (66),(ev)
1d: vcvtps2ph Wx,Vx,Ib (66),(v)
+1e: vpcmpud/q Vk,Hd,Wd,Ib (66),(ev)
+1f: vpcmpd/q Vk,Hd,Wd,Ib (66),(ev)
20: vpinsrb Vdq,Hdq,Ry/Mb,Ib (66),(v1)
21: vinsertps Vdq,Hdq,Udq/Md,Ib (66),(v1)
22: vpinsrd/q Vdq,Hdq,Ey,Ib (66),(v1)
-38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v)
+23: vshuff32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
-39: vextracti128 Wdq,Vqq,Ib (66),(v)
+25: vpternlogd/q Vx,Hx,Wx,Ib (66),(ev)
+26: vgetmantps/d Vx,Wx,Ib (66),(ev)
+27: vgetmantss/d Vx,Hx,Wx,Ib (66),(ev)
+30: kshiftrb/w Vk,Uk,Ib (66),(v)
+31: kshiftrd/q Vk,Uk,Ib (66),(v)
+32: kshiftlb/w Vk,Uk,Ib (66),(v)
+33: kshiftld/q Vk,Uk,Ib (66),(v)
+38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v) | vinserti32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
+39: vextracti128 Wdq,Vqq,Ib (66),(v) | vextracti32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+3a: vinserti32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+3b: vextracti32x8/64x4 Wdq,Vqq,Ib (66),(ev)
+3e: vpcmpub/w Vk,Hk,Wx,Ib (66),(ev)
+3f: vpcmpb/w Vk,Hk,Wx,Ib (66),(ev)
40: vdpps Vx,Hx,Wx,Ib (66)
41: vdppd Vdq,Hdq,Wdq,Ib (66),(v1)
-42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1)
+42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1) | vdbpsadbw Vx,Hx,Wx,Ib (66),(evo)
+43: vshufi32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
44: vpclmulqdq Vdq,Hdq,Wdq,Ib (66),(v1)
46: vperm2i128 Vqq,Hqq,Wqq,Ib (66),(v)
4a: vblendvps Vx,Hx,Wx,Lx (66),(v)
4b: vblendvpd Vx,Hx,Wx,Lx (66),(v)
4c: vpblendvb Vx,Hx,Wx,Lx (66),(v1)
+50: vrangeps/d Vx,Hx,Wx,Ib (66),(ev)
+51: vrangess/d Vx,Hx,Wx,Ib (66),(ev)
+54: vfixupimmps/d Vx,Hx,Wx,Ib (66),(ev)
+55: vfixupimmss/d Vx,Hx,Wx,Ib (66),(ev)
+56: vreduceps/d Vx,Wx,Ib (66),(ev)
+57: vreducess/d Vx,Hx,Wx,Ib (66),(ev)
60: vpcmpestrm Vdq,Wdq,Ib (66),(v1)
61: vpcmpestri Vdq,Wdq,Ib (66),(v1)
62: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
63: vpcmpistri Vdq,Wdq,Ib (66),(v1)
+66: vfpclassps/d Vk,Wx,Ib (66),(ev)
+67: vfpclassss/d Vk,Wx,Ib (66),(ev)
cc: sha1rnds4 Vdq,Wdq,Ib
df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
f0: RORX Gy,Ey,Ib (F2),(v)
...@@ -927,8 +990,10 @@ GrpTable: Grp12 ...@@ -927,8 +990,10 @@ GrpTable: Grp12
EndTable EndTable
GrpTable: Grp13 GrpTable: Grp13
0: vprord/q Hx,Wx,Ib (66),(ev)
1: vprold/q Hx,Wx,Ib (66),(ev)
2: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1) 2: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1)
4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) 4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) | vpsrad/q Hx,Ux,Ib (66),(evo)
6: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1) 6: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1)
EndTable EndTable
...@@ -963,6 +1028,20 @@ GrpTable: Grp17 ...@@ -963,6 +1028,20 @@ GrpTable: Grp17
3: BLSI By,Ey (v) 3: BLSI By,Ey (v)
EndTable EndTable
GrpTable: Grp18
1: vgatherpf0dps/d Wx (66),(ev)
2: vgatherpf1dps/d Wx (66),(ev)
5: vscatterpf0dps/d Wx (66),(ev)
6: vscatterpf1dps/d Wx (66),(ev)
EndTable
GrpTable: Grp19
1: vgatherpf0qps/d Wx (66),(ev)
2: vgatherpf1qps/d Wx (66),(ev)
5: vscatterpf0qps/d Wx (66),(ev)
6: vscatterpf1qps/d Wx (66),(ev)
EndTable
# AMD's Prefetch Group # AMD's Prefetch Group
GrpTable: GrpP GrpTable: GrpP
0: PREFETCH 0: PREFETCH
......
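Two markers recur throughout the opcode-map rows above: (ev) tags an entry that is valid only when the instruction was encoded with an EVEX prefix, and (evo) tags an entry whose EVEX-encoded form decodes as the variant listed after the "|". The awk script that generates the attribute tables from this file translates those markers into INAT_EVEXONLY and variant attributes, and the decoder then checks them against the prefix it actually saw. A minimal sketch of that gating (not the kernel's verbatim code; it assumes the insn_attr_t type, INAT_* flags, and inat_*() helpers from asm/inat.h):

#include <asm/inat.h>	/* kernel-internal: insn_attr_t, INAT_*, inat_*() */

/*
 * Hypothetical validity check: @attr is the opcode's table attribute,
 * @pfx_attr is the attribute of the last decoded prefix byte.
 */
static int opcode_allowed(insn_attr_t attr, insn_attr_t pfx_attr)
{
	/* (ev) rows: usable only after an EVEX prefix */
	if (attr & INAT_EVEXONLY)
		return inat_is_evex_prefix(pfx_attr);
	/* (v)/(v1) rows: require a VEX-family prefix (EVEX counts too) */
	if (attr & INAT_VEXONLY)
		return inat_is_vex_prefix(pfx_attr);
	return 1;
}

The in-tree decoder performs an equivalent check while resolving opcode attributes, discarding an EVEX-only entry when no EVEX prefix was decoded.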
-ifndef ARCH
-ARCH := $(shell uname -m 2>/dev/null || echo not)
-endif
-ARCH := $(shell echo $(ARCH) | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
+HOSTARCH := $(shell uname -m | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
                                   -e s/sun4u/sparc/ -e s/sparc64/sparc/ \
                                   -e /arm64/!s/arm.*/arm/ -e s/sa110/arm/ \
                                   -e s/s390x/s390/ -e s/parisc64/parisc/ \
@@ -10,6 +6,41 @@ ARCH := $(shell echo $(ARCH) | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
                                   -e s/sh[234].*/sh/ -e s/aarch64.*/arm64/ \
                                   -e s/tile.*/tile/ )
+
+ifndef ARCH
+ARCH := $(HOSTARCH)
+endif
+
+SRCARCH := $(ARCH)
+
+# Additional ARCH settings for x86
+ifeq ($(ARCH),i386)
+        SRCARCH := x86
+endif
+ifeq ($(ARCH),x86_64)
+        SRCARCH := x86
+endif
+
+# Additional ARCH settings for sparc
+ifeq ($(ARCH),sparc32)
+        SRCARCH := sparc
+endif
+ifeq ($(ARCH),sparc64)
+        SRCARCH := sparc
+endif
+
+# Additional ARCH settings for sh
+ifeq ($(ARCH),sh64)
+        SRCARCH := sh
+endif
+
+# Additional ARCH settings for tile
+ifeq ($(ARCH),tilepro)
+        SRCARCH := tile
+endif
+ifeq ($(ARCH),tilegx)
+        SRCARCH := tile
+endif
+
 LP64 := $(shell echo __LP64__ | ${CC} ${CFLAGS} -E -x c - | tail -n 1)
 ifeq ($(LP64), 1)
   IS_64_BIT := 1
......
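Taken together, the Makefile.arch change computes the build host's architecture exactly once: HOSTARCH normalizes `uname -m` through the sed substitutions that previously ran on ARCH, ARCH falls back to HOSTARCH when the caller does not set it, and the ifeq blocks fold per-ISA aliases (i386/x86_64, sparc32/sparc64, sh64, tilepro/tilegx) into the SRCARCH directory name. A hypothetical standalone C equivalent of just the normalization rules visible in this hunk (the helper name and rule table are illustrative, not part of the patch; the ppc/mips rules sit in elided context and are omitted):

#include <stdio.h>
#include <string.h>
#include <sys/utsname.h>

/* Mirror the sed expressions shown above; first match wins. */
static const char *normalize(const char *m)
{
	size_t n = strlen(m);

	if (n == 4 && m[0] == 'i' && m[2] == '8' && m[3] == '6')
		return "x86";		/* i.86 family: i386..i686 */
	if (!strcmp(m, "x86_64"))
		return "x86";
	if (!strcmp(m, "sun4u") || !strcmp(m, "sparc64"))
		return "sparc";
	if (!strncmp(m, "aarch64", 7))
		return "arm64";		/* aarch64.* -> arm64 */
	if (!strstr(m, "arm64") && !strncmp(m, "arm", 3))
		return "arm";		/* arm.* -> arm, unless arm64 */
	if (!strcmp(m, "sa110"))
		return "arm";
	if (!strcmp(m, "s390x"))
		return "s390";
	if (!strcmp(m, "parisc64"))
		return "parisc";
	if (n >= 3 && m[0] == 's' && m[1] == 'h' && m[2] >= '2' && m[2] <= '4')
		return "sh";		/* sh2/sh3/sh4 variants */
	if (!strncmp(m, "tile", 4))
		return "tile";		/* tilepro, tilegx, ... */
	return m;
}

int main(void)
{
	struct utsname u;

	if (uname(&u))
		return 1;
	printf("HOSTARCH=%s\n", normalize(u.machine));
	return 0;
}

On an x86-64 build host this should print HOSTARCH=x86, matching what the fragment computes before consumers such as objtool and the vdso2c build pick it up.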