author	Ingo Molnar <mingo@kernel.org>	2016-07-25 13:48:41 -0400
committer	Ingo Molnar <mingo@kernel.org>	2016-07-25 13:48:41 -0400
commit	674d2d69b14f677a771ceec4b48bfade94a0c5f1 (patch)
tree	1413429fd703b57aaf9f5b7fdab4b283b32b13cd
parent	5048c2af078d5976895d521262a8802ea791f3b0 (diff)
parent	4e3ba8af21b00b91b451e7c4a9fa3a63b025dd56 (diff)
Merge tag 'perf-core-for-mingo-20160725' of git://git.kernel.org/pub/scm/linux/kernel/git/acme/linux into perf/core
Pull perf/core improvements and fixes from Arnaldo Carvalho de Melo:

- Add AVX-512 support to the instruction decoder, used by Intel PT,
  fix vcvtph2ps instruction decoding (Adrian Hunter)

- Make objtool and vdso2c use the right arch header search path
  (Stephen Rothwell, Josh Poimboeuf, Arnaldo Carvalho de Melo)

Signed-off-by: Arnaldo Carvalho de Melo <acme@redhat.com>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
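For readers of the decoder hunks below: EVEX reuses opcode byte 0x62, which
32-bit code already uses for BOUND, so the decoder has to disambiguate via
the mod bits of the byte that follows. A minimal sketch of that check,
assuming p points at the candidate prefix (the helper name is illustrative,
not part of the patch):

#include <stdint.h>

/* Sketch: is this byte stream plausibly EVEX-prefixed?  In 64-bit mode
 * 0x62 is always the EVEX escape; in 32-bit mode it is only EVEX when
 * the [7:6] bits of the next byte are 11b (otherwise it decodes as
 * BOUND Gv,Ma, which requires a memory operand). */
static int looks_like_evex(const uint8_t *p, int x86_64)
{
	if (p[0] != 0x62)
		return 0;
	if (x86_64)
		return 1;
	return (p[1] >> 6) == 3;	/* mod == 11b cannot be BOUND */
}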
-rw-r--r--	arch/x86/entry/vdso/Makefile	2
-rw-r--r--	arch/x86/include/asm/inat.h	17
-rw-r--r--	arch/x86/include/asm/insn.h	12
-rw-r--r--	arch/x86/lib/insn.c	18
-rw-r--r--	arch/x86/lib/x86-opcode-map.txt	263
-rw-r--r--	arch/x86/tools/gen-insn-attr-x86.awk	11
-rw-r--r--	tools/objtool/Build	2
-rw-r--r--	tools/objtool/Makefile	8
-rw-r--r--	tools/perf/arch/x86/tests/insn-x86-dat-32.c	1018
-rw-r--r--	tools/perf/arch/x86/tests/insn-x86-dat-64.c	940
-rw-r--r--	tools/perf/arch/x86/tests/insn-x86-dat-src.c	1789
-rw-r--r--	tools/perf/tests/kmod-path.c	1
-rw-r--r--	tools/perf/util/event.h	1
-rw-r--r--	tools/perf/util/intel-pt-decoder/gen-insn-attr-x86.awk	11
-rw-r--r--	tools/perf/util/intel-pt-decoder/inat.h	17
-rw-r--r--	tools/perf/util/intel-pt-decoder/insn.c	18
-rw-r--r--	tools/perf/util/intel-pt-decoder/insn.h	12
-rw-r--r--	tools/perf/util/intel-pt-decoder/x86-opcode-map.txt	263
-rw-r--r--	tools/scripts/Makefile.arch	41
19 files changed, 4221 insertions, 223 deletions
diff --git a/arch/x86/entry/vdso/Makefile b/arch/x86/entry/vdso/Makefile
index 253b72eaade6..25e88c030c47 100644
--- a/arch/x86/entry/vdso/Makefile
+++ b/arch/x86/entry/vdso/Makefile
@@ -55,7 +55,7 @@ VDSO_LDFLAGS_vdso.lds = -m64 -Wl,-soname=linux-vdso.so.1 \
 $(obj)/vdso64.so.dbg: $(src)/vdso.lds $(vobjs) FORCE
 	$(call if_changed,vdso)
 
-HOST_EXTRACFLAGS += -I$(srctree)/tools/include -I$(srctree)/include/uapi -I$(srctree)/arch/x86/include/uapi
+HOST_EXTRACFLAGS += -I$(srctree)/tools/include -I$(srctree)/include/uapi -I$(srctree)/arch/$(SUBARCH)/include/uapi
 hostprogs-y += vdso2c
 
 quiet_cmd_vdso2c = VDSO2C  $@
diff --git a/arch/x86/include/asm/inat.h b/arch/x86/include/asm/inat.h
index 74a2e312e8a2..02aff0867211 100644
--- a/arch/x86/include/asm/inat.h
+++ b/arch/x86/include/asm/inat.h
@@ -48,6 +48,7 @@
 /* AVX VEX prefixes */
 #define INAT_PFX_VEX2	13	/* 2-bytes VEX prefix */
 #define INAT_PFX_VEX3	14	/* 3-bytes VEX prefix */
+#define INAT_PFX_EVEX	15	/* EVEX prefix */
 
 #define INAT_LSTPFX_MAX	3
 #define INAT_LGCPFX_MAX	11
@@ -89,6 +90,7 @@
 #define INAT_VARIANT	(1 << (INAT_FLAG_OFFS + 4))
 #define INAT_VEXOK	(1 << (INAT_FLAG_OFFS + 5))
 #define INAT_VEXONLY	(1 << (INAT_FLAG_OFFS + 6))
+#define INAT_EVEXONLY	(1 << (INAT_FLAG_OFFS + 7))
 /* Attribute making macros for attribute tables */
 #define INAT_MAKE_PREFIX(pfx)	(pfx << INAT_PFX_OFFS)
 #define INAT_MAKE_ESCAPE(esc)	(esc << INAT_ESC_OFFS)
@@ -141,7 +143,13 @@ static inline int inat_last_prefix_id(insn_attr_t attr)
 static inline int inat_is_vex_prefix(insn_attr_t attr)
 {
 	attr &= INAT_PFX_MASK;
-	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3;
+	return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3 ||
+	       attr == INAT_PFX_EVEX;
+}
+
+static inline int inat_is_evex_prefix(insn_attr_t attr)
+{
+	return (attr & INAT_PFX_MASK) == INAT_PFX_EVEX;
 }
 
 static inline int inat_is_vex3_prefix(insn_attr_t attr)
@@ -216,6 +224,11 @@ static inline int inat_accept_vex(insn_attr_t attr)
 
 static inline int inat_must_vex(insn_attr_t attr)
 {
-	return attr & INAT_VEXONLY;
+	return attr & (INAT_VEXONLY | INAT_EVEXONLY);
+}
+
+static inline int inat_must_evex(insn_attr_t attr)
+{
+	return attr & INAT_EVEXONLY;
 }
 #endif
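A quick sketch of how the two new predicates nest, assuming the inat.h
definitions above are in scope; the attribute values mirror what the
gen-insn-attr-x86.awk hunk further down emits for (ev) and (v) opcodes,
and the function itself is purely illustrative:

#include <assert.h>
#include <asm/inat.h>

/* Sketch: an (ev)-tagged opcode gets INAT_VEXOK | INAT_EVEXONLY from the
 * table generator, a (v)-tagged one gets INAT_VEXOK | INAT_VEXONLY, so
 * EVEX-only implies must-vex but not vice versa. */
static void inat_flag_relations(void)
{
	insn_attr_t ev = INAT_VEXOK | INAT_EVEXONLY;
	insn_attr_t v  = INAT_VEXOK | INAT_VEXONLY;

	assert(inat_must_vex(ev) && inat_must_evex(ev));
	assert(inat_must_vex(v) && !inat_must_evex(v));
}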
diff --git a/arch/x86/include/asm/insn.h b/arch/x86/include/asm/insn.h
index e7814b74caf8..b3e32b010ab1 100644
--- a/arch/x86/include/asm/insn.h
+++ b/arch/x86/include/asm/insn.h
@@ -91,6 +91,7 @@ struct insn {
 #define X86_VEX_B(vex)	((vex) & 0x20)	/* VEX3 Byte1 */
 #define X86_VEX_L(vex)	((vex) & 0x04)	/* VEX3 Byte2, VEX2 Byte1 */
 /* VEX bit fields */
+#define X86_EVEX_M(vex)	((vex) & 0x03)		/* EVEX Byte1 */
 #define X86_VEX3_M(vex)	((vex) & 0x1f)		/* VEX3 Byte1 */
 #define X86_VEX2_M	1			/* VEX2.M always 1 */
 #define X86_VEX_V(vex)	(((vex) & 0x78) >> 3)	/* VEX3 Byte2, VEX2 Byte1 */
@@ -133,6 +134,13 @@ static inline int insn_is_avx(struct insn *insn)
 	return (insn->vex_prefix.value != 0);
 }
 
+static inline int insn_is_evex(struct insn *insn)
+{
+	if (!insn->prefixes.got)
+		insn_get_prefixes(insn);
+	return (insn->vex_prefix.nbytes == 4);
+}
+
 /* Ensure this instruction is decoded completely */
 static inline int insn_complete(struct insn *insn)
 {
@@ -144,8 +152,10 @@ static inline insn_byte_t insn_vex_m_bits(struct insn *insn)
 {
 	if (insn->vex_prefix.nbytes == 2)	/* 2 bytes VEX */
 		return X86_VEX2_M;
-	else
-		return X86_VEX3_M(insn->vex_prefix.bytes[1]);
+	else if (insn->vex_prefix.nbytes == 3)	/* 3 bytes VEX */
+		return X86_VEX3_M(insn->vex_prefix.bytes[1]);
+	else					/* EVEX */
+		return X86_EVEX_M(insn->vex_prefix.bytes[1]);
 }
 
 static inline insn_byte_t insn_vex_p_bits(struct insn *insn)
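All three encodings share the vex_prefix buffer, so vex_prefix.nbytes is
the discriminator that insn_is_evex() and insn_vex_m_bits() key off: 2 for
VEX2, 3 for VEX3, 4 for EVEX. A hedged usage sketch built on the existing
insn_init()/insn_get_length() entry points (error handling elided):

#include <stdio.h>
#include <asm/insn.h>

static void report_prefix(const unsigned char *buf, int len)
{
	struct insn insn;

	insn_init(&insn, buf, len, 0);		/* 0 = 32-bit mode */
	insn_get_length(&insn);			/* full decode */

	if (insn_is_evex(&insn))		/* nbytes == 4 */
		printf("EVEX, map %u\n", (unsigned)insn_vex_m_bits(&insn));
	else if (insn_is_avx(&insn))		/* VEX2 or VEX3 */
		printf("VEX%d\n", insn.vex_prefix.nbytes);
}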
diff --git a/arch/x86/lib/insn.c b/arch/x86/lib/insn.c
index 1a416935bac9..1088eb8f3a5f 100644
--- a/arch/x86/lib/insn.c
+++ b/arch/x86/lib/insn.c
@@ -155,14 +155,24 @@ found:
 			/*
 			 * In 32-bits mode, if the [7:6] bits (mod bits of
 			 * ModRM) on the second byte are not 11b, it is
-			 * LDS or LES.
+			 * LDS or LES or BOUND.
 			 */
 			if (X86_MODRM_MOD(b2) != 3)
 				goto vex_end;
 		}
 		insn->vex_prefix.bytes[0] = b;
 		insn->vex_prefix.bytes[1] = b2;
-		if (inat_is_vex3_prefix(attr)) {
+		if (inat_is_evex_prefix(attr)) {
+			b2 = peek_nbyte_next(insn_byte_t, insn, 2);
+			insn->vex_prefix.bytes[2] = b2;
+			b2 = peek_nbyte_next(insn_byte_t, insn, 3);
+			insn->vex_prefix.bytes[3] = b2;
+			insn->vex_prefix.nbytes = 4;
+			insn->next_byte += 4;
+			if (insn->x86_64 && X86_VEX_W(b2))
+				/* VEX.W overrides opnd_size */
+				insn->opnd_bytes = 8;
+		} else if (inat_is_vex3_prefix(attr)) {
 			b2 = peek_nbyte_next(insn_byte_t, insn, 2);
 			insn->vex_prefix.bytes[2] = b2;
 			insn->vex_prefix.nbytes = 3;
@@ -221,7 +231,9 @@ void insn_get_opcode(struct insn *insn)
 		m = insn_vex_m_bits(insn);
 		p = insn_vex_p_bits(insn);
 		insn->attr = inat_get_avx_attribute(op, m, p);
-		if (!inat_accept_vex(insn->attr) && !inat_is_group(insn->attr))
+		if ((inat_must_evex(insn->attr) && !insn_is_evex(insn)) ||
+		    (!inat_accept_vex(insn->attr) &&
+		     !inat_is_group(insn->attr)))
 			insn->attr = 0;	/* This instruction is bad */
 		goto end;	/* VEX has only 1 byte for opcode */
 	}
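One of the new perf test vectors added below exercises exactly this path;
a sketch of pushing it through the decoder, assuming the API from the
insn.h/insn.c hunks (32-bit mode, to match insn-x86-dat-32.c):

#include <asm/insn.h>

/* "62 f1 7d 48 6f f5" must now decode as the 6-byte EVEX instruction
 * vmovdqa32 %zmm5,%zmm6 rather than stopping at BOUND: byte 2 has
 * mod == 11b, so the 32-bit BOUND interpretation is excluded. */
static int evex_smoke_test(void)
{
	const unsigned char code[] = { 0x62, 0xf1, 0x7d, 0x48, 0x6f, 0xf5 };
	struct insn insn;

	insn_init(&insn, code, sizeof(code), 0);
	insn_get_length(&insn);

	return insn_complete(&insn) && insn_is_evex(&insn) &&
	       insn.length == sizeof(code);
}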
diff --git a/arch/x86/lib/x86-opcode-map.txt b/arch/x86/lib/x86-opcode-map.txt
index d388de72eaca..ec378cd7b71e 100644
--- a/arch/x86/lib/x86-opcode-map.txt
+++ b/arch/x86/lib/x86-opcode-map.txt
@@ -13,12 +13,17 @@
 # opcode: escape # escaped-name
 # EndTable
 #
+# mnemonics that begin with lowercase 'v' accept a VEX or EVEX prefix
+# mnemonics that begin with lowercase 'k' accept a VEX prefix
+#
 #<group maps>
 # GrpTable: GrpXXX
 # reg: mnemonic [operand1[,operand2...]] [(extra1)[,(extra2)...] [| 2nd-mnemonic ...]
 # EndTable
 #
 # AVX Superscripts
+#  (ev): this opcode requires EVEX prefix.
+#  (evo): this opcode is changed by EVEX prefix (EVEX opcode)
 #  (v): this opcode requires VEX prefix.
 #  (v1): this opcode only supports 128bit VEX.
 #
@@ -137,7 +142,7 @@ AVXcode:
 # 0x60 - 0x6f
 60: PUSHA/PUSHAD (i64)
 61: POPA/POPAD (i64)
-62: BOUND Gv,Ma (i64)
+62: BOUND Gv,Ma (i64) | EVEX (Prefix)
 63: ARPL Ew,Gw (i64) | MOVSXD Gv,Ev (o64)
 64: SEG=FS (Prefix)
 65: SEG=GS (Prefix)
@@ -399,17 +404,17 @@ AVXcode: 1
 3f:
 # 0x0f 0x40-0x4f
 40: CMOVO Gv,Ev
-41: CMOVNO Gv,Ev
-42: CMOVB/C/NAE Gv,Ev
+41: CMOVNO Gv,Ev | kandw/q Vk,Hk,Uk | kandb/d Vk,Hk,Uk (66)
+42: CMOVB/C/NAE Gv,Ev | kandnw/q Vk,Hk,Uk | kandnb/d Vk,Hk,Uk (66)
 43: CMOVAE/NB/NC Gv,Ev
-44: CMOVE/Z Gv,Ev
-45: CMOVNE/NZ Gv,Ev
-46: CMOVBE/NA Gv,Ev
-47: CMOVA/NBE Gv,Ev
+44: CMOVE/Z Gv,Ev | knotw/q Vk,Uk | knotb/d Vk,Uk (66)
+45: CMOVNE/NZ Gv,Ev | korw/q Vk,Hk,Uk | korb/d Vk,Hk,Uk (66)
+46: CMOVBE/NA Gv,Ev | kxnorw/q Vk,Hk,Uk | kxnorb/d Vk,Hk,Uk (66)
+47: CMOVA/NBE Gv,Ev | kxorw/q Vk,Hk,Uk | kxorb/d Vk,Hk,Uk (66)
 48: CMOVS Gv,Ev
 49: CMOVNS Gv,Ev
-4a: CMOVP/PE Gv,Ev
-4b: CMOVNP/PO Gv,Ev
+4a: CMOVP/PE Gv,Ev | kaddw/q Vk,Hk,Uk | kaddb/d Vk,Hk,Uk (66)
+4b: CMOVNP/PO Gv,Ev | kunpckbw Vk,Hk,Uk (66) | kunpckwd/dq Vk,Hk,Uk
 4c: CMOVL/NGE Gv,Ev
 4d: CMOVNL/GE Gv,Ev
 4e: CMOVLE/NG Gv,Ev
@@ -426,7 +431,7 @@ AVXcode: 1
 58: vaddps Vps,Hps,Wps | vaddpd Vpd,Hpd,Wpd (66) | vaddss Vss,Hss,Wss (F3),(v1) | vaddsd Vsd,Hsd,Wsd (F2),(v1)
 59: vmulps Vps,Hps,Wps | vmulpd Vpd,Hpd,Wpd (66) | vmulss Vss,Hss,Wss (F3),(v1) | vmulsd Vsd,Hsd,Wsd (F2),(v1)
 5a: vcvtps2pd Vpd,Wps | vcvtpd2ps Vps,Wpd (66) | vcvtss2sd Vsd,Hx,Wss (F3),(v1) | vcvtsd2ss Vss,Hx,Wsd (F2),(v1)
-5b: vcvtdq2ps Vps,Wdq | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
+5b: vcvtdq2ps Vps,Wdq | vcvtqq2ps Vps,Wqq (evo) | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
 5c: vsubps Vps,Hps,Wps | vsubpd Vpd,Hpd,Wpd (66) | vsubss Vss,Hss,Wss (F3),(v1) | vsubsd Vsd,Hsd,Wsd (F2),(v1)
 5d: vminps Vps,Hps,Wps | vminpd Vpd,Hpd,Wpd (66) | vminss Vss,Hss,Wss (F3),(v1) | vminsd Vsd,Hsd,Wsd (F2),(v1)
 5e: vdivps Vps,Hps,Wps | vdivpd Vpd,Hpd,Wpd (66) | vdivss Vss,Hss,Wss (F3),(v1) | vdivsd Vsd,Hsd,Wsd (F2),(v1)
@@ -447,7 +452,7 @@ AVXcode: 1
 6c: vpunpcklqdq Vx,Hx,Wx (66),(v1)
 6d: vpunpckhqdq Vx,Hx,Wx (66),(v1)
 6e: movd/q Pd,Ey | vmovd/q Vy,Ey (66),(v1)
-6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqu Vx,Wx (F3)
+6f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqa32/64 Vx,Wx (66),(evo) | vmovdqu Vx,Wx (F3) | vmovdqu32/64 Vx,Wx (F3),(evo) | vmovdqu8/16 Vx,Wx (F2),(ev)
 # 0x0f 0x70-0x7f
 70: pshufw Pq,Qq,Ib | vpshufd Vx,Wx,Ib (66),(v1) | vpshufhw Vx,Wx,Ib (F3),(v1) | vpshuflw Vx,Wx,Ib (F2),(v1)
 71: Grp12 (1A)
@@ -458,14 +463,14 @@ AVXcode: 1
 76: pcmpeqd Pq,Qq | vpcmpeqd Vx,Hx,Wx (66),(v1)
 # Note: Remove (v), because vzeroall and vzeroupper becomes emms without VEX.
 77: emms | vzeroupper | vzeroall
-78: VMREAD Ey,Gy
-79: VMWRITE Gy,Ey
-7a:
-7b:
+78: VMREAD Ey,Gy | vcvttps2udq/pd2udq Vx,Wpd (evo) | vcvttsd2usi Gv,Wx (F2),(ev) | vcvttss2usi Gv,Wx (F3),(ev) | vcvttps2uqq/pd2uqq Vx,Wx (66),(ev)
+79: VMWRITE Gy,Ey | vcvtps2udq/pd2udq Vx,Wpd (evo) | vcvtsd2usi Gv,Wx (F2),(ev) | vcvtss2usi Gv,Wx (F3),(ev) | vcvtps2uqq/pd2uqq Vx,Wx (66),(ev)
+7a: vcvtudq2pd/uqq2pd Vpd,Wx (F3),(ev) | vcvtudq2ps/uqq2ps Vpd,Wx (F2),(ev) | vcvttps2qq/pd2qq Vx,Wx (66),(ev)
+7b: vcvtusi2sd Vpd,Hpd,Ev (F2),(ev) | vcvtusi2ss Vps,Hps,Ev (F3),(ev) | vcvtps2qq/pd2qq Vx,Wx (66),(ev)
 7c: vhaddpd Vpd,Hpd,Wpd (66) | vhaddps Vps,Hps,Wps (F2)
 7d: vhsubpd Vpd,Hpd,Wpd (66) | vhsubps Vps,Hps,Wps (F2)
 7e: movd/q Ey,Pd | vmovd/q Ey,Vy (66),(v1) | vmovq Vq,Wq (F3),(v1)
-7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqu Wx,Vx (F3)
+7f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqa32/64 Wx,Vx (66),(evo) | vmovdqu Wx,Vx (F3) | vmovdqu32/64 Wx,Vx (F3),(evo) | vmovdqu8/16 Wx,Vx (F2),(ev)
 # 0x0f 0x80-0x8f
 # Note: "forced64" is Intel CPU behavior (see comment about CALL insn).
 80: JO Jz (f64)
@@ -485,16 +490,16 @@ AVXcode: 1
 8e: JLE/JNG Jz (f64)
 8f: JNLE/JG Jz (f64)
 # 0x0f 0x90-0x9f
-90: SETO Eb
-91: SETNO Eb
-92: SETB/C/NAE Eb
-93: SETAE/NB/NC Eb
+90: SETO Eb | kmovw/q Vk,Wk | kmovb/d Vk,Wk (66)
+91: SETNO Eb | kmovw/q Mv,Vk | kmovb/d Mv,Vk (66)
+92: SETB/C/NAE Eb | kmovw Vk,Rv | kmovb Vk,Rv (66) | kmovq/d Vk,Rv (F2)
+93: SETAE/NB/NC Eb | kmovw Gv,Uk | kmovb Gv,Uk (66) | kmovq/d Gv,Uk (F2)
 94: SETE/Z Eb
 95: SETNE/NZ Eb
 96: SETBE/NA Eb
 97: SETA/NBE Eb
-98: SETS Eb
-99: SETNS Eb
+98: SETS Eb | kortestw/q Vk,Uk | kortestb/d Vk,Uk (66)
+99: SETNS Eb | ktestw/q Vk,Uk | ktestb/d Vk,Uk (66)
 9a: SETP/PE Eb
 9b: SETNP/PO Eb
 9c: SETL/NGE Eb
@@ -564,11 +569,11 @@ d7: pmovmskb Gd,Nq | vpmovmskb Gd,Ux (66),(v1)
 d8: psubusb Pq,Qq | vpsubusb Vx,Hx,Wx (66),(v1)
 d9: psubusw Pq,Qq | vpsubusw Vx,Hx,Wx (66),(v1)
 da: pminub Pq,Qq | vpminub Vx,Hx,Wx (66),(v1)
-db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1)
+db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1) | vpandd/q Vx,Hx,Wx (66),(evo)
 dc: paddusb Pq,Qq | vpaddusb Vx,Hx,Wx (66),(v1)
 dd: paddusw Pq,Qq | vpaddusw Vx,Hx,Wx (66),(v1)
 de: pmaxub Pq,Qq | vpmaxub Vx,Hx,Wx (66),(v1)
-df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1)
+df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1) | vpandnd/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0xe0-0xef
 e0: pavgb Pq,Qq | vpavgb Vx,Hx,Wx (66),(v1)
 e1: psraw Pq,Qq | vpsraw Vx,Hx,Wx (66),(v1)
@@ -576,16 +581,16 @@ e2: psrad Pq,Qq | vpsrad Vx,Hx,Wx (66),(v1)
 e3: pavgw Pq,Qq | vpavgw Vx,Hx,Wx (66),(v1)
 e4: pmulhuw Pq,Qq | vpmulhuw Vx,Hx,Wx (66),(v1)
 e5: pmulhw Pq,Qq | vpmulhw Vx,Hx,Wx (66),(v1)
-e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtpd2dq Vx,Wpd (F2)
+e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtdq2pd/qq2pd Vx,Wdq (F3),(evo) | vcvtpd2dq Vx,Wpd (F2)
 e7: movntq Mq,Pq | vmovntdq Mx,Vx (66)
 e8: psubsb Pq,Qq | vpsubsb Vx,Hx,Wx (66),(v1)
 e9: psubsw Pq,Qq | vpsubsw Vx,Hx,Wx (66),(v1)
 ea: pminsw Pq,Qq | vpminsw Vx,Hx,Wx (66),(v1)
-eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1)
+eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1) | vpord/q Vx,Hx,Wx (66),(evo)
 ec: paddsb Pq,Qq | vpaddsb Vx,Hx,Wx (66),(v1)
 ed: paddsw Pq,Qq | vpaddsw Vx,Hx,Wx (66),(v1)
 ee: pmaxsw Pq,Qq | vpmaxsw Vx,Hx,Wx (66),(v1)
-ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1)
+ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1) | vpxord/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0xf0-0xff
 f0: vlddqu Vx,Mx (F2)
 f1: psllw Pq,Qq | vpsllw Vx,Hx,Wx (66),(v1)
@@ -626,81 +631,105 @@ AVXcode: 2
 0e: vtestps Vx,Wx (66),(v)
 0f: vtestpd Vx,Wx (66),(v)
 # 0x0f 0x38 0x10-0x1f
-10: pblendvb Vdq,Wdq (66)
-11:
-12:
-13: vcvtph2ps Vx,Wx,Ib (66),(v)
-14: blendvps Vdq,Wdq (66)
-15: blendvpd Vdq,Wdq (66)
-16: vpermps Vqq,Hqq,Wqq (66),(v)
+10: pblendvb Vdq,Wdq (66) | vpsrlvw Vx,Hx,Wx (66),(evo) | vpmovuswb Wx,Vx (F3),(ev)
+11: vpmovusdb Wx,Vd (F3),(ev) | vpsravw Vx,Hx,Wx (66),(ev)
+12: vpmovusqb Wx,Vq (F3),(ev) | vpsllvw Vx,Hx,Wx (66),(ev)
+13: vcvtph2ps Vx,Wx (66),(v) | vpmovusdw Wx,Vd (F3),(ev)
+14: blendvps Vdq,Wdq (66) | vpmovusqw Wx,Vq (F3),(ev) | vprorvd/q Vx,Hx,Wx (66),(evo)
+15: blendvpd Vdq,Wdq (66) | vpmovusqd Wx,Vq (F3),(ev) | vprolvd/q Vx,Hx,Wx (66),(evo)
+16: vpermps Vqq,Hqq,Wqq (66),(v) | vpermps/d Vqq,Hqq,Wqq (66),(evo)
 17: vptest Vx,Wx (66)
 18: vbroadcastss Vx,Wd (66),(v)
-19: vbroadcastsd Vqq,Wq (66),(v)
-1a: vbroadcastf128 Vqq,Mdq (66),(v)
-1b:
+19: vbroadcastsd Vqq,Wq (66),(v) | vbroadcastf32x2 Vqq,Wq (66),(evo)
+1a: vbroadcastf128 Vqq,Mdq (66),(v) | vbroadcastf32x4/64x2 Vqq,Wq (66),(evo)
+1b: vbroadcastf32x8/64x4 Vqq,Mdq (66),(ev)
 1c: pabsb Pq,Qq | vpabsb Vx,Wx (66),(v1)
 1d: pabsw Pq,Qq | vpabsw Vx,Wx (66),(v1)
 1e: pabsd Pq,Qq | vpabsd Vx,Wx (66),(v1)
-1f:
+1f: vpabsq Vx,Wx (66),(ev)
 # 0x0f 0x38 0x20-0x2f
-20: vpmovsxbw Vx,Ux/Mq (66),(v1)
-21: vpmovsxbd Vx,Ux/Md (66),(v1)
-22: vpmovsxbq Vx,Ux/Mw (66),(v1)
-23: vpmovsxwd Vx,Ux/Mq (66),(v1)
-24: vpmovsxwq Vx,Ux/Md (66),(v1)
-25: vpmovsxdq Vx,Ux/Mq (66),(v1)
-26:
-27:
-28: vpmuldq Vx,Hx,Wx (66),(v1)
-29: vpcmpeqq Vx,Hx,Wx (66),(v1)
-2a: vmovntdqa Vx,Mx (66),(v1)
+20: vpmovsxbw Vx,Ux/Mq (66),(v1) | vpmovswb Wx,Vx (F3),(ev)
+21: vpmovsxbd Vx,Ux/Md (66),(v1) | vpmovsdb Wx,Vd (F3),(ev)
+22: vpmovsxbq Vx,Ux/Mw (66),(v1) | vpmovsqb Wx,Vq (F3),(ev)
+23: vpmovsxwd Vx,Ux/Mq (66),(v1) | vpmovsdw Wx,Vd (F3),(ev)
+24: vpmovsxwq Vx,Ux/Md (66),(v1) | vpmovsqw Wx,Vq (F3),(ev)
+25: vpmovsxdq Vx,Ux/Mq (66),(v1) | vpmovsqd Wx,Vq (F3),(ev)
+26: vptestmb/w Vk,Hx,Wx (66),(ev) | vptestnmb/w Vk,Hx,Wx (F3),(ev)
+27: vptestmd/q Vk,Hx,Wx (66),(ev) | vptestnmd/q Vk,Hx,Wx (F3),(ev)
+28: vpmuldq Vx,Hx,Wx (66),(v1) | vpmovm2b/w Vx,Uk (F3),(ev)
+29: vpcmpeqq Vx,Hx,Wx (66),(v1) | vpmovb2m/w2m Vk,Ux (F3),(ev)
+2a: vmovntdqa Vx,Mx (66),(v1) | vpbroadcastmb2q Vx,Uk (F3),(ev)
 2b: vpackusdw Vx,Hx,Wx (66),(v1)
-2c: vmaskmovps Vx,Hx,Mx (66),(v)
-2d: vmaskmovpd Vx,Hx,Mx (66),(v)
+2c: vmaskmovps Vx,Hx,Mx (66),(v) | vscalefps/d Vx,Hx,Wx (66),(evo)
+2d: vmaskmovpd Vx,Hx,Mx (66),(v) | vscalefss/d Vx,Hx,Wx (66),(evo)
 2e: vmaskmovps Mx,Hx,Vx (66),(v)
 2f: vmaskmovpd Mx,Hx,Vx (66),(v)
 # 0x0f 0x38 0x30-0x3f
-30: vpmovzxbw Vx,Ux/Mq (66),(v1)
-31: vpmovzxbd Vx,Ux/Md (66),(v1)
-32: vpmovzxbq Vx,Ux/Mw (66),(v1)
-33: vpmovzxwd Vx,Ux/Mq (66),(v1)
-34: vpmovzxwq Vx,Ux/Md (66),(v1)
-35: vpmovzxdq Vx,Ux/Mq (66),(v1)
-36: vpermd Vqq,Hqq,Wqq (66),(v)
+30: vpmovzxbw Vx,Ux/Mq (66),(v1) | vpmovwb Wx,Vx (F3),(ev)
+31: vpmovzxbd Vx,Ux/Md (66),(v1) | vpmovdb Wx,Vd (F3),(ev)
+32: vpmovzxbq Vx,Ux/Mw (66),(v1) | vpmovqb Wx,Vq (F3),(ev)
+33: vpmovzxwd Vx,Ux/Mq (66),(v1) | vpmovdw Wx,Vd (F3),(ev)
+34: vpmovzxwq Vx,Ux/Md (66),(v1) | vpmovqw Wx,Vq (F3),(ev)
+35: vpmovzxdq Vx,Ux/Mq (66),(v1) | vpmovqd Wx,Vq (F3),(ev)
+36: vpermd Vqq,Hqq,Wqq (66),(v) | vpermd/q Vqq,Hqq,Wqq (66),(evo)
 37: vpcmpgtq Vx,Hx,Wx (66),(v1)
-38: vpminsb Vx,Hx,Wx (66),(v1)
-39: vpminsd Vx,Hx,Wx (66),(v1)
-3a: vpminuw Vx,Hx,Wx (66),(v1)
-3b: vpminud Vx,Hx,Wx (66),(v1)
+38: vpminsb Vx,Hx,Wx (66),(v1) | vpmovm2d/q Vx,Uk (F3),(ev)
+39: vpminsd Vx,Hx,Wx (66),(v1) | vpminsd/q Vx,Hx,Wx (66),(evo) | vpmovd2m/q2m Vk,Ux (F3),(ev)
+3a: vpminuw Vx,Hx,Wx (66),(v1) | vpbroadcastmw2d Vx,Uk (F3),(ev)
+3b: vpminud Vx,Hx,Wx (66),(v1) | vpminud/q Vx,Hx,Wx (66),(evo)
 3c: vpmaxsb Vx,Hx,Wx (66),(v1)
-3d: vpmaxsd Vx,Hx,Wx (66),(v1)
+3d: vpmaxsd Vx,Hx,Wx (66),(v1) | vpmaxsd/q Vx,Hx,Wx (66),(evo)
 3e: vpmaxuw Vx,Hx,Wx (66),(v1)
-3f: vpmaxud Vx,Hx,Wx (66),(v1)
+3f: vpmaxud Vx,Hx,Wx (66),(v1) | vpmaxud/q Vx,Hx,Wx (66),(evo)
 # 0x0f 0x38 0x40-0x8f
-40: vpmulld Vx,Hx,Wx (66),(v1)
+40: vpmulld Vx,Hx,Wx (66),(v1) | vpmulld/q Vx,Hx,Wx (66),(evo)
 41: vphminposuw Vdq,Wdq (66),(v1)
-42:
-43:
-44:
+42: vgetexpps/d Vx,Wx (66),(ev)
+43: vgetexpss/d Vx,Hx,Wx (66),(ev)
+44: vplzcntd/q Vx,Wx (66),(ev)
 45: vpsrlvd/q Vx,Hx,Wx (66),(v)
-46: vpsravd Vx,Hx,Wx (66),(v)
+46: vpsravd Vx,Hx,Wx (66),(v) | vpsravd/q Vx,Hx,Wx (66),(evo)
 47: vpsllvd/q Vx,Hx,Wx (66),(v)
-# Skip 0x48-0x57
+# Skip 0x48-0x4b
+4c: vrcp14ps/d Vpd,Wpd (66),(ev)
+4d: vrcp14ss/d Vsd,Hpd,Wsd (66),(ev)
+4e: vrsqrt14ps/d Vpd,Wpd (66),(ev)
+4f: vrsqrt14ss/d Vsd,Hsd,Wsd (66),(ev)
+# Skip 0x50-0x57
 58: vpbroadcastd Vx,Wx (66),(v)
-59: vpbroadcastq Vx,Wx (66),(v)
-5a: vbroadcasti128 Vqq,Mdq (66),(v)
-# Skip 0x5b-0x77
+59: vpbroadcastq Vx,Wx (66),(v) | vbroadcasti32x2 Vx,Wx (66),(evo)
+5a: vbroadcasti128 Vqq,Mdq (66),(v) | vbroadcasti32x4/64x2 Vx,Wx (66),(evo)
+5b: vbroadcasti32x8/64x4 Vqq,Mdq (66),(ev)
+# Skip 0x5c-0x63
+64: vpblendmd/q Vx,Hx,Wx (66),(ev)
+65: vblendmps/d Vx,Hx,Wx (66),(ev)
+66: vpblendmb/w Vx,Hx,Wx (66),(ev)
+# Skip 0x67-0x74
+75: vpermi2b/w Vx,Hx,Wx (66),(ev)
+76: vpermi2d/q Vx,Hx,Wx (66),(ev)
+77: vpermi2ps/d Vx,Hx,Wx (66),(ev)
 78: vpbroadcastb Vx,Wx (66),(v)
 79: vpbroadcastw Vx,Wx (66),(v)
-# Skip 0x7a-0x7f
+7a: vpbroadcastb Vx,Rv (66),(ev)
+7b: vpbroadcastw Vx,Rv (66),(ev)
+7c: vpbroadcastd/q Vx,Rv (66),(ev)
+7d: vpermt2b/w Vx,Hx,Wx (66),(ev)
+7e: vpermt2d/q Vx,Hx,Wx (66),(ev)
+7f: vpermt2ps/d Vx,Hx,Wx (66),(ev)
 80: INVEPT Gy,Mdq (66)
 81: INVPID Gy,Mdq (66)
 82: INVPCID Gy,Mdq (66)
+83: vpmultishiftqb Vx,Hx,Wx (66),(ev)
+88: vexpandps/d Vpd,Wpd (66),(ev)
+89: vpexpandd/q Vx,Wx (66),(ev)
+8a: vcompressps/d Wx,Vx (66),(ev)
+8b: vpcompressd/q Wx,Vx (66),(ev)
 8c: vpmaskmovd/q Vx,Hx,Mx (66),(v)
+8d: vpermb/w Vx,Hx,Wx (66),(ev)
 8e: vpmaskmovd/q Mx,Vx,Hx (66),(v)
 # 0x0f 0x38 0x90-0xbf (FMA)
-90: vgatherdd/q Vx,Hx,Wx (66),(v)
-91: vgatherqd/q Vx,Hx,Wx (66),(v)
+90: vgatherdd/q Vx,Hx,Wx (66),(v) | vpgatherdd/q Vx,Wx (66),(evo)
+91: vgatherqd/q Vx,Hx,Wx (66),(v) | vpgatherqd/q Vx,Wx (66),(evo)
 92: vgatherdps/d Vx,Hx,Wx (66),(v)
 93: vgatherqps/d Vx,Hx,Wx (66),(v)
 94:
@@ -715,6 +744,10 @@ AVXcode: 2
 9d: vfnmadd132ss/d Vx,Hx,Wx (66),(v),(v1)
 9e: vfnmsub132ps/d Vx,Hx,Wx (66),(v)
 9f: vfnmsub132ss/d Vx,Hx,Wx (66),(v),(v1)
+a0: vpscatterdd/q Wx,Vx (66),(ev)
+a1: vpscatterqd/q Wx,Vx (66),(ev)
+a2: vscatterdps/d Wx,Vx (66),(ev)
+a3: vscatterqps/d Wx,Vx (66),(ev)
 a6: vfmaddsub213ps/d Vx,Hx,Wx (66),(v)
 a7: vfmsubadd213ps/d Vx,Hx,Wx (66),(v)
 a8: vfmadd213ps/d Vx,Hx,Wx (66),(v)
@@ -725,6 +758,8 @@ ac: vfnmadd213ps/d Vx,Hx,Wx (66),(v)
 ad: vfnmadd213ss/d Vx,Hx,Wx (66),(v),(v1)
 ae: vfnmsub213ps/d Vx,Hx,Wx (66),(v)
 af: vfnmsub213ss/d Vx,Hx,Wx (66),(v),(v1)
+b4: vpmadd52luq Vx,Hx,Wx (66),(ev)
+b5: vpmadd52huq Vx,Hx,Wx (66),(ev)
 b6: vfmaddsub231ps/d Vx,Hx,Wx (66),(v)
 b7: vfmsubadd231ps/d Vx,Hx,Wx (66),(v)
 b8: vfmadd231ps/d Vx,Hx,Wx (66),(v)
@@ -736,12 +771,15 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
 be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
 bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
 # 0x0f 0x38 0xc0-0xff
-c8: sha1nexte Vdq,Wdq
+c4: vpconflictd/q Vx,Wx (66),(ev)
+c6: Grp18 (1A)
+c7: Grp19 (1A)
+c8: sha1nexte Vdq,Wdq | vexp2ps/d Vx,Wx (66),(ev)
 c9: sha1msg1 Vdq,Wdq
-ca: sha1msg2 Vdq,Wdq
-cb: sha256rnds2 Vdq,Wdq
-cc: sha256msg1 Vdq,Wdq
-cd: sha256msg2 Vdq,Wdq
+ca: sha1msg2 Vdq,Wdq | vrcp28ps/d Vx,Wx (66),(ev)
+cb: sha256rnds2 Vdq,Wdq | vrcp28ss/d Vx,Hx,Wx (66),(ev)
+cc: sha256msg1 Vdq,Wdq | vrsqrt28ps/d Vx,Wx (66),(ev)
+cd: sha256msg2 Vdq,Wdq | vrsqrt28ss/d Vx,Hx,Wx (66),(ev)
 db: VAESIMC Vdq,Wdq (66),(v1)
 dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
 dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
@@ -763,15 +801,15 @@ AVXcode: 3
 00: vpermq Vqq,Wqq,Ib (66),(v)
 01: vpermpd Vqq,Wqq,Ib (66),(v)
 02: vpblendd Vx,Hx,Wx,Ib (66),(v)
-03:
+03: valignd/q Vx,Hx,Wx,Ib (66),(ev)
 04: vpermilps Vx,Wx,Ib (66),(v)
 05: vpermilpd Vx,Wx,Ib (66),(v)
 06: vperm2f128 Vqq,Hqq,Wqq,Ib (66),(v)
 07:
-08: vroundps Vx,Wx,Ib (66)
-09: vroundpd Vx,Wx,Ib (66)
-0a: vroundss Vss,Wss,Ib (66),(v1)
-0b: vroundsd Vsd,Wsd,Ib (66),(v1)
+08: vroundps Vx,Wx,Ib (66) | vrndscaleps Vx,Wx,Ib (66),(evo)
+09: vroundpd Vx,Wx,Ib (66) | vrndscalepd Vx,Wx,Ib (66),(evo)
+0a: vroundss Vss,Wss,Ib (66),(v1) | vrndscaless Vx,Hx,Wx,Ib (66),(evo)
+0b: vroundsd Vsd,Wsd,Ib (66),(v1) | vrndscalesd Vx,Hx,Wx,Ib (66),(evo)
 0c: vblendps Vx,Hx,Wx,Ib (66)
 0d: vblendpd Vx,Hx,Wx,Ib (66)
 0e: vpblendw Vx,Hx,Wx,Ib (66),(v1)
@@ -780,26 +818,51 @@ AVXcode: 3
 15: vpextrw Rd/Mw,Vdq,Ib (66),(v1)
 16: vpextrd/q Ey,Vdq,Ib (66),(v1)
 17: vextractps Ed,Vdq,Ib (66),(v1)
-18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v)
-19: vextractf128 Wdq,Vqq,Ib (66),(v)
+18: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v) | vinsertf32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
+19: vextractf128 Wdq,Vqq,Ib (66),(v) | vextractf32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+1a: vinsertf32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+1b: vextractf32x8/64x4 Wdq,Vqq,Ib (66),(ev)
 1d: vcvtps2ph Wx,Vx,Ib (66),(v)
+1e: vpcmpud/q Vk,Hd,Wd,Ib (66),(ev)
+1f: vpcmpd/q Vk,Hd,Wd,Ib (66),(ev)
 20: vpinsrb Vdq,Hdq,Ry/Mb,Ib (66),(v1)
 21: vinsertps Vdq,Hdq,Udq/Md,Ib (66),(v1)
 22: vpinsrd/q Vdq,Hdq,Ey,Ib (66),(v1)
-38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v)
-39: vextracti128 Wdq,Vqq,Ib (66),(v)
+23: vshuff32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
+25: vpternlogd/q Vx,Hx,Wx,Ib (66),(ev)
+26: vgetmantps/d Vx,Wx,Ib (66),(ev)
+27: vgetmantss/d Vx,Hx,Wx,Ib (66),(ev)
+30: kshiftrb/w Vk,Uk,Ib (66),(v)
+31: kshiftrd/q Vk,Uk,Ib (66),(v)
+32: kshiftlb/w Vk,Uk,Ib (66),(v)
+33: kshiftld/q Vk,Uk,Ib (66),(v)
+38: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v) | vinserti32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
+39: vextracti128 Wdq,Vqq,Ib (66),(v) | vextracti32x4/64x2 Wdq,Vqq,Ib (66),(evo)
+3a: vinserti32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
+3b: vextracti32x8/64x4 Wdq,Vqq,Ib (66),(ev)
+3e: vpcmpub/w Vk,Hk,Wx,Ib (66),(ev)
+3f: vpcmpb/w Vk,Hk,Wx,Ib (66),(ev)
 40: vdpps Vx,Hx,Wx,Ib (66)
 41: vdppd Vdq,Hdq,Wdq,Ib (66),(v1)
-42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1)
+42: vmpsadbw Vx,Hx,Wx,Ib (66),(v1) | vdbpsadbw Vx,Hx,Wx,Ib (66),(evo)
+43: vshufi32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
 44: vpclmulqdq Vdq,Hdq,Wdq,Ib (66),(v1)
 46: vperm2i128 Vqq,Hqq,Wqq,Ib (66),(v)
 4a: vblendvps Vx,Hx,Wx,Lx (66),(v)
 4b: vblendvpd Vx,Hx,Wx,Lx (66),(v)
 4c: vpblendvb Vx,Hx,Wx,Lx (66),(v1)
+50: vrangeps/d Vx,Hx,Wx,Ib (66),(ev)
+51: vrangess/d Vx,Hx,Wx,Ib (66),(ev)
+54: vfixupimmps/d Vx,Hx,Wx,Ib (66),(ev)
+55: vfixupimmss/d Vx,Hx,Wx,Ib (66),(ev)
+56: vreduceps/d Vx,Wx,Ib (66),(ev)
+57: vreducess/d Vx,Hx,Wx,Ib (66),(ev)
 60: vpcmpestrm Vdq,Wdq,Ib (66),(v1)
 61: vpcmpestri Vdq,Wdq,Ib (66),(v1)
 62: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
 63: vpcmpistri Vdq,Wdq,Ib (66),(v1)
+66: vfpclassps/d Vk,Wx,Ib (66),(ev)
+67: vfpclassss/d Vk,Wx,Ib (66),(ev)
 cc: sha1rnds4 Vdq,Wdq,Ib
 df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
 f0: RORX Gy,Ey,Ib (F2),(v)
@@ -927,8 +990,10 @@ GrpTable: Grp12
 EndTable
 
 GrpTable: Grp13
+0: vprord/q Hx,Wx,Ib (66),(ev)
+1: vprold/q Hx,Wx,Ib (66),(ev)
 2: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1)
-4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1)
+4: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) | vpsrad/q Hx,Ux,Ib (66),(evo)
 6: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1)
 EndTable
 
@@ -963,6 +1028,20 @@ GrpTable: Grp17
 3: BLSI By,Ey (v)
 EndTable
 
+GrpTable: Grp18
+1: vgatherpf0dps/d Wx (66),(ev)
+2: vgatherpf1dps/d Wx (66),(ev)
+5: vscatterpf0dps/d Wx (66),(ev)
+6: vscatterpf1dps/d Wx (66),(ev)
+EndTable
+
+GrpTable: Grp19
+1: vgatherpf0qps/d Wx (66),(ev)
+2: vgatherpf1qps/d Wx (66),(ev)
+5: vscatterpf0qps/d Wx (66),(ev)
+6: vscatterpf1qps/d Wx (66),(ev)
+EndTable
+
 # AMD's Prefetch Group
 GrpTable: GrpP
 0: PREFETCH
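Each opcode-map line above is folded into a single attribute word by
gen-insn-attr-x86.awk (next hunk). As a hedged illustration of what an
(ev) entry reduces to (the variable name is hypothetical; the real output
lands in the generated inat-tables.c):

#include <asm/inat.h>	/* INAT_* flags from the inat.h hunk above */

/* Sketch: "42: vgetexpps/d Vx,Wx (66),(ev)" in the 0x0f 0x38 map has
 * ModRM-encoded operands plus the EVEX-only superscript, so the
 * generator combines roughly these flags for that table slot. */
static const insn_attr_t attr_0f38_42 =
	INAT_MODRM | INAT_VEXOK | INAT_EVEXONLY;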
diff --git a/arch/x86/tools/gen-insn-attr-x86.awk b/arch/x86/tools/gen-insn-attr-x86.awk
index 093a892026f9..a3d2c62fd805 100644
--- a/arch/x86/tools/gen-insn-attr-x86.awk
+++ b/arch/x86/tools/gen-insn-attr-x86.awk
@@ -72,12 +72,14 @@ BEGIN {
 	lprefix_expr = "\\((66|F2|F3)\\)"
 	max_lprefix = 4
 
-	# All opcodes starting with lower-case 'v' or with (v1) superscript
+	# All opcodes starting with lower-case 'v', 'k' or with (v1) superscript
 	# accepts VEX prefix
-	vexok_opcode_expr = "^v.*"
+	vexok_opcode_expr = "^[vk].*"
 	vexok_expr = "\\(v1\\)"
 	# All opcodes with (v) superscript supports *only* VEX prefix
 	vexonly_expr = "\\(v\\)"
+	# All opcodes with (ev) superscript supports *only* EVEX prefix
+	evexonly_expr = "\\(ev\\)"
 
 	prefix_expr = "\\(Prefix\\)"
 	prefix_num["Operand-Size"] = "INAT_PFX_OPNDSZ"
@@ -95,6 +97,7 @@ BEGIN {
95 prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ" 97 prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ"
96 prefix_num["VEX+1byte"] = "INAT_PFX_VEX2" 98 prefix_num["VEX+1byte"] = "INAT_PFX_VEX2"
97 prefix_num["VEX+2byte"] = "INAT_PFX_VEX3" 99 prefix_num["VEX+2byte"] = "INAT_PFX_VEX3"
100 prefix_num["EVEX"] = "INAT_PFX_EVEX"
98 101
99 clear_vars() 102 clear_vars()
100} 103}
@@ -319,7 +322,9 @@ function convert_operands(count,opnd, i,j,imm,mod)
 		flags = add_flags(flags, "INAT_MODRM")
 
 	# check VEX codes
-	if (match(ext, vexonly_expr))
+	if (match(ext, evexonly_expr))
+		flags = add_flags(flags, "INAT_VEXOK | INAT_EVEXONLY")
+	else if (match(ext, vexonly_expr))
 		flags = add_flags(flags, "INAT_VEXOK | INAT_VEXONLY")
 	else if (match(ext, vexok_expr) || match(opcode, vexok_opcode_expr))
 		flags = add_flags(flags, "INAT_VEXOK")
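Restating that branch order as a C sketch, purely for illustration (the
awk above is what actually runs at build time; note that "(ev)", "(evo)"
and "(v1)" never substring-match "(v)", so this reproduces the same
classification):

#include <string.h>

/* Sketch: classify one opcode-map entry the way convert_operands() does.
 * opcode is the mnemonic, ext the "(66),(ev)"-style superscripts. */
static const char *vex_flags(const char *opcode, const char *ext)
{
	if (strstr(ext, "(ev)"))			/* evexonly_expr */
		return "INAT_VEXOK | INAT_EVEXONLY";
	if (strstr(ext, "(v)"))				/* vexonly_expr */
		return "INAT_VEXOK | INAT_VEXONLY";
	if (strstr(ext, "(v1)") ||			/* vexok_expr */
	    opcode[0] == 'v' || opcode[0] == 'k')	/* vexok_opcode_expr */
		return "INAT_VEXOK";
	return "";
}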
diff --git a/tools/objtool/Build b/tools/objtool/Build
index 2457916a3943..d6cdece5e58b 100644
--- a/tools/objtool/Build
+++ b/tools/objtool/Build
@@ -1,4 +1,4 @@
-objtool-y += arch/$(ARCH)/
+objtool-y += arch/$(SRCARCH)/
 objtool-y += builtin-check.o
 objtool-y += elf.o
 objtool-y += special.o
diff --git a/tools/objtool/Makefile b/tools/objtool/Makefile
index 1f75b0a046cc..0b437700f688 100644
--- a/tools/objtool/Makefile
+++ b/tools/objtool/Makefile
@@ -1,11 +1,9 @@
 include ../scripts/Makefile.include
+include ../scripts/Makefile.arch
 
-ifndef ($(ARCH))
-ARCH ?= $(shell uname -m)
 ifeq ($(ARCH),x86_64)
 ARCH := x86
 endif
-endif
 
 # always use the host compiler
 CC = gcc
@@ -26,7 +24,7 @@ OBJTOOL_IN := $(OBJTOOL)-in.o
 
 all: $(OBJTOOL)
 
-INCLUDES := -I$(srctree)/tools/include -I$(srctree)/tools/arch/$(ARCH)/include/uapi
+INCLUDES := -I$(srctree)/tools/include -I$(srctree)/tools/arch/$(HOSTARCH)/include/uapi
 CFLAGS += -Wall -Werror $(EXTRA_WARNINGS) -fomit-frame-pointer -O2 -g $(INCLUDES)
 LDFLAGS += -lelf $(LIBSUBCMD)
 
@@ -35,7 +33,7 @@ elfshdr := $(shell echo '\#include <libelf.h>' | $(CC) $(CFLAGS) -x c -E - | gre
 CFLAGS += $(if $(elfshdr),,-DLIBELF_USE_DEPRECATED)
 
 AWK = awk
-export srctree OUTPUT CFLAGS ARCH AWK
+export srctree OUTPUT CFLAGS SRCARCH AWK
 include $(srctree)/tools/build/Makefile.include
 
 $(OBJTOOL_IN): fixdep FORCE
diff --git a/tools/perf/arch/x86/tests/insn-x86-dat-32.c b/tools/perf/arch/x86/tests/insn-x86-dat-32.c
index 3b491cfe204e..3918dd52e903 100644
--- a/tools/perf/arch/x86/tests/insn-x86-dat-32.c
+++ b/tools/perf/arch/x86/tests/insn-x86-dat-32.c
@@ -6,6 +6,1016 @@
 
 {{0x0f, 0x31, }, 2, 0, "", "",
 "0f 31 \trdtsc ",},
+{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
+"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
+{{0x62, 0x81, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 81 78 56 34 12 \tbound %eax,0x12345678(%ecx)",},
+{{0x62, 0x88, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 88 78 56 34 12 \tbound %ecx,0x12345678(%eax)",},
+{{0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 90 78 56 34 12 \tbound %edx,0x12345678(%eax)",},
+{{0x62, 0x98, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 98 78 56 34 12 \tbound %ebx,0x12345678(%eax)",},
+{{0x62, 0xa0, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 a0 78 56 34 12 \tbound %esp,0x12345678(%eax)",},
+{{0x62, 0xa8, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 a8 78 56 34 12 \tbound %ebp,0x12345678(%eax)",},
+{{0x62, 0xb0, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 b0 78 56 34 12 \tbound %esi,0x12345678(%eax)",},
+{{0x62, 0xb8, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 b8 78 56 34 12 \tbound %edi,0x12345678(%eax)",},
+{{0x62, 0x08, }, 2, 0, "", "",
+"62 08 \tbound %ecx,(%eax)",},
+{{0x62, 0x05, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 05 78 56 34 12 \tbound %eax,0x12345678",},
+{{0x62, 0x14, 0x01, }, 3, 0, "", "",
+"62 14 01 \tbound %edx,(%ecx,%eax,1)",},
+{{0x62, 0x14, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"62 14 05 78 56 34 12 \tbound %edx,0x12345678(,%eax,1)",},
+{{0x62, 0x14, 0x08, }, 3, 0, "", "",
+"62 14 08 \tbound %edx,(%eax,%ecx,1)",},
+{{0x62, 0x14, 0xc8, }, 3, 0, "", "",
+"62 14 c8 \tbound %edx,(%eax,%ecx,8)",},
+{{0x62, 0x50, 0x12, }, 3, 0, "", "",
+"62 50 12 \tbound %edx,0x12(%eax)",},
+{{0x62, 0x55, 0x12, }, 3, 0, "", "",
+"62 55 12 \tbound %edx,0x12(%ebp)",},
+{{0x62, 0x54, 0x01, 0x12, }, 4, 0, "", "",
+"62 54 01 12 \tbound %edx,0x12(%ecx,%eax,1)",},
+{{0x62, 0x54, 0x05, 0x12, }, 4, 0, "", "",
+"62 54 05 12 \tbound %edx,0x12(%ebp,%eax,1)",},
+{{0x62, 0x54, 0x08, 0x12, }, 4, 0, "", "",
+"62 54 08 12 \tbound %edx,0x12(%eax,%ecx,1)",},
+{{0x62, 0x54, 0xc8, 0x12, }, 4, 0, "", "",
+"62 54 c8 12 \tbound %edx,0x12(%eax,%ecx,8)",},
+{{0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 90 78 56 34 12 \tbound %edx,0x12345678(%eax)",},
+{{0x62, 0x95, 0x78, 0x56, 0x34, 0x12, }, 6, 0, "", "",
+"62 95 78 56 34 12 \tbound %edx,0x12345678(%ebp)",},
+{{0x62, 0x94, 0x01, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"62 94 01 78 56 34 12 \tbound %edx,0x12345678(%ecx,%eax,1)",},
+{{0x62, 0x94, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"62 94 05 78 56 34 12 \tbound %edx,0x12345678(%ebp,%eax,1)",},
+{{0x62, 0x94, 0x08, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"62 94 08 78 56 34 12 \tbound %edx,0x12345678(%eax,%ecx,1)",},
+{{0x62, 0x94, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"62 94 c8 78 56 34 12 \tbound %edx,0x12345678(%eax,%ecx,8)",},
+{{0x66, 0x62, 0x81, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 81 78 56 34 12 \tbound %ax,0x12345678(%ecx)",},
+{{0x66, 0x62, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 88 78 56 34 12 \tbound %cx,0x12345678(%eax)",},
+{{0x66, 0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 90 78 56 34 12 \tbound %dx,0x12345678(%eax)",},
+{{0x66, 0x62, 0x98, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 98 78 56 34 12 \tbound %bx,0x12345678(%eax)",},
+{{0x66, 0x62, 0xa0, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 a0 78 56 34 12 \tbound %sp,0x12345678(%eax)",},
+{{0x66, 0x62, 0xa8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 a8 78 56 34 12 \tbound %bp,0x12345678(%eax)",},
+{{0x66, 0x62, 0xb0, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 b0 78 56 34 12 \tbound %si,0x12345678(%eax)",},
+{{0x66, 0x62, 0xb8, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 b8 78 56 34 12 \tbound %di,0x12345678(%eax)",},
+{{0x66, 0x62, 0x08, }, 3, 0, "", "",
+"66 62 08 \tbound %cx,(%eax)",},
+{{0x66, 0x62, 0x05, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 05 78 56 34 12 \tbound %ax,0x12345678",},
+{{0x66, 0x62, 0x14, 0x01, }, 4, 0, "", "",
+"66 62 14 01 \tbound %dx,(%ecx,%eax,1)",},
+{{0x66, 0x62, 0x14, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 62 14 05 78 56 34 12 \tbound %dx,0x12345678(,%eax,1)",},
+{{0x66, 0x62, 0x14, 0x08, }, 4, 0, "", "",
+"66 62 14 08 \tbound %dx,(%eax,%ecx,1)",},
+{{0x66, 0x62, 0x14, 0xc8, }, 4, 0, "", "",
+"66 62 14 c8 \tbound %dx,(%eax,%ecx,8)",},
+{{0x66, 0x62, 0x50, 0x12, }, 4, 0, "", "",
+"66 62 50 12 \tbound %dx,0x12(%eax)",},
+{{0x66, 0x62, 0x55, 0x12, }, 4, 0, "", "",
+"66 62 55 12 \tbound %dx,0x12(%ebp)",},
+{{0x66, 0x62, 0x54, 0x01, 0x12, }, 5, 0, "", "",
+"66 62 54 01 12 \tbound %dx,0x12(%ecx,%eax,1)",},
+{{0x66, 0x62, 0x54, 0x05, 0x12, }, 5, 0, "", "",
+"66 62 54 05 12 \tbound %dx,0x12(%ebp,%eax,1)",},
+{{0x66, 0x62, 0x54, 0x08, 0x12, }, 5, 0, "", "",
+"66 62 54 08 12 \tbound %dx,0x12(%eax,%ecx,1)",},
+{{0x66, 0x62, 0x54, 0xc8, 0x12, }, 5, 0, "", "",
+"66 62 54 c8 12 \tbound %dx,0x12(%eax,%ecx,8)",},
+{{0x66, 0x62, 0x90, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 90 78 56 34 12 \tbound %dx,0x12345678(%eax)",},
+{{0x66, 0x62, 0x95, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"66 62 95 78 56 34 12 \tbound %dx,0x12345678(%ebp)",},
+{{0x66, 0x62, 0x94, 0x01, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 62 94 01 78 56 34 12 \tbound %dx,0x12345678(%ecx,%eax,1)",},
+{{0x66, 0x62, 0x94, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 62 94 05 78 56 34 12 \tbound %dx,0x12345678(%ebp,%eax,1)",},
+{{0x66, 0x62, 0x94, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 62 94 08 78 56 34 12 \tbound %dx,0x12345678(%eax,%ecx,1)",},
+{{0x66, 0x62, 0x94, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 62 94 c8 78 56 34 12 \tbound %dx,0x12345678(%eax,%ecx,8)",},
+{{0x0f, 0x41, 0xd8, }, 3, 0, "", "",
+"0f 41 d8 \tcmovno %eax,%ebx",},
+{{0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 41 88 78 56 34 12 \tcmovno 0x12345678(%eax),%ecx",},
+{{0x66, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%eax),%cx",},
+{{0x0f, 0x44, 0xd8, }, 3, 0, "", "",
+"0f 44 d8 \tcmove %eax,%ebx",},
+{{0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 44 88 78 56 34 12 \tcmove 0x12345678(%eax),%ecx",},
+{{0x66, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
+"66 0f 44 88 78 56 34 12 \tcmove 0x12345678(%eax),%cx",},
+{{0x0f, 0x90, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 90 80 78 56 34 12 \tseto 0x12345678(%eax)",},
+{{0x0f, 0x91, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 91 80 78 56 34 12 \tsetno 0x12345678(%eax)",},
+{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
+{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
+{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 92 80 78 56 34 12 \tsetb 0x12345678(%eax)",},
+{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
+{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
+{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 93 80 78 56 34 12 \tsetae 0x12345678(%eax)",},
+{{0x0f, 0x98, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 98 80 78 56 34 12 \tsets 0x12345678(%eax)",},
+{{0x0f, 0x99, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
+"0f 99 80 78 56 34 12 \tsetns 0x12345678(%eax)",},
+{{0xc5, 0xcc, 0x41, 0xef, }, 4, 0, "", "",
+"c5 cc 41 ef \tkandw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x41, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 41 ef \tkandq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x41, 0xef, }, 4, 0, "", "",
+"c5 cd 41 ef \tkandb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x41, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 41 ef \tkandd %k7,%k6,%k5",},
+{{0xc5, 0xcc, 0x42, 0xef, }, 4, 0, "", "",
+"c5 cc 42 ef \tkandnw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x42, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 42 ef \tkandnq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x42, 0xef, }, 4, 0, "", "",
+"c5 cd 42 ef \tkandnb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x42, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 42 ef \tkandnd %k7,%k6,%k5",},
+{{0xc5, 0xf8, 0x44, 0xf7, }, 4, 0, "", "",
+"c5 f8 44 f7 \tknotw %k7,%k6",},
+{{0xc4, 0xe1, 0xf8, 0x44, 0xf7, }, 5, 0, "", "",
+"c4 e1 f8 44 f7 \tknotq %k7,%k6",},
+{{0xc5, 0xf9, 0x44, 0xf7, }, 4, 0, "", "",
+"c5 f9 44 f7 \tknotb %k7,%k6",},
+{{0xc4, 0xe1, 0xf9, 0x44, 0xf7, }, 5, 0, "", "",
+"c4 e1 f9 44 f7 \tknotd %k7,%k6",},
+{{0xc5, 0xcc, 0x45, 0xef, }, 4, 0, "", "",
+"c5 cc 45 ef \tkorw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x45, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 45 ef \tkorq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x45, 0xef, }, 4, 0, "", "",
+"c5 cd 45 ef \tkorb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x45, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 45 ef \tkord %k7,%k6,%k5",},
+{{0xc5, 0xcc, 0x46, 0xef, }, 4, 0, "", "",
+"c5 cc 46 ef \tkxnorw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x46, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 46 ef \tkxnorq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x46, 0xef, }, 4, 0, "", "",
+"c5 cd 46 ef \tkxnorb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x46, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 46 ef \tkxnord %k7,%k6,%k5",},
+{{0xc5, 0xcc, 0x47, 0xef, }, 4, 0, "", "",
+"c5 cc 47 ef \tkxorw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x47, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 47 ef \tkxorq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x47, 0xef, }, 4, 0, "", "",
+"c5 cd 47 ef \tkxorb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x47, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 47 ef \tkxord %k7,%k6,%k5",},
+{{0xc5, 0xcc, 0x4a, 0xef, }, 4, 0, "", "",
+"c5 cc 4a ef \tkaddw %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x4a, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 4a ef \tkaddq %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x4a, 0xef, }, 4, 0, "", "",
+"c5 cd 4a ef \tkaddb %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcd, 0x4a, 0xef, }, 5, 0, "", "",
+"c4 e1 cd 4a ef \tkaddd %k7,%k6,%k5",},
+{{0xc5, 0xcd, 0x4b, 0xef, }, 4, 0, "", "",
+"c5 cd 4b ef \tkunpckbw %k7,%k6,%k5",},
+{{0xc5, 0xcc, 0x4b, 0xef, }, 4, 0, "", "",
+"c5 cc 4b ef \tkunpckwd %k7,%k6,%k5",},
+{{0xc4, 0xe1, 0xcc, 0x4b, 0xef, }, 5, 0, "", "",
+"c4 e1 cc 4b ef \tkunpckdq %k7,%k6,%k5",},
+{{0xc5, 0xf8, 0x90, 0xee, }, 4, 0, "", "",
+"c5 f8 90 ee \tkmovw %k6,%k5",},
+{{0xc5, 0xf8, 0x90, 0x29, }, 4, 0, "", "",
+"c5 f8 90 29 \tkmovw (%ecx),%k5",},
+{{0xc5, 0xf8, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
+"c5 f8 90 ac c8 23 01 00 00 \tkmovw 0x123(%eax,%ecx,8),%k5",},
+{{0xc5, 0xf8, 0x91, 0x29, }, 4, 0, "", "",
+"c5 f8 91 29 \tkmovw %k5,(%ecx)",},
+{{0xc5, 0xf8, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
+"c5 f8 91 ac c8 23 01 00 00 \tkmovw %k5,0x123(%eax,%ecx,8)",},
+{{0xc5, 0xf8, 0x92, 0xe8, }, 4, 0, "", "",
+"c5 f8 92 e8 \tkmovw %eax,%k5",},
+{{0xc5, 0xf8, 0x92, 0xed, }, 4, 0, "", "",
+"c5 f8 92 ed \tkmovw %ebp,%k5",},
+{{0xc5, 0xf8, 0x93, 0xc5, }, 4, 0, "", "",
+"c5 f8 93 c5 \tkmovw %k5,%eax",},
+{{0xc5, 0xf8, 0x93, 0xed, }, 4, 0, "", "",
+"c5 f8 93 ed \tkmovw %k5,%ebp",},
+{{0xc4, 0xe1, 0xf8, 0x90, 0xee, }, 5, 0, "", "",
+"c4 e1 f8 90 ee \tkmovq %k6,%k5",},
+{{0xc4, 0xe1, 0xf8, 0x90, 0x29, }, 5, 0, "", "",
+"c4 e1 f8 90 29 \tkmovq (%ecx),%k5",},
+{{0xc4, 0xe1, 0xf8, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
+"c4 e1 f8 90 ac c8 23 01 00 00 \tkmovq 0x123(%eax,%ecx,8),%k5",},
+{{0xc4, 0xe1, 0xf8, 0x91, 0x29, }, 5, 0, "", "",
+"c4 e1 f8 91 29 \tkmovq %k5,(%ecx)",},
+{{0xc4, 0xe1, 0xf8, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
+"c4 e1 f8 91 ac c8 23 01 00 00 \tkmovq %k5,0x123(%eax,%ecx,8)",},
+{{0xc5, 0xf9, 0x90, 0xee, }, 4, 0, "", "",
+"c5 f9 90 ee \tkmovb %k6,%k5",},
+{{0xc5, 0xf9, 0x90, 0x29, }, 4, 0, "", "",
+"c5 f9 90 29 \tkmovb (%ecx),%k5",},
+{{0xc5, 0xf9, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
+"c5 f9 90 ac c8 23 01 00 00 \tkmovb 0x123(%eax,%ecx,8),%k5",},
+{{0xc5, 0xf9, 0x91, 0x29, }, 4, 0, "", "",
+"c5 f9 91 29 \tkmovb %k5,(%ecx)",},
+{{0xc5, 0xf9, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 9, 0, "", "",
+"c5 f9 91 ac c8 23 01 00 00 \tkmovb %k5,0x123(%eax,%ecx,8)",},
+{{0xc5, 0xf9, 0x92, 0xe8, }, 4, 0, "", "",
+"c5 f9 92 e8 \tkmovb %eax,%k5",},
+{{0xc5, 0xf9, 0x92, 0xed, }, 4, 0, "", "",
+"c5 f9 92 ed \tkmovb %ebp,%k5",},
+{{0xc5, 0xf9, 0x93, 0xc5, }, 4, 0, "", "",
+"c5 f9 93 c5 \tkmovb %k5,%eax",},
+{{0xc5, 0xf9, 0x93, 0xed, }, 4, 0, "", "",
+"c5 f9 93 ed \tkmovb %k5,%ebp",},
+{{0xc4, 0xe1, 0xf9, 0x90, 0xee, }, 5, 0, "", "",
+"c4 e1 f9 90 ee \tkmovd %k6,%k5",},
+{{0xc4, 0xe1, 0xf9, 0x90, 0x29, }, 5, 0, "", "",
+"c4 e1 f9 90 29 \tkmovd (%ecx),%k5",},
+{{0xc4, 0xe1, 0xf9, 0x90, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
+"c4 e1 f9 90 ac c8 23 01 00 00 \tkmovd 0x123(%eax,%ecx,8),%k5",},
+{{0xc4, 0xe1, 0xf9, 0x91, 0x29, }, 5, 0, "", "",
+"c4 e1 f9 91 29 \tkmovd %k5,(%ecx)",},
+{{0xc4, 0xe1, 0xf9, 0x91, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
+"c4 e1 f9 91 ac c8 23 01 00 00 \tkmovd %k5,0x123(%eax,%ecx,8)",},
+{{0xc5, 0xfb, 0x92, 0xe8, }, 4, 0, "", "",
+"c5 fb 92 e8 \tkmovd %eax,%k5",},
+{{0xc5, 0xfb, 0x92, 0xed, }, 4, 0, "", "",
+"c5 fb 92 ed \tkmovd %ebp,%k5",},
+{{0xc5, 0xfb, 0x93, 0xc5, }, 4, 0, "", "",
+"c5 fb 93 c5 \tkmovd %k5,%eax",},
+{{0xc5, 0xfb, 0x93, 0xed, }, 4, 0, "", "",
+"c5 fb 93 ed \tkmovd %k5,%ebp",},
+{{0xc5, 0xf8, 0x98, 0xee, }, 4, 0, "", "",
+"c5 f8 98 ee \tkortestw %k6,%k5",},
+{{0xc4, 0xe1, 0xf8, 0x98, 0xee, }, 5, 0, "", "",
+"c4 e1 f8 98 ee \tkortestq %k6,%k5",},
+{{0xc5, 0xf9, 0x98, 0xee, }, 4, 0, "", "",
+"c5 f9 98 ee \tkortestb %k6,%k5",},
+{{0xc4, 0xe1, 0xf9, 0x98, 0xee, }, 5, 0, "", "",
+"c4 e1 f9 98 ee \tkortestd %k6,%k5",},
+{{0xc5, 0xf8, 0x99, 0xee, }, 4, 0, "", "",
+"c5 f8 99 ee \tktestw %k6,%k5",},
+{{0xc4, 0xe1, 0xf8, 0x99, 0xee, }, 5, 0, "", "",
+"c4 e1 f8 99 ee \tktestq %k6,%k5",},
+{{0xc5, 0xf9, 0x99, 0xee, }, 4, 0, "", "",
+"c5 f9 99 ee \tktestb %k6,%k5",},
+{{0xc4, 0xe1, 0xf9, 0x99, 0xee, }, 5, 0, "", "",
+"c4 e1 f9 99 ee \tktestd %k6,%k5",},
+{{0xc4, 0xe3, 0xf9, 0x30, 0xee, 0x12, }, 6, 0, "", "",
+"c4 e3 f9 30 ee 12 \tkshiftrw $0x12,%k6,%k5",},
+{{0xc4, 0xe3, 0xf9, 0x31, 0xee, 0x5b, }, 6, 0, "", "",
+"c4 e3 f9 31 ee 5b \tkshiftrq $0x5b,%k6,%k5",},
+{{0xc4, 0xe3, 0xf9, 0x32, 0xee, 0x12, }, 6, 0, "", "",
+"c4 e3 f9 32 ee 12 \tkshiftlw $0x12,%k6,%k5",},
+{{0xc4, 0xe3, 0xf9, 0x33, 0xee, 0x5b, }, 6, 0, "", "",
+"c4 e3 f9 33 ee 5b \tkshiftlq $0x5b,%k6,%k5",},
+{{0xc5, 0xf8, 0x5b, 0xf5, }, 4, 0, "", "",
+"c5 f8 5b f5 \tvcvtdq2ps %xmm5,%xmm6",},
+{{0x62, 0xf1, 0xfc, 0x4f, 0x5b, 0xf5, }, 6, 0, "", "",
+"62 f1 fc 4f 5b f5 \tvcvtqq2ps %zmm5,%ymm6{%k7}",},
+{{0xc5, 0xf9, 0x5b, 0xf5, }, 4, 0, "", "",
+"c5 f9 5b f5 \tvcvtps2dq %xmm5,%xmm6",},
+{{0xc5, 0xfa, 0x5b, 0xf5, }, 4, 0, "", "",
+"c5 fa 5b f5 \tvcvttps2dq %xmm5,%xmm6",},
+{{0x0f, 0x6f, 0xe0, }, 3, 0, "", "",
+"0f 6f e0 \tmovq %mm0,%mm4",},
+{{0xc5, 0xfd, 0x6f, 0xf4, }, 4, 0, "", "",
+"c5 fd 6f f4 \tvmovdqa %ymm4,%ymm6",},
+{{0x62, 0xf1, 0x7d, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 7d 48 6f f5 \tvmovdqa32 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 fd 48 6f f5 \tvmovdqa64 %zmm5,%zmm6",},
+{{0xc5, 0xfe, 0x6f, 0xf4, }, 4, 0, "", "",
+"c5 fe 6f f4 \tvmovdqu %ymm4,%ymm6",},
+{{0x62, 0xf1, 0x7e, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 7e 48 6f f5 \tvmovdqu32 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfe, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 fe 48 6f f5 \tvmovdqu64 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0x7f, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 7f 48 6f f5 \tvmovdqu8 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xff, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
+"62 f1 ff 48 6f f5 \tvmovdqu16 %zmm5,%zmm6",},
+{{0x0f, 0x78, 0xc3, }, 3, 0, "", "",
+"0f 78 c3 \tvmread %eax,%ebx",},
+{{0x62, 0xf1, 0x7c, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
+"62 f1 7c 48 78 f5 \tvcvttps2udq %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfc, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
+"62 f1 fc 4f 78 f5 \tvcvttpd2udq %zmm5,%ymm6{%k7}",},
+{{0x62, 0xf1, 0x7f, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
+"62 f1 7f 08 78 c6 \tvcvttsd2usi %xmm6,%eax",},
+{{0x62, 0xf1, 0x7e, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
+"62 f1 7e 08 78 c6 \tvcvttss2usi %xmm6,%eax",},
+{{0x62, 0xf1, 0x7d, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
+"62 f1 7d 4f 78 f5 \tvcvttps2uqq %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
+"62 f1 fd 48 78 f5 \tvcvttpd2uqq %zmm5,%zmm6",},
+{{0x0f, 0x79, 0xd8, }, 3, 0, "", "",
+"0f 79 d8 \tvmwrite %eax,%ebx",},
+{{0x62, 0xf1, 0x7c, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
+"62 f1 7c 48 79 f5 \tvcvtps2udq %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfc, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
+"62 f1 fc 4f 79 f5 \tvcvtpd2udq %zmm5,%ymm6{%k7}",},
+{{0x62, 0xf1, 0x7f, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
+"62 f1 7f 08 79 c6 \tvcvtsd2usi %xmm6,%eax",},
+{{0x62, 0xf1, 0x7e, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
+"62 f1 7e 08 79 c6 \tvcvtss2usi %xmm6,%eax",},
+{{0x62, 0xf1, 0x7d, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
+"62 f1 7d 4f 79 f5 \tvcvtps2uqq %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
+"62 f1 fd 48 79 f5 \tvcvtpd2uqq %zmm5,%zmm6",},
+{{0x62, 0xf1, 0x7e, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 7e 4f 7a f5 \tvcvtudq2pd %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfe, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 fe 48 7a f5 \tvcvtuqq2pd %zmm5,%zmm6",},
+{{0x62, 0xf1, 0x7f, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 7f 48 7a f5 \tvcvtudq2ps %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xff, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 ff 4f 7a f5 \tvcvtuqq2ps %zmm5,%ymm6{%k7}",},
+{{0x62, 0xf1, 0x7d, 0x4f, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 7d 4f 7a f5 \tvcvttps2qq %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
+"62 f1 fd 48 7a f5 \tvcvttpd2qq %zmm5,%zmm6",},
+{{0x62, 0xf1, 0x57, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
+"62 f1 57 08 7b f0 \tvcvtusi2sd %eax,%xmm5,%xmm6",},
+{{0x62, 0xf1, 0x56, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
+"62 f1 56 08 7b f0 \tvcvtusi2ss %eax,%xmm5,%xmm6",},
+{{0x62, 0xf1, 0x7d, 0x4f, 0x7b, 0xf5, }, 6, 0, "", "",
+"62 f1 7d 4f 7b f5 \tvcvtps2qq %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x7b, 0xf5, }, 6, 0, "", "",
+"62 f1 fd 48 7b f5 \tvcvtpd2qq %zmm5,%zmm6",},
+{{0x0f, 0x7f, 0xc4, }, 3, 0, "", "",
+"0f 7f c4 \tmovq %mm0,%mm4",},
+{{0xc5, 0xfd, 0x7f, 0xee, }, 4, 0, "", "",
+"c5 fd 7f ee \tvmovdqa %ymm5,%ymm6",},
+{{0x62, 0xf1, 0x7d, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 7d 48 7f ee \tvmovdqa32 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfd, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 fd 48 7f ee \tvmovdqa64 %zmm5,%zmm6",},
+{{0xc5, 0xfe, 0x7f, 0xee, }, 4, 0, "", "",
+"c5 fe 7f ee \tvmovdqu %ymm5,%ymm6",},
+{{0x62, 0xf1, 0x7e, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 7e 48 7f ee \tvmovdqu32 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xfe, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 fe 48 7f ee \tvmovdqu64 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0x7f, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 7f 48 7f ee \tvmovdqu8 %zmm5,%zmm6",},
+{{0x62, 0xf1, 0xff, 0x48, 0x7f, 0xee, }, 6, 0, "", "",
+"62 f1 ff 48 7f ee \tvmovdqu16 %zmm5,%zmm6",},
+{{0x0f, 0xdb, 0xd1, }, 3, 0, "", "",
+"0f db d1 \tpand %mm1,%mm2",},
+{{0x66, 0x0f, 0xdb, 0xd1, }, 4, 0, "", "",
+"66 0f db d1 \tpand %xmm1,%xmm2",},
+{{0xc5, 0xcd, 0xdb, 0xd4, }, 4, 0, "", "",
+"c5 cd db d4 \tvpand %ymm4,%ymm6,%ymm2",},
+{{0x62, 0xf1, 0x55, 0x48, 0xdb, 0xf4, }, 6, 0, "", "",
+"62 f1 55 48 db f4 \tvpandd %zmm4,%zmm5,%zmm6",},
+{{0x62, 0xf1, 0xd5, 0x48, 0xdb, 0xf4, }, 6, 0, "", "",
+"62 f1 d5 48 db f4 \tvpandq %zmm4,%zmm5,%zmm6",},
+{{0x0f, 0xdf, 0xd1, }, 3, 0, "", "",
+"0f df d1 \tpandn %mm1,%mm2",},
+{{0x66, 0x0f, 0xdf, 0xd1, }, 4, 0, "", "",
+"66 0f df d1 \tpandn %xmm1,%xmm2",},
+{{0xc5, 0xcd, 0xdf, 0xd4, }, 4, 0, "", "",
+"c5 cd df d4 \tvpandn %ymm4,%ymm6,%ymm2",},
+{{0x62, 0xf1, 0x55, 0x48, 0xdf, 0xf4, }, 6, 0, "", "",
+"62 f1 55 48 df f4 \tvpandnd %zmm4,%zmm5,%zmm6",},
+{{0x62, 0xf1, 0xd5, 0x48, 0xdf, 0xf4, }, 6, 0, "", "",
+"62 f1 d5 48 df f4 \tvpandnq %zmm4,%zmm5,%zmm6",},
+{{0xc5, 0xf9, 0xe6, 0xd1, }, 4, 0, "", "",
+"c5 f9 e6 d1 \tvcvttpd2dq %xmm1,%xmm2",},
+{{0xc5, 0xfa, 0xe6, 0xf5, }, 4, 0, "", "",
+"c5 fa e6 f5 \tvcvtdq2pd %xmm5,%xmm6",},
+{{0x62, 0xf1, 0x7e, 0x4f, 0xe6, 0xf5, }, 6, 0, "", "",
+"62 f1 7e 4f e6 f5 \tvcvtdq2pd %ymm5,%zmm6{%k7}",},
+{{0x62, 0xf1, 0xfe, 0x48, 0xe6, 0xf5, }, 6, 0, "", "",
416"62 f1 fe 48 e6 f5 \tvcvtqq2pd %zmm5,%zmm6",},
417{{0xc5, 0xfb, 0xe6, 0xd1, }, 4, 0, "", "",
418"c5 fb e6 d1 \tvcvtpd2dq %xmm1,%xmm2",},
419{{0x0f, 0xeb, 0xf4, }, 3, 0, "", "",
420"0f eb f4 \tpor %mm4,%mm6",},
421{{0xc5, 0xcd, 0xeb, 0xd4, }, 4, 0, "", "",
422"c5 cd eb d4 \tvpor %ymm4,%ymm6,%ymm2",},
423{{0x62, 0xf1, 0x55, 0x48, 0xeb, 0xf4, }, 6, 0, "", "",
424"62 f1 55 48 eb f4 \tvpord %zmm4,%zmm5,%zmm6",},
425{{0x62, 0xf1, 0xd5, 0x48, 0xeb, 0xf4, }, 6, 0, "", "",
426"62 f1 d5 48 eb f4 \tvporq %zmm4,%zmm5,%zmm6",},
427{{0x0f, 0xef, 0xf4, }, 3, 0, "", "",
428"0f ef f4 \tpxor %mm4,%mm6",},
429{{0xc5, 0xcd, 0xef, 0xd4, }, 4, 0, "", "",
430"c5 cd ef d4 \tvpxor %ymm4,%ymm6,%ymm2",},
431{{0x62, 0xf1, 0x55, 0x48, 0xef, 0xf4, }, 6, 0, "", "",
432"62 f1 55 48 ef f4 \tvpxord %zmm4,%zmm5,%zmm6",},
433{{0x62, 0xf1, 0xd5, 0x48, 0xef, 0xf4, }, 6, 0, "", "",
434"62 f1 d5 48 ef f4 \tvpxorq %zmm4,%zmm5,%zmm6",},
435{{0x66, 0x0f, 0x38, 0x10, 0xc1, }, 5, 0, "", "",
436"66 0f 38 10 c1 \tpblendvb %xmm0,%xmm1,%xmm0",},
437{{0x62, 0xf2, 0xd5, 0x48, 0x10, 0xf4, }, 6, 0, "", "",
438"62 f2 d5 48 10 f4 \tvpsrlvw %zmm4,%zmm5,%zmm6",},
439{{0x62, 0xf2, 0x7e, 0x4f, 0x10, 0xee, }, 6, 0, "", "",
440"62 f2 7e 4f 10 ee \tvpmovuswb %zmm5,%ymm6{%k7}",},
441{{0x62, 0xf2, 0x7e, 0x4f, 0x11, 0xee, }, 6, 0, "", "",
442"62 f2 7e 4f 11 ee \tvpmovusdb %zmm5,%xmm6{%k7}",},
443{{0x62, 0xf2, 0xd5, 0x48, 0x11, 0xf4, }, 6, 0, "", "",
444"62 f2 d5 48 11 f4 \tvpsravw %zmm4,%zmm5,%zmm6",},
445{{0x62, 0xf2, 0x7e, 0x4f, 0x12, 0xee, }, 6, 0, "", "",
446"62 f2 7e 4f 12 ee \tvpmovusqb %zmm5,%xmm6{%k7}",},
447{{0x62, 0xf2, 0xd5, 0x48, 0x12, 0xf4, }, 6, 0, "", "",
448"62 f2 d5 48 12 f4 \tvpsllvw %zmm4,%zmm5,%zmm6",},
449{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
450"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
451{{0x62, 0xf2, 0x7d, 0x4f, 0x13, 0xf5, }, 6, 0, "", "",
452"62 f2 7d 4f 13 f5 \tvcvtph2ps %ymm5,%zmm6{%k7}",},
453{{0x62, 0xf2, 0x7e, 0x4f, 0x13, 0xee, }, 6, 0, "", "",
454"62 f2 7e 4f 13 ee \tvpmovusdw %zmm5,%ymm6{%k7}",},
455{{0x66, 0x0f, 0x38, 0x14, 0xc1, }, 5, 0, "", "",
456"66 0f 38 14 c1 \tblendvps %xmm0,%xmm1,%xmm0",},
457{{0x62, 0xf2, 0x7e, 0x4f, 0x14, 0xee, }, 6, 0, "", "",
458"62 f2 7e 4f 14 ee \tvpmovusqw %zmm5,%xmm6{%k7}",},
459{{0x62, 0xf2, 0x55, 0x48, 0x14, 0xf4, }, 6, 0, "", "",
460"62 f2 55 48 14 f4 \tvprorvd %zmm4,%zmm5,%zmm6",},
461{{0x62, 0xf2, 0xd5, 0x48, 0x14, 0xf4, }, 6, 0, "", "",
462"62 f2 d5 48 14 f4 \tvprorvq %zmm4,%zmm5,%zmm6",},
463{{0x66, 0x0f, 0x38, 0x15, 0xc1, }, 5, 0, "", "",
464"66 0f 38 15 c1 \tblendvpd %xmm0,%xmm1,%xmm0",},
465{{0x62, 0xf2, 0x7e, 0x4f, 0x15, 0xee, }, 6, 0, "", "",
466"62 f2 7e 4f 15 ee \tvpmovusqd %zmm5,%ymm6{%k7}",},
467{{0x62, 0xf2, 0x55, 0x48, 0x15, 0xf4, }, 6, 0, "", "",
468"62 f2 55 48 15 f4 \tvprolvd %zmm4,%zmm5,%zmm6",},
469{{0x62, 0xf2, 0xd5, 0x48, 0x15, 0xf4, }, 6, 0, "", "",
470"62 f2 d5 48 15 f4 \tvprolvq %zmm4,%zmm5,%zmm6",},
471{{0xc4, 0xe2, 0x4d, 0x16, 0xd4, }, 5, 0, "", "",
472"c4 e2 4d 16 d4 \tvpermps %ymm4,%ymm6,%ymm2",},
473{{0x62, 0xf2, 0x4d, 0x2f, 0x16, 0xd4, }, 6, 0, "", "",
474"62 f2 4d 2f 16 d4 \tvpermps %ymm4,%ymm6,%ymm2{%k7}",},
475{{0x62, 0xf2, 0xcd, 0x2f, 0x16, 0xd4, }, 6, 0, "", "",
476"62 f2 cd 2f 16 d4 \tvpermpd %ymm4,%ymm6,%ymm2{%k7}",},
477{{0xc4, 0xe2, 0x7d, 0x19, 0xf4, }, 5, 0, "", "",
478"c4 e2 7d 19 f4 \tvbroadcastsd %xmm4,%ymm6",},
479{{0x62, 0xf2, 0x7d, 0x48, 0x19, 0xf7, }, 6, 0, "", "",
480"62 f2 7d 48 19 f7 \tvbroadcastf32x2 %xmm7,%zmm6",},
481{{0xc4, 0xe2, 0x7d, 0x1a, 0x21, }, 5, 0, "", "",
482"c4 e2 7d 1a 21 \tvbroadcastf128 (%ecx),%ymm4",},
483{{0x62, 0xf2, 0x7d, 0x48, 0x1a, 0x31, }, 6, 0, "", "",
484"62 f2 7d 48 1a 31 \tvbroadcastf32x4 (%ecx),%zmm6",},
485{{0x62, 0xf2, 0xfd, 0x48, 0x1a, 0x31, }, 6, 0, "", "",
486"62 f2 fd 48 1a 31 \tvbroadcastf64x2 (%ecx),%zmm6",},
487{{0x62, 0xf2, 0x7d, 0x48, 0x1b, 0x31, }, 6, 0, "", "",
488"62 f2 7d 48 1b 31 \tvbroadcastf32x8 (%ecx),%zmm6",},
489{{0x62, 0xf2, 0xfd, 0x48, 0x1b, 0x31, }, 6, 0, "", "",
490"62 f2 fd 48 1b 31 \tvbroadcastf64x4 (%ecx),%zmm6",},
491{{0x62, 0xf2, 0xfd, 0x48, 0x1f, 0xf4, }, 6, 0, "", "",
492"62 f2 fd 48 1f f4 \tvpabsq %zmm4,%zmm6",},
493{{0xc4, 0xe2, 0x79, 0x20, 0xec, }, 5, 0, "", "",
494"c4 e2 79 20 ec \tvpmovsxbw %xmm4,%xmm5",},
495{{0x62, 0xf2, 0x7e, 0x4f, 0x20, 0xee, }, 6, 0, "", "",
496"62 f2 7e 4f 20 ee \tvpmovswb %zmm5,%ymm6{%k7}",},
497{{0xc4, 0xe2, 0x7d, 0x21, 0xf4, }, 5, 0, "", "",
498"c4 e2 7d 21 f4 \tvpmovsxbd %xmm4,%ymm6",},
499{{0x62, 0xf2, 0x7e, 0x4f, 0x21, 0xee, }, 6, 0, "", "",
500"62 f2 7e 4f 21 ee \tvpmovsdb %zmm5,%xmm6{%k7}",},
501{{0xc4, 0xe2, 0x7d, 0x22, 0xe4, }, 5, 0, "", "",
502"c4 e2 7d 22 e4 \tvpmovsxbq %xmm4,%ymm4",},
503{{0x62, 0xf2, 0x7e, 0x4f, 0x22, 0xee, }, 6, 0, "", "",
504"62 f2 7e 4f 22 ee \tvpmovsqb %zmm5,%xmm6{%k7}",},
505{{0xc4, 0xe2, 0x7d, 0x23, 0xe4, }, 5, 0, "", "",
506"c4 e2 7d 23 e4 \tvpmovsxwd %xmm4,%ymm4",},
507{{0x62, 0xf2, 0x7e, 0x4f, 0x23, 0xee, }, 6, 0, "", "",
508"62 f2 7e 4f 23 ee \tvpmovsdw %zmm5,%ymm6{%k7}",},
509{{0xc4, 0xe2, 0x7d, 0x24, 0xf4, }, 5, 0, "", "",
510"c4 e2 7d 24 f4 \tvpmovsxwq %xmm4,%ymm6",},
511{{0x62, 0xf2, 0x7e, 0x4f, 0x24, 0xee, }, 6, 0, "", "",
512"62 f2 7e 4f 24 ee \tvpmovsqw %zmm5,%xmm6{%k7}",},
513{{0xc4, 0xe2, 0x7d, 0x25, 0xe4, }, 5, 0, "", "",
514"c4 e2 7d 25 e4 \tvpmovsxdq %xmm4,%ymm4",},
515{{0x62, 0xf2, 0x7e, 0x4f, 0x25, 0xee, }, 6, 0, "", "",
516"62 f2 7e 4f 25 ee \tvpmovsqd %zmm5,%ymm6{%k7}",},
517{{0x62, 0xf2, 0x4d, 0x48, 0x26, 0xed, }, 6, 0, "", "",
518"62 f2 4d 48 26 ed \tvptestmb %zmm5,%zmm6,%k5",},
519{{0x62, 0xf2, 0xcd, 0x48, 0x26, 0xed, }, 6, 0, "", "",
520"62 f2 cd 48 26 ed \tvptestmw %zmm5,%zmm6,%k5",},
521{{0x62, 0xf2, 0x56, 0x48, 0x26, 0xec, }, 6, 0, "", "",
522"62 f2 56 48 26 ec \tvptestnmb %zmm4,%zmm5,%k5",},
523{{0x62, 0xf2, 0xd6, 0x48, 0x26, 0xec, }, 6, 0, "", "",
524"62 f2 d6 48 26 ec \tvptestnmw %zmm4,%zmm5,%k5",},
525{{0x62, 0xf2, 0x4d, 0x48, 0x27, 0xed, }, 6, 0, "", "",
526"62 f2 4d 48 27 ed \tvptestmd %zmm5,%zmm6,%k5",},
527{{0x62, 0xf2, 0xcd, 0x48, 0x27, 0xed, }, 6, 0, "", "",
528"62 f2 cd 48 27 ed \tvptestmq %zmm5,%zmm6,%k5",},
529{{0x62, 0xf2, 0x56, 0x48, 0x27, 0xec, }, 6, 0, "", "",
530"62 f2 56 48 27 ec \tvptestnmd %zmm4,%zmm5,%k5",},
531{{0x62, 0xf2, 0xd6, 0x48, 0x27, 0xec, }, 6, 0, "", "",
532"62 f2 d6 48 27 ec \tvptestnmq %zmm4,%zmm5,%k5",},
533{{0xc4, 0xe2, 0x4d, 0x28, 0xd4, }, 5, 0, "", "",
534"c4 e2 4d 28 d4 \tvpmuldq %ymm4,%ymm6,%ymm2",},
535{{0x62, 0xf2, 0x7e, 0x48, 0x28, 0xf5, }, 6, 0, "", "",
536"62 f2 7e 48 28 f5 \tvpmovm2b %k5,%zmm6",},
537{{0x62, 0xf2, 0xfe, 0x48, 0x28, 0xf5, }, 6, 0, "", "",
538"62 f2 fe 48 28 f5 \tvpmovm2w %k5,%zmm6",},
539{{0xc4, 0xe2, 0x4d, 0x29, 0xd4, }, 5, 0, "", "",
540"c4 e2 4d 29 d4 \tvpcmpeqq %ymm4,%ymm6,%ymm2",},
541{{0x62, 0xf2, 0x7e, 0x48, 0x29, 0xee, }, 6, 0, "", "",
542"62 f2 7e 48 29 ee \tvpmovb2m %zmm6,%k5",},
543{{0x62, 0xf2, 0xfe, 0x48, 0x29, 0xee, }, 6, 0, "", "",
544"62 f2 fe 48 29 ee \tvpmovw2m %zmm6,%k5",},
545{{0xc4, 0xe2, 0x7d, 0x2a, 0x21, }, 5, 0, "", "",
546"c4 e2 7d 2a 21 \tvmovntdqa (%ecx),%ymm4",},
547{{0x62, 0xf2, 0xfe, 0x48, 0x2a, 0xce, }, 6, 0, "", "",
548"62 f2 fe 48 2a ce \tvpbroadcastmb2q %k6,%zmm1",},
549{{0xc4, 0xe2, 0x5d, 0x2c, 0x31, }, 5, 0, "", "",
550"c4 e2 5d 2c 31 \tvmaskmovps (%ecx),%ymm4,%ymm6",},
551{{0x62, 0xf2, 0x55, 0x48, 0x2c, 0xf4, }, 6, 0, "", "",
552"62 f2 55 48 2c f4 \tvscalefps %zmm4,%zmm5,%zmm6",},
553{{0x62, 0xf2, 0xd5, 0x48, 0x2c, 0xf4, }, 6, 0, "", "",
554"62 f2 d5 48 2c f4 \tvscalefpd %zmm4,%zmm5,%zmm6",},
555{{0xc4, 0xe2, 0x5d, 0x2d, 0x31, }, 5, 0, "", "",
556"c4 e2 5d 2d 31 \tvmaskmovpd (%ecx),%ymm4,%ymm6",},
557{{0x62, 0xf2, 0x55, 0x0f, 0x2d, 0xf4, }, 6, 0, "", "",
558"62 f2 55 0f 2d f4 \tvscalefss %xmm4,%xmm5,%xmm6{%k7}",},
559{{0x62, 0xf2, 0xd5, 0x0f, 0x2d, 0xf4, }, 6, 0, "", "",
560"62 f2 d5 0f 2d f4 \tvscalefsd %xmm4,%xmm5,%xmm6{%k7}",},
561{{0xc4, 0xe2, 0x7d, 0x30, 0xe4, }, 5, 0, "", "",
562"c4 e2 7d 30 e4 \tvpmovzxbw %xmm4,%ymm4",},
563{{0x62, 0xf2, 0x7e, 0x4f, 0x30, 0xee, }, 6, 0, "", "",
564"62 f2 7e 4f 30 ee \tvpmovwb %zmm5,%ymm6{%k7}",},
565{{0xc4, 0xe2, 0x7d, 0x31, 0xf4, }, 5, 0, "", "",
566"c4 e2 7d 31 f4 \tvpmovzxbd %xmm4,%ymm6",},
567{{0x62, 0xf2, 0x7e, 0x4f, 0x31, 0xee, }, 6, 0, "", "",
568"62 f2 7e 4f 31 ee \tvpmovdb %zmm5,%xmm6{%k7}",},
569{{0xc4, 0xe2, 0x7d, 0x32, 0xe4, }, 5, 0, "", "",
570"c4 e2 7d 32 e4 \tvpmovzxbq %xmm4,%ymm4",},
571{{0x62, 0xf2, 0x7e, 0x4f, 0x32, 0xee, }, 6, 0, "", "",
572"62 f2 7e 4f 32 ee \tvpmovqb %zmm5,%xmm6{%k7}",},
573{{0xc4, 0xe2, 0x7d, 0x33, 0xe4, }, 5, 0, "", "",
574"c4 e2 7d 33 e4 \tvpmovzxwd %xmm4,%ymm4",},
575{{0x62, 0xf2, 0x7e, 0x4f, 0x33, 0xee, }, 6, 0, "", "",
576"62 f2 7e 4f 33 ee \tvpmovdw %zmm5,%ymm6{%k7}",},
577{{0xc4, 0xe2, 0x7d, 0x34, 0xf4, }, 5, 0, "", "",
578"c4 e2 7d 34 f4 \tvpmovzxwq %xmm4,%ymm6",},
579{{0x62, 0xf2, 0x7e, 0x4f, 0x34, 0xee, }, 6, 0, "", "",
580"62 f2 7e 4f 34 ee \tvpmovqw %zmm5,%xmm6{%k7}",},
581{{0xc4, 0xe2, 0x7d, 0x35, 0xe4, }, 5, 0, "", "",
582"c4 e2 7d 35 e4 \tvpmovzxdq %xmm4,%ymm4",},
583{{0x62, 0xf2, 0x7e, 0x4f, 0x35, 0xee, }, 6, 0, "", "",
584"62 f2 7e 4f 35 ee \tvpmovqd %zmm5,%ymm6{%k7}",},
585{{0xc4, 0xe2, 0x4d, 0x36, 0xd4, }, 5, 0, "", "",
586"c4 e2 4d 36 d4 \tvpermd %ymm4,%ymm6,%ymm2",},
587{{0x62, 0xf2, 0x4d, 0x2f, 0x36, 0xd4, }, 6, 0, "", "",
588"62 f2 4d 2f 36 d4 \tvpermd %ymm4,%ymm6,%ymm2{%k7}",},
589{{0x62, 0xf2, 0xcd, 0x2f, 0x36, 0xd4, }, 6, 0, "", "",
590"62 f2 cd 2f 36 d4 \tvpermq %ymm4,%ymm6,%ymm2{%k7}",},
591{{0xc4, 0xe2, 0x4d, 0x38, 0xd4, }, 5, 0, "", "",
592"c4 e2 4d 38 d4 \tvpminsb %ymm4,%ymm6,%ymm2",},
593{{0x62, 0xf2, 0x7e, 0x48, 0x38, 0xf5, }, 6, 0, "", "",
594"62 f2 7e 48 38 f5 \tvpmovm2d %k5,%zmm6",},
595{{0x62, 0xf2, 0xfe, 0x48, 0x38, 0xf5, }, 6, 0, "", "",
596"62 f2 fe 48 38 f5 \tvpmovm2q %k5,%zmm6",},
597{{0xc4, 0xe2, 0x69, 0x39, 0xd9, }, 5, 0, "", "",
598"c4 e2 69 39 d9 \tvpminsd %xmm1,%xmm2,%xmm3",},
599{{0x62, 0xf2, 0x55, 0x48, 0x39, 0xf4, }, 6, 0, "", "",
600"62 f2 55 48 39 f4 \tvpminsd %zmm4,%zmm5,%zmm6",},
601{{0x62, 0xf2, 0xd5, 0x48, 0x39, 0xf4, }, 6, 0, "", "",
602"62 f2 d5 48 39 f4 \tvpminsq %zmm4,%zmm5,%zmm6",},
603{{0x62, 0xf2, 0x7e, 0x48, 0x39, 0xee, }, 6, 0, "", "",
604"62 f2 7e 48 39 ee \tvpmovd2m %zmm6,%k5",},
605{{0x62, 0xf2, 0xfe, 0x48, 0x39, 0xee, }, 6, 0, "", "",
606"62 f2 fe 48 39 ee \tvpmovq2m %zmm6,%k5",},
607{{0xc4, 0xe2, 0x4d, 0x3a, 0xd4, }, 5, 0, "", "",
608"c4 e2 4d 3a d4 \tvpminuw %ymm4,%ymm6,%ymm2",},
609{{0x62, 0xf2, 0x7e, 0x48, 0x3a, 0xf6, }, 6, 0, "", "",
610"62 f2 7e 48 3a f6 \tvpbroadcastmw2d %k6,%zmm6",},
611{{0xc4, 0xe2, 0x4d, 0x3b, 0xd4, }, 5, 0, "", "",
612"c4 e2 4d 3b d4 \tvpminud %ymm4,%ymm6,%ymm2",},
613{{0x62, 0xf2, 0x55, 0x48, 0x3b, 0xf4, }, 6, 0, "", "",
614"62 f2 55 48 3b f4 \tvpminud %zmm4,%zmm5,%zmm6",},
615{{0x62, 0xf2, 0xd5, 0x48, 0x3b, 0xf4, }, 6, 0, "", "",
616"62 f2 d5 48 3b f4 \tvpminuq %zmm4,%zmm5,%zmm6",},
617{{0xc4, 0xe2, 0x4d, 0x3d, 0xd4, }, 5, 0, "", "",
618"c4 e2 4d 3d d4 \tvpmaxsd %ymm4,%ymm6,%ymm2",},
619{{0x62, 0xf2, 0x55, 0x48, 0x3d, 0xf4, }, 6, 0, "", "",
620"62 f2 55 48 3d f4 \tvpmaxsd %zmm4,%zmm5,%zmm6",},
621{{0x62, 0xf2, 0xd5, 0x48, 0x3d, 0xf4, }, 6, 0, "", "",
622"62 f2 d5 48 3d f4 \tvpmaxsq %zmm4,%zmm5,%zmm6",},
623{{0xc4, 0xe2, 0x4d, 0x3f, 0xd4, }, 5, 0, "", "",
624"c4 e2 4d 3f d4 \tvpmaxud %ymm4,%ymm6,%ymm2",},
625{{0x62, 0xf2, 0x55, 0x48, 0x3f, 0xf4, }, 6, 0, "", "",
626"62 f2 55 48 3f f4 \tvpmaxud %zmm4,%zmm5,%zmm6",},
627{{0x62, 0xf2, 0xd5, 0x48, 0x3f, 0xf4, }, 6, 0, "", "",
628"62 f2 d5 48 3f f4 \tvpmaxuq %zmm4,%zmm5,%zmm6",},
629{{0xc4, 0xe2, 0x4d, 0x40, 0xd4, }, 5, 0, "", "",
630"c4 e2 4d 40 d4 \tvpmulld %ymm4,%ymm6,%ymm2",},
631{{0x62, 0xf2, 0x55, 0x48, 0x40, 0xf4, }, 6, 0, "", "",
632"62 f2 55 48 40 f4 \tvpmulld %zmm4,%zmm5,%zmm6",},
633{{0x62, 0xf2, 0xd5, 0x48, 0x40, 0xf4, }, 6, 0, "", "",
634"62 f2 d5 48 40 f4 \tvpmullq %zmm4,%zmm5,%zmm6",},
635{{0x62, 0xf2, 0x7d, 0x48, 0x42, 0xf5, }, 6, 0, "", "",
636"62 f2 7d 48 42 f5 \tvgetexpps %zmm5,%zmm6",},
637{{0x62, 0xf2, 0xfd, 0x48, 0x42, 0xf5, }, 6, 0, "", "",
638"62 f2 fd 48 42 f5 \tvgetexppd %zmm5,%zmm6",},
639{{0x62, 0xf2, 0x55, 0x0f, 0x43, 0xf4, }, 6, 0, "", "",
640"62 f2 55 0f 43 f4 \tvgetexpss %xmm4,%xmm5,%xmm6{%k7}",},
641{{0x62, 0xf2, 0xe5, 0x0f, 0x43, 0xe2, }, 6, 0, "", "",
642"62 f2 e5 0f 43 e2 \tvgetexpsd %xmm2,%xmm3,%xmm4{%k7}",},
643{{0x62, 0xf2, 0x7d, 0x48, 0x44, 0xf5, }, 6, 0, "", "",
644"62 f2 7d 48 44 f5 \tvplzcntd %zmm5,%zmm6",},
645{{0x62, 0xf2, 0xfd, 0x48, 0x44, 0xf5, }, 6, 0, "", "",
646"62 f2 fd 48 44 f5 \tvplzcntq %zmm5,%zmm6",},
647{{0xc4, 0xe2, 0x4d, 0x46, 0xd4, }, 5, 0, "", "",
648"c4 e2 4d 46 d4 \tvpsravd %ymm4,%ymm6,%ymm2",},
649{{0x62, 0xf2, 0x55, 0x48, 0x46, 0xf4, }, 6, 0, "", "",
650"62 f2 55 48 46 f4 \tvpsravd %zmm4,%zmm5,%zmm6",},
651{{0x62, 0xf2, 0xd5, 0x48, 0x46, 0xf4, }, 6, 0, "", "",
652"62 f2 d5 48 46 f4 \tvpsravq %zmm4,%zmm5,%zmm6",},
653{{0x62, 0xf2, 0x7d, 0x48, 0x4c, 0xf5, }, 6, 0, "", "",
654"62 f2 7d 48 4c f5 \tvrcp14ps %zmm5,%zmm6",},
655{{0x62, 0xf2, 0xfd, 0x48, 0x4c, 0xf5, }, 6, 0, "", "",
656"62 f2 fd 48 4c f5 \tvrcp14pd %zmm5,%zmm6",},
657{{0x62, 0xf2, 0x55, 0x0f, 0x4d, 0xf4, }, 6, 0, "", "",
658"62 f2 55 0f 4d f4 \tvrcp14ss %xmm4,%xmm5,%xmm6{%k7}",},
659{{0x62, 0xf2, 0xd5, 0x0f, 0x4d, 0xf4, }, 6, 0, "", "",
660"62 f2 d5 0f 4d f4 \tvrcp14sd %xmm4,%xmm5,%xmm6{%k7}",},
661{{0x62, 0xf2, 0x7d, 0x48, 0x4e, 0xf5, }, 6, 0, "", "",
662"62 f2 7d 48 4e f5 \tvrsqrt14ps %zmm5,%zmm6",},
663{{0x62, 0xf2, 0xfd, 0x48, 0x4e, 0xf5, }, 6, 0, "", "",
664"62 f2 fd 48 4e f5 \tvrsqrt14pd %zmm5,%zmm6",},
665{{0x62, 0xf2, 0x55, 0x0f, 0x4f, 0xf4, }, 6, 0, "", "",
666"62 f2 55 0f 4f f4 \tvrsqrt14ss %xmm4,%xmm5,%xmm6{%k7}",},
667{{0x62, 0xf2, 0xd5, 0x0f, 0x4f, 0xf4, }, 6, 0, "", "",
668"62 f2 d5 0f 4f f4 \tvrsqrt14sd %xmm4,%xmm5,%xmm6{%k7}",},
669{{0xc4, 0xe2, 0x79, 0x59, 0xf4, }, 5, 0, "", "",
670"c4 e2 79 59 f4 \tvpbroadcastq %xmm4,%xmm6",},
671{{0x62, 0xf2, 0x7d, 0x48, 0x59, 0xf7, }, 6, 0, "", "",
672"62 f2 7d 48 59 f7 \tvbroadcasti32x2 %xmm7,%zmm6",},
673{{0xc4, 0xe2, 0x7d, 0x5a, 0x21, }, 5, 0, "", "",
674"c4 e2 7d 5a 21 \tvbroadcasti128 (%ecx),%ymm4",},
675{{0x62, 0xf2, 0x7d, 0x48, 0x5a, 0x31, }, 6, 0, "", "",
676"62 f2 7d 48 5a 31 \tvbroadcasti32x4 (%ecx),%zmm6",},
677{{0x62, 0xf2, 0xfd, 0x48, 0x5a, 0x31, }, 6, 0, "", "",
678"62 f2 fd 48 5a 31 \tvbroadcasti64x2 (%ecx),%zmm6",},
679{{0x62, 0xf2, 0x7d, 0x48, 0x5b, 0x31, }, 6, 0, "", "",
680"62 f2 7d 48 5b 31 \tvbroadcasti32x8 (%ecx),%zmm6",},
681{{0x62, 0xf2, 0xfd, 0x48, 0x5b, 0x31, }, 6, 0, "", "",
682"62 f2 fd 48 5b 31 \tvbroadcasti64x4 (%ecx),%zmm6",},
683{{0x62, 0xf2, 0x55, 0x48, 0x64, 0xf4, }, 6, 0, "", "",
684"62 f2 55 48 64 f4 \tvpblendmd %zmm4,%zmm5,%zmm6",},
685{{0x62, 0xf2, 0xd5, 0x48, 0x64, 0xf4, }, 6, 0, "", "",
686"62 f2 d5 48 64 f4 \tvpblendmq %zmm4,%zmm5,%zmm6",},
687{{0x62, 0xf2, 0x55, 0x48, 0x65, 0xf4, }, 6, 0, "", "",
688"62 f2 55 48 65 f4 \tvblendmps %zmm4,%zmm5,%zmm6",},
689{{0x62, 0xf2, 0xd5, 0x48, 0x65, 0xf4, }, 6, 0, "", "",
690"62 f2 d5 48 65 f4 \tvblendmpd %zmm4,%zmm5,%zmm6",},
691{{0x62, 0xf2, 0x55, 0x48, 0x66, 0xf4, }, 6, 0, "", "",
692"62 f2 55 48 66 f4 \tvpblendmb %zmm4,%zmm5,%zmm6",},
693{{0x62, 0xf2, 0xd5, 0x48, 0x66, 0xf4, }, 6, 0, "", "",
694"62 f2 d5 48 66 f4 \tvpblendmw %zmm4,%zmm5,%zmm6",},
695{{0x62, 0xf2, 0x55, 0x48, 0x75, 0xf4, }, 6, 0, "", "",
696"62 f2 55 48 75 f4 \tvpermi2b %zmm4,%zmm5,%zmm6",},
697{{0x62, 0xf2, 0xd5, 0x48, 0x75, 0xf4, }, 6, 0, "", "",
698"62 f2 d5 48 75 f4 \tvpermi2w %zmm4,%zmm5,%zmm6",},
699{{0x62, 0xf2, 0x55, 0x48, 0x76, 0xf4, }, 6, 0, "", "",
700"62 f2 55 48 76 f4 \tvpermi2d %zmm4,%zmm5,%zmm6",},
701{{0x62, 0xf2, 0xd5, 0x48, 0x76, 0xf4, }, 6, 0, "", "",
702"62 f2 d5 48 76 f4 \tvpermi2q %zmm4,%zmm5,%zmm6",},
703{{0x62, 0xf2, 0x55, 0x48, 0x77, 0xf4, }, 6, 0, "", "",
704"62 f2 55 48 77 f4 \tvpermi2ps %zmm4,%zmm5,%zmm6",},
705{{0x62, 0xf2, 0xd5, 0x48, 0x77, 0xf4, }, 6, 0, "", "",
706"62 f2 d5 48 77 f4 \tvpermi2pd %zmm4,%zmm5,%zmm6",},
707{{0x62, 0xf2, 0x7d, 0x08, 0x7a, 0xd8, }, 6, 0, "", "",
708"62 f2 7d 08 7a d8 \tvpbroadcastb %eax,%xmm3",},
709{{0x62, 0xf2, 0x7d, 0x08, 0x7b, 0xd8, }, 6, 0, "", "",
710"62 f2 7d 08 7b d8 \tvpbroadcastw %eax,%xmm3",},
711{{0x62, 0xf2, 0x7d, 0x08, 0x7c, 0xd8, }, 6, 0, "", "",
712"62 f2 7d 08 7c d8 \tvpbroadcastd %eax,%xmm3",},
713{{0x62, 0xf2, 0x55, 0x48, 0x7d, 0xf4, }, 6, 0, "", "",
714"62 f2 55 48 7d f4 \tvpermt2b %zmm4,%zmm5,%zmm6",},
715{{0x62, 0xf2, 0xd5, 0x48, 0x7d, 0xf4, }, 6, 0, "", "",
716"62 f2 d5 48 7d f4 \tvpermt2w %zmm4,%zmm5,%zmm6",},
717{{0x62, 0xf2, 0x55, 0x48, 0x7e, 0xf4, }, 6, 0, "", "",
718"62 f2 55 48 7e f4 \tvpermt2d %zmm4,%zmm5,%zmm6",},
719{{0x62, 0xf2, 0xd5, 0x48, 0x7e, 0xf4, }, 6, 0, "", "",
720"62 f2 d5 48 7e f4 \tvpermt2q %zmm4,%zmm5,%zmm6",},
721{{0x62, 0xf2, 0x55, 0x48, 0x7f, 0xf4, }, 6, 0, "", "",
722"62 f2 55 48 7f f4 \tvpermt2ps %zmm4,%zmm5,%zmm6",},
723{{0x62, 0xf2, 0xd5, 0x48, 0x7f, 0xf4, }, 6, 0, "", "",
724"62 f2 d5 48 7f f4 \tvpermt2pd %zmm4,%zmm5,%zmm6",},
725{{0x62, 0xf2, 0xd5, 0x48, 0x83, 0xf4, }, 6, 0, "", "",
726"62 f2 d5 48 83 f4 \tvpmultishiftqb %zmm4,%zmm5,%zmm6",},
727{{0x62, 0xf2, 0x7d, 0x48, 0x88, 0x31, }, 6, 0, "", "",
728"62 f2 7d 48 88 31 \tvexpandps (%ecx),%zmm6",},
729{{0x62, 0xf2, 0xfd, 0x48, 0x88, 0x31, }, 6, 0, "", "",
730"62 f2 fd 48 88 31 \tvexpandpd (%ecx),%zmm6",},
731{{0x62, 0xf2, 0x7d, 0x48, 0x89, 0x31, }, 6, 0, "", "",
732"62 f2 7d 48 89 31 \tvpexpandd (%ecx),%zmm6",},
733{{0x62, 0xf2, 0xfd, 0x48, 0x89, 0x31, }, 6, 0, "", "",
734"62 f2 fd 48 89 31 \tvpexpandq (%ecx),%zmm6",},
735{{0x62, 0xf2, 0x7d, 0x48, 0x8a, 0x31, }, 6, 0, "", "",
736"62 f2 7d 48 8a 31 \tvcompressps %zmm6,(%ecx)",},
737{{0x62, 0xf2, 0xfd, 0x48, 0x8a, 0x31, }, 6, 0, "", "",
738"62 f2 fd 48 8a 31 \tvcompresspd %zmm6,(%ecx)",},
739{{0x62, 0xf2, 0x7d, 0x48, 0x8b, 0x31, }, 6, 0, "", "",
740"62 f2 7d 48 8b 31 \tvpcompressd %zmm6,(%ecx)",},
741{{0x62, 0xf2, 0xfd, 0x48, 0x8b, 0x31, }, 6, 0, "", "",
742"62 f2 fd 48 8b 31 \tvpcompressq %zmm6,(%ecx)",},
743{{0x62, 0xf2, 0x55, 0x48, 0x8d, 0xf4, }, 6, 0, "", "",
744"62 f2 55 48 8d f4 \tvpermb %zmm4,%zmm5,%zmm6",},
745{{0x62, 0xf2, 0xd5, 0x48, 0x8d, 0xf4, }, 6, 0, "", "",
746"62 f2 d5 48 8d f4 \tvpermw %zmm4,%zmm5,%zmm6",},
747{{0xc4, 0xe2, 0x69, 0x90, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
748"c4 e2 69 90 4c 7d 02 \tvpgatherdd %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
749{{0xc4, 0xe2, 0xe9, 0x90, 0x4c, 0x7d, 0x04, }, 7, 0, "", "",
750"c4 e2 e9 90 4c 7d 04 \tvpgatherdq %xmm2,0x4(%ebp,%xmm7,2),%xmm1",},
751{{0x62, 0xf2, 0x7d, 0x49, 0x90, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
752"62 f2 7d 49 90 b4 fd 7b 00 00 00 \tvpgatherdd 0x7b(%ebp,%zmm7,8),%zmm6{%k1}",},
753{{0x62, 0xf2, 0xfd, 0x49, 0x90, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
754"62 f2 fd 49 90 b4 fd 7b 00 00 00 \tvpgatherdq 0x7b(%ebp,%ymm7,8),%zmm6{%k1}",},
755{{0xc4, 0xe2, 0x69, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
756"c4 e2 69 91 4c 7d 02 \tvpgatherqd %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
757{{0xc4, 0xe2, 0xe9, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
758"c4 e2 e9 91 4c 7d 02 \tvpgatherqq %xmm2,0x2(%ebp,%xmm7,2),%xmm1",},
759{{0x62, 0xf2, 0x7d, 0x49, 0x91, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
760"62 f2 7d 49 91 b4 fd 7b 00 00 00 \tvpgatherqd 0x7b(%ebp,%zmm7,8),%ymm6{%k1}",},
761{{0x62, 0xf2, 0xfd, 0x49, 0x91, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
762"62 f2 fd 49 91 b4 fd 7b 00 00 00 \tvpgatherqq 0x7b(%ebp,%zmm7,8),%zmm6{%k1}",},
763{{0x62, 0xf2, 0x7d, 0x49, 0xa0, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
764"62 f2 7d 49 a0 b4 fd 7b 00 00 00 \tvpscatterdd %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
765{{0x62, 0xf2, 0xfd, 0x49, 0xa0, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
766"62 f2 fd 49 a0 b4 fd 7b 00 00 00 \tvpscatterdq %zmm6,0x7b(%ebp,%ymm7,8){%k1}",},
767{{0x62, 0xf2, 0x7d, 0x49, 0xa1, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
768"62 f2 7d 49 a1 b4 fd 7b 00 00 00 \tvpscatterqd %ymm6,0x7b(%ebp,%zmm7,8){%k1}",},
769{{0x62, 0xf2, 0xfd, 0x29, 0xa1, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
770"62 f2 fd 29 a1 b4 fd 7b 00 00 00 \tvpscatterqq %ymm6,0x7b(%ebp,%ymm7,8){%k1}",},
771{{0x62, 0xf2, 0x7d, 0x49, 0xa2, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
772"62 f2 7d 49 a2 b4 fd 7b 00 00 00 \tvscatterdps %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
773{{0x62, 0xf2, 0xfd, 0x49, 0xa2, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
774"62 f2 fd 49 a2 b4 fd 7b 00 00 00 \tvscatterdpd %zmm6,0x7b(%ebp,%ymm7,8){%k1}",},
775{{0x62, 0xf2, 0x7d, 0x49, 0xa3, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
776"62 f2 7d 49 a3 b4 fd 7b 00 00 00 \tvscatterqps %ymm6,0x7b(%ebp,%zmm7,8){%k1}",},
777{{0x62, 0xf2, 0xfd, 0x49, 0xa3, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
778"62 f2 fd 49 a3 b4 fd 7b 00 00 00 \tvscatterqpd %zmm6,0x7b(%ebp,%zmm7,8){%k1}",},
779{{0x62, 0xf2, 0xd5, 0x48, 0xb4, 0xf4, }, 6, 0, "", "",
780"62 f2 d5 48 b4 f4 \tvpmadd52luq %zmm4,%zmm5,%zmm6",},
781{{0x62, 0xf2, 0xd5, 0x48, 0xb5, 0xf4, }, 6, 0, "", "",
782"62 f2 d5 48 b5 f4 \tvpmadd52huq %zmm4,%zmm5,%zmm6",},
783{{0x62, 0xf2, 0x7d, 0x48, 0xc4, 0xf5, }, 6, 0, "", "",
784"62 f2 7d 48 c4 f5 \tvpconflictd %zmm5,%zmm6",},
785{{0x62, 0xf2, 0xfd, 0x48, 0xc4, 0xf5, }, 6, 0, "", "",
786"62 f2 fd 48 c4 f5 \tvpconflictq %zmm5,%zmm6",},
787{{0x62, 0xf2, 0x7d, 0x48, 0xc8, 0xfe, }, 6, 0, "", "",
788"62 f2 7d 48 c8 fe \tvexp2ps %zmm6,%zmm7",},
789{{0x62, 0xf2, 0xfd, 0x48, 0xc8, 0xfe, }, 6, 0, "", "",
790"62 f2 fd 48 c8 fe \tvexp2pd %zmm6,%zmm7",},
791{{0x62, 0xf2, 0x7d, 0x48, 0xca, 0xfe, }, 6, 0, "", "",
792"62 f2 7d 48 ca fe \tvrcp28ps %zmm6,%zmm7",},
793{{0x62, 0xf2, 0xfd, 0x48, 0xca, 0xfe, }, 6, 0, "", "",
794"62 f2 fd 48 ca fe \tvrcp28pd %zmm6,%zmm7",},
795{{0x62, 0xf2, 0x4d, 0x0f, 0xcb, 0xfd, }, 6, 0, "", "",
796"62 f2 4d 0f cb fd \tvrcp28ss %xmm5,%xmm6,%xmm7{%k7}",},
797{{0x62, 0xf2, 0xcd, 0x0f, 0xcb, 0xfd, }, 6, 0, "", "",
798"62 f2 cd 0f cb fd \tvrcp28sd %xmm5,%xmm6,%xmm7{%k7}",},
799{{0x62, 0xf2, 0x7d, 0x48, 0xcc, 0xfe, }, 6, 0, "", "",
800"62 f2 7d 48 cc fe \tvrsqrt28ps %zmm6,%zmm7",},
801{{0x62, 0xf2, 0xfd, 0x48, 0xcc, 0xfe, }, 6, 0, "", "",
802"62 f2 fd 48 cc fe \tvrsqrt28pd %zmm6,%zmm7",},
803{{0x62, 0xf2, 0x4d, 0x0f, 0xcd, 0xfd, }, 6, 0, "", "",
804"62 f2 4d 0f cd fd \tvrsqrt28ss %xmm5,%xmm6,%xmm7{%k7}",},
805{{0x62, 0xf2, 0xcd, 0x0f, 0xcd, 0xfd, }, 6, 0, "", "",
806"62 f2 cd 0f cd fd \tvrsqrt28sd %xmm5,%xmm6,%xmm7{%k7}",},
807{{0x62, 0xf3, 0x4d, 0x48, 0x03, 0xfd, 0x12, }, 7, 0, "", "",
808"62 f3 4d 48 03 fd 12 \tvalignd $0x12,%zmm5,%zmm6,%zmm7",},
809{{0x62, 0xf3, 0xcd, 0x48, 0x03, 0xfd, 0x12, }, 7, 0, "", "",
810"62 f3 cd 48 03 fd 12 \tvalignq $0x12,%zmm5,%zmm6,%zmm7",},
811{{0xc4, 0xe3, 0x7d, 0x08, 0xd6, 0x05, }, 6, 0, "", "",
812"c4 e3 7d 08 d6 05 \tvroundps $0x5,%ymm6,%ymm2",},
813{{0x62, 0xf3, 0x7d, 0x48, 0x08, 0xf5, 0x12, }, 7, 0, "", "",
814"62 f3 7d 48 08 f5 12 \tvrndscaleps $0x12,%zmm5,%zmm6",},
815{{0xc4, 0xe3, 0x7d, 0x09, 0xd6, 0x05, }, 6, 0, "", "",
816"c4 e3 7d 09 d6 05 \tvroundpd $0x5,%ymm6,%ymm2",},
817{{0x62, 0xf3, 0xfd, 0x48, 0x09, 0xf5, 0x12, }, 7, 0, "", "",
818"62 f3 fd 48 09 f5 12 \tvrndscalepd $0x12,%zmm5,%zmm6",},
819{{0xc4, 0xe3, 0x49, 0x0a, 0xd4, 0x05, }, 6, 0, "", "",
820"c4 e3 49 0a d4 05 \tvroundss $0x5,%xmm4,%xmm6,%xmm2",},
821{{0x62, 0xf3, 0x55, 0x0f, 0x0a, 0xf4, 0x12, }, 7, 0, "", "",
822"62 f3 55 0f 0a f4 12 \tvrndscaless $0x12,%xmm4,%xmm5,%xmm6{%k7}",},
823{{0xc4, 0xe3, 0x49, 0x0b, 0xd4, 0x05, }, 6, 0, "", "",
824"c4 e3 49 0b d4 05 \tvroundsd $0x5,%xmm4,%xmm6,%xmm2",},
825{{0x62, 0xf3, 0xd5, 0x0f, 0x0b, 0xf4, 0x12, }, 7, 0, "", "",
826"62 f3 d5 0f 0b f4 12 \tvrndscalesd $0x12,%xmm4,%xmm5,%xmm6{%k7}",},
827{{0xc4, 0xe3, 0x5d, 0x18, 0xf4, 0x05, }, 6, 0, "", "",
828"c4 e3 5d 18 f4 05 \tvinsertf128 $0x5,%xmm4,%ymm4,%ymm6",},
829{{0x62, 0xf3, 0x55, 0x4f, 0x18, 0xf4, 0x12, }, 7, 0, "", "",
830"62 f3 55 4f 18 f4 12 \tvinsertf32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
831{{0x62, 0xf3, 0xd5, 0x4f, 0x18, 0xf4, 0x12, }, 7, 0, "", "",
832"62 f3 d5 4f 18 f4 12 \tvinsertf64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
833{{0xc4, 0xe3, 0x7d, 0x19, 0xe4, 0x05, }, 6, 0, "", "",
834"c4 e3 7d 19 e4 05 \tvextractf128 $0x5,%ymm4,%xmm4",},
835{{0x62, 0xf3, 0x7d, 0x4f, 0x19, 0xee, 0x12, }, 7, 0, "", "",
836"62 f3 7d 4f 19 ee 12 \tvextractf32x4 $0x12,%zmm5,%xmm6{%k7}",},
837{{0x62, 0xf3, 0xfd, 0x4f, 0x19, 0xee, 0x12, }, 7, 0, "", "",
838"62 f3 fd 4f 19 ee 12 \tvextractf64x2 $0x12,%zmm5,%xmm6{%k7}",},
839{{0x62, 0xf3, 0x4d, 0x4f, 0x1a, 0xfd, 0x12, }, 7, 0, "", "",
840"62 f3 4d 4f 1a fd 12 \tvinsertf32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
841{{0x62, 0xf3, 0xcd, 0x4f, 0x1a, 0xfd, 0x12, }, 7, 0, "", "",
842"62 f3 cd 4f 1a fd 12 \tvinsertf64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
843{{0x62, 0xf3, 0x7d, 0x4f, 0x1b, 0xf7, 0x12, }, 7, 0, "", "",
844"62 f3 7d 4f 1b f7 12 \tvextractf32x8 $0x12,%zmm6,%ymm7{%k7}",},
845{{0x62, 0xf3, 0xfd, 0x4f, 0x1b, 0xf7, 0x12, }, 7, 0, "", "",
846"62 f3 fd 4f 1b f7 12 \tvextractf64x4 $0x12,%zmm6,%ymm7{%k7}",},
847{{0x62, 0xf3, 0x45, 0x48, 0x1e, 0xee, 0x12, }, 7, 0, "", "",
848"62 f3 45 48 1e ee 12 \tvpcmpud $0x12,%zmm6,%zmm7,%k5",},
849{{0x62, 0xf3, 0xc5, 0x48, 0x1e, 0xee, 0x12, }, 7, 0, "", "",
850"62 f3 c5 48 1e ee 12 \tvpcmpuq $0x12,%zmm6,%zmm7,%k5",},
851{{0x62, 0xf3, 0x45, 0x48, 0x1f, 0xee, 0x12, }, 7, 0, "", "",
852"62 f3 45 48 1f ee 12 \tvpcmpd $0x12,%zmm6,%zmm7,%k5",},
853{{0x62, 0xf3, 0xc5, 0x48, 0x1f, 0xee, 0x12, }, 7, 0, "", "",
854"62 f3 c5 48 1f ee 12 \tvpcmpq $0x12,%zmm6,%zmm7,%k5",},
855{{0x62, 0xf3, 0x4d, 0x48, 0x23, 0xfd, 0x12, }, 7, 0, "", "",
856"62 f3 4d 48 23 fd 12 \tvshuff32x4 $0x12,%zmm5,%zmm6,%zmm7",},
857{{0x62, 0xf3, 0xcd, 0x48, 0x23, 0xfd, 0x12, }, 7, 0, "", "",
858"62 f3 cd 48 23 fd 12 \tvshuff64x2 $0x12,%zmm5,%zmm6,%zmm7",},
859{{0x62, 0xf3, 0x4d, 0x48, 0x25, 0xfd, 0x12, }, 7, 0, "", "",
860"62 f3 4d 48 25 fd 12 \tvpternlogd $0x12,%zmm5,%zmm6,%zmm7",},
861{{0x62, 0xf3, 0xcd, 0x48, 0x25, 0xfd, 0x12, }, 7, 0, "", "",
862"62 f3 cd 48 25 fd 12 \tvpternlogq $0x12,%zmm5,%zmm6,%zmm7",},
863{{0x62, 0xf3, 0x7d, 0x48, 0x26, 0xfe, 0x12, }, 7, 0, "", "",
864"62 f3 7d 48 26 fe 12 \tvgetmantps $0x12,%zmm6,%zmm7",},
865{{0x62, 0xf3, 0xfd, 0x48, 0x26, 0xfe, 0x12, }, 7, 0, "", "",
866"62 f3 fd 48 26 fe 12 \tvgetmantpd $0x12,%zmm6,%zmm7",},
867{{0x62, 0xf3, 0x4d, 0x0f, 0x27, 0xfd, 0x12, }, 7, 0, "", "",
868"62 f3 4d 0f 27 fd 12 \tvgetmantss $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
869{{0x62, 0xf3, 0xcd, 0x0f, 0x27, 0xfd, 0x12, }, 7, 0, "", "",
870"62 f3 cd 0f 27 fd 12 \tvgetmantsd $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
871{{0xc4, 0xe3, 0x5d, 0x38, 0xf4, 0x05, }, 6, 0, "", "",
872"c4 e3 5d 38 f4 05 \tvinserti128 $0x5,%xmm4,%ymm4,%ymm6",},
873{{0x62, 0xf3, 0x55, 0x4f, 0x38, 0xf4, 0x12, }, 7, 0, "", "",
874"62 f3 55 4f 38 f4 12 \tvinserti32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
875{{0x62, 0xf3, 0xd5, 0x4f, 0x38, 0xf4, 0x12, }, 7, 0, "", "",
876"62 f3 d5 4f 38 f4 12 \tvinserti64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}",},
877{{0xc4, 0xe3, 0x7d, 0x39, 0xe6, 0x05, }, 6, 0, "", "",
878"c4 e3 7d 39 e6 05 \tvextracti128 $0x5,%ymm4,%xmm6",},
879{{0x62, 0xf3, 0x7d, 0x4f, 0x39, 0xee, 0x12, }, 7, 0, "", "",
880"62 f3 7d 4f 39 ee 12 \tvextracti32x4 $0x12,%zmm5,%xmm6{%k7}",},
881{{0x62, 0xf3, 0xfd, 0x4f, 0x39, 0xee, 0x12, }, 7, 0, "", "",
882"62 f3 fd 4f 39 ee 12 \tvextracti64x2 $0x12,%zmm5,%xmm6{%k7}",},
883{{0x62, 0xf3, 0x4d, 0x4f, 0x3a, 0xfd, 0x12, }, 7, 0, "", "",
884"62 f3 4d 4f 3a fd 12 \tvinserti32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
885{{0x62, 0xf3, 0xcd, 0x4f, 0x3a, 0xfd, 0x12, }, 7, 0, "", "",
886"62 f3 cd 4f 3a fd 12 \tvinserti64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}",},
887{{0x62, 0xf3, 0x7d, 0x4f, 0x3b, 0xf7, 0x12, }, 7, 0, "", "",
888"62 f3 7d 4f 3b f7 12 \tvextracti32x8 $0x12,%zmm6,%ymm7{%k7}",},
889{{0x62, 0xf3, 0xfd, 0x4f, 0x3b, 0xf7, 0x12, }, 7, 0, "", "",
890"62 f3 fd 4f 3b f7 12 \tvextracti64x4 $0x12,%zmm6,%ymm7{%k7}",},
891{{0x62, 0xf3, 0x45, 0x48, 0x3e, 0xee, 0x12, }, 7, 0, "", "",
892"62 f3 45 48 3e ee 12 \tvpcmpub $0x12,%zmm6,%zmm7,%k5",},
893{{0x62, 0xf3, 0xc5, 0x48, 0x3e, 0xee, 0x12, }, 7, 0, "", "",
894"62 f3 c5 48 3e ee 12 \tvpcmpuw $0x12,%zmm6,%zmm7,%k5",},
895{{0x62, 0xf3, 0x45, 0x48, 0x3f, 0xee, 0x12, }, 7, 0, "", "",
896"62 f3 45 48 3f ee 12 \tvpcmpb $0x12,%zmm6,%zmm7,%k5",},
897{{0x62, 0xf3, 0xc5, 0x48, 0x3f, 0xee, 0x12, }, 7, 0, "", "",
898"62 f3 c5 48 3f ee 12 \tvpcmpw $0x12,%zmm6,%zmm7,%k5",},
899{{0xc4, 0xe3, 0x4d, 0x42, 0xd4, 0x05, }, 6, 0, "", "",
900"c4 e3 4d 42 d4 05 \tvmpsadbw $0x5,%ymm4,%ymm6,%ymm2",},
901{{0x62, 0xf3, 0x55, 0x48, 0x42, 0xf4, 0x12, }, 7, 0, "", "",
902"62 f3 55 48 42 f4 12 \tvdbpsadbw $0x12,%zmm4,%zmm5,%zmm6",},
903{{0x62, 0xf3, 0x4d, 0x48, 0x43, 0xfd, 0x12, }, 7, 0, "", "",
904"62 f3 4d 48 43 fd 12 \tvshufi32x4 $0x12,%zmm5,%zmm6,%zmm7",},
905{{0x62, 0xf3, 0xcd, 0x48, 0x43, 0xfd, 0x12, }, 7, 0, "", "",
906"62 f3 cd 48 43 fd 12 \tvshufi64x2 $0x12,%zmm5,%zmm6,%zmm7",},
907{{0x62, 0xf3, 0x4d, 0x48, 0x50, 0xfd, 0x12, }, 7, 0, "", "",
908"62 f3 4d 48 50 fd 12 \tvrangeps $0x12,%zmm5,%zmm6,%zmm7",},
909{{0x62, 0xf3, 0xcd, 0x48, 0x50, 0xfd, 0x12, }, 7, 0, "", "",
910"62 f3 cd 48 50 fd 12 \tvrangepd $0x12,%zmm5,%zmm6,%zmm7",},
911{{0x62, 0xf3, 0x4d, 0x08, 0x51, 0xfd, 0x12, }, 7, 0, "", "",
912"62 f3 4d 08 51 fd 12 \tvrangess $0x12,%xmm5,%xmm6,%xmm7",},
913{{0x62, 0xf3, 0xcd, 0x08, 0x51, 0xfd, 0x12, }, 7, 0, "", "",
914"62 f3 cd 08 51 fd 12 \tvrangesd $0x12,%xmm5,%xmm6,%xmm7",},
915{{0x62, 0xf3, 0x4d, 0x48, 0x54, 0xfd, 0x12, }, 7, 0, "", "",
916"62 f3 4d 48 54 fd 12 \tvfixupimmps $0x12,%zmm5,%zmm6,%zmm7",},
917{{0x62, 0xf3, 0xcd, 0x48, 0x54, 0xfd, 0x12, }, 7, 0, "", "",
918"62 f3 cd 48 54 fd 12 \tvfixupimmpd $0x12,%zmm5,%zmm6,%zmm7",},
919{{0x62, 0xf3, 0x4d, 0x0f, 0x55, 0xfd, 0x12, }, 7, 0, "", "",
920"62 f3 4d 0f 55 fd 12 \tvfixupimmss $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
921{{0x62, 0xf3, 0xcd, 0x0f, 0x55, 0xfd, 0x12, }, 7, 0, "", "",
922"62 f3 cd 0f 55 fd 12 \tvfixupimmsd $0x12,%xmm5,%xmm6,%xmm7{%k7}",},
923{{0x62, 0xf3, 0x7d, 0x48, 0x56, 0xfe, 0x12, }, 7, 0, "", "",
924"62 f3 7d 48 56 fe 12 \tvreduceps $0x12,%zmm6,%zmm7",},
925{{0x62, 0xf3, 0xfd, 0x48, 0x56, 0xfe, 0x12, }, 7, 0, "", "",
926"62 f3 fd 48 56 fe 12 \tvreducepd $0x12,%zmm6,%zmm7",},
927{{0x62, 0xf3, 0x4d, 0x08, 0x57, 0xfd, 0x12, }, 7, 0, "", "",
928"62 f3 4d 08 57 fd 12 \tvreducess $0x12,%xmm5,%xmm6,%xmm7",},
929{{0x62, 0xf3, 0xcd, 0x08, 0x57, 0xfd, 0x12, }, 7, 0, "", "",
930"62 f3 cd 08 57 fd 12 \tvreducesd $0x12,%xmm5,%xmm6,%xmm7",},
931{{0x62, 0xf3, 0x7d, 0x48, 0x66, 0xef, 0x12, }, 7, 0, "", "",
932"62 f3 7d 48 66 ef 12 \tvfpclassps $0x12,%zmm7,%k5",},
933{{0x62, 0xf3, 0xfd, 0x48, 0x66, 0xef, 0x12, }, 7, 0, "", "",
934"62 f3 fd 48 66 ef 12 \tvfpclasspd $0x12,%zmm7,%k5",},
935{{0x62, 0xf3, 0x7d, 0x08, 0x67, 0xef, 0x12, }, 7, 0, "", "",
936"62 f3 7d 08 67 ef 12 \tvfpclassss $0x12,%xmm7,%k5",},
937{{0x62, 0xf3, 0xfd, 0x08, 0x67, 0xef, 0x12, }, 7, 0, "", "",
938"62 f3 fd 08 67 ef 12 \tvfpclasssd $0x12,%xmm7,%k5",},
939{{0x62, 0xf1, 0x4d, 0x48, 0x72, 0xc5, 0x12, }, 7, 0, "", "",
940"62 f1 4d 48 72 c5 12 \tvprord $0x12,%zmm5,%zmm6",},
941{{0x62, 0xf1, 0xcd, 0x48, 0x72, 0xc5, 0x12, }, 7, 0, "", "",
942"62 f1 cd 48 72 c5 12 \tvprorq $0x12,%zmm5,%zmm6",},
943{{0x62, 0xf1, 0x4d, 0x48, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
944"62 f1 4d 48 72 cd 12 \tvprold $0x12,%zmm5,%zmm6",},
945{{0x62, 0xf1, 0xcd, 0x48, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
946"62 f1 cd 48 72 cd 12 \tvprolq $0x12,%zmm5,%zmm6",},
947{{0x0f, 0x72, 0xe6, 0x02, }, 4, 0, "", "",
948"0f 72 e6 02 \tpsrad $0x2,%mm6",},
949{{0xc5, 0xed, 0x72, 0xe6, 0x05, }, 5, 0, "", "",
950"c5 ed 72 e6 05 \tvpsrad $0x5,%ymm6,%ymm2",},
951{{0x62, 0xf1, 0x6d, 0x48, 0x72, 0xe6, 0x05, }, 7, 0, "", "",
952"62 f1 6d 48 72 e6 05 \tvpsrad $0x5,%zmm6,%zmm2",},
953{{0x62, 0xf1, 0xed, 0x48, 0x72, 0xe6, 0x05, }, 7, 0, "", "",
954"62 f1 ed 48 72 e6 05 \tvpsraq $0x5,%zmm6,%zmm2",},
955{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
956"62 f2 7d 49 c6 8c fd 7b 00 00 00 \tvgatherpf0dps 0x7b(%ebp,%zmm7,8){%k1}",},
957{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
958"62 f2 fd 49 c6 8c fd 7b 00 00 00 \tvgatherpf0dpd 0x7b(%ebp,%ymm7,8){%k1}",},
959{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
960"62 f2 7d 49 c6 94 fd 7b 00 00 00 \tvgatherpf1dps 0x7b(%ebp,%zmm7,8){%k1}",},
961{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
962"62 f2 fd 49 c6 94 fd 7b 00 00 00 \tvgatherpf1dpd 0x7b(%ebp,%ymm7,8){%k1}",},
963{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
964"62 f2 7d 49 c6 ac fd 7b 00 00 00 \tvscatterpf0dps 0x7b(%ebp,%zmm7,8){%k1}",},
965{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
966"62 f2 fd 49 c6 ac fd 7b 00 00 00 \tvscatterpf0dpd 0x7b(%ebp,%ymm7,8){%k1}",},
967{{0x62, 0xf2, 0x7d, 0x49, 0xc6, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
968"62 f2 7d 49 c6 b4 fd 7b 00 00 00 \tvscatterpf1dps 0x7b(%ebp,%zmm7,8){%k1}",},
969{{0x62, 0xf2, 0xfd, 0x49, 0xc6, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
970"62 f2 fd 49 c6 b4 fd 7b 00 00 00 \tvscatterpf1dpd 0x7b(%ebp,%ymm7,8){%k1}",},
971{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
972"62 f2 7d 49 c7 8c fd 7b 00 00 00 \tvgatherpf0qps 0x7b(%ebp,%zmm7,8){%k1}",},
973{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0x8c, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
974"62 f2 fd 49 c7 8c fd 7b 00 00 00 \tvgatherpf0qpd 0x7b(%ebp,%zmm7,8){%k1}",},
975{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
976"62 f2 7d 49 c7 94 fd 7b 00 00 00 \tvgatherpf1qps 0x7b(%ebp,%zmm7,8){%k1}",},
977{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0x94, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
978"62 f2 fd 49 c7 94 fd 7b 00 00 00 \tvgatherpf1qpd 0x7b(%ebp,%zmm7,8){%k1}",},
979{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
980"62 f2 7d 49 c7 ac fd 7b 00 00 00 \tvscatterpf0qps 0x7b(%ebp,%zmm7,8){%k1}",},
981{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0xac, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
982"62 f2 fd 49 c7 ac fd 7b 00 00 00 \tvscatterpf0qpd 0x7b(%ebp,%zmm7,8){%k1}",},
983{{0x62, 0xf2, 0x7d, 0x49, 0xc7, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
984"62 f2 7d 49 c7 b4 fd 7b 00 00 00 \tvscatterpf1qps 0x7b(%ebp,%zmm7,8){%k1}",},
985{{0x62, 0xf2, 0xfd, 0x49, 0xc7, 0xb4, 0xfd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
986"62 f2 fd 49 c7 b4 fd 7b 00 00 00 \tvscatterpf1qpd 0x7b(%ebp,%zmm7,8){%k1}",},
987{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0xf4, }, 6, 0, "", "",
988"62 f1 d5 48 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6",},
989{{0x62, 0xf1, 0xd5, 0x4f, 0x58, 0xf4, }, 6, 0, "", "",
990"62 f1 d5 4f 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6{%k7}",},
991{{0x62, 0xf1, 0xd5, 0xcf, 0x58, 0xf4, }, 6, 0, "", "",
992"62 f1 d5 cf 58 f4 \tvaddpd %zmm4,%zmm5,%zmm6{%k7}{z}",},
993{{0x62, 0xf1, 0xd5, 0x18, 0x58, 0xf4, }, 6, 0, "", "",
994"62 f1 d5 18 58 f4 \tvaddpd {rn-sae},%zmm4,%zmm5,%zmm6",},
995{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0xf4, }, 6, 0, "", "",
996"62 f1 d5 58 58 f4 \tvaddpd {ru-sae},%zmm4,%zmm5,%zmm6",},
997{{0x62, 0xf1, 0xd5, 0x38, 0x58, 0xf4, }, 6, 0, "", "",
998"62 f1 d5 38 58 f4 \tvaddpd {rd-sae},%zmm4,%zmm5,%zmm6",},
999{{0x62, 0xf1, 0xd5, 0x78, 0x58, 0xf4, }, 6, 0, "", "",
1000"62 f1 d5 78 58 f4 \tvaddpd {rz-sae},%zmm4,%zmm5,%zmm6",},
1001{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0x31, }, 6, 0, "", "",
1002"62 f1 d5 48 58 31 \tvaddpd (%ecx),%zmm5,%zmm6",},
1003{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0xb4, 0xc8, 0x23, 0x01, 0x00, 0x00, }, 11, 0, "", "",
1004"62 f1 d5 48 58 b4 c8 23 01 00 00 \tvaddpd 0x123(%eax,%ecx,8),%zmm5,%zmm6",},
1005{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0x31, }, 6, 0, "", "",
1006"62 f1 d5 58 58 31 \tvaddpd (%ecx){1to8},%zmm5,%zmm6",},
1007{{0x62, 0xf1, 0xd5, 0x48, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
1008"62 f1 d5 48 58 72 7f \tvaddpd 0x1fc0(%edx),%zmm5,%zmm6",},
1009{{0x62, 0xf1, 0xd5, 0x58, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
1010"62 f1 d5 58 58 72 7f \tvaddpd 0x3f8(%edx){1to8},%zmm5,%zmm6",},
1011{{0x62, 0xf1, 0x4c, 0x58, 0xc2, 0x6a, 0x7f, 0x08, }, 8, 0, "", "",
1012"62 f1 4c 58 c2 6a 7f 08 \tvcmpeq_uqps 0x1fc(%edx){1to16},%zmm6,%k5",},
1013{{0x62, 0xf1, 0xe7, 0x0f, 0xc2, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, 0x01, }, 12, 0, "", "",
1014"62 f1 e7 0f c2 ac c8 23 01 00 00 01 \tvcmpltsd 0x123(%eax,%ecx,8),%xmm3,%k5{%k7}",},
1015{{0x62, 0xf1, 0xd7, 0x1f, 0xc2, 0xec, 0x02, }, 7, 0, "", "",
1016"62 f1 d7 1f c2 ec 02 \tvcmplesd {sae},%xmm4,%xmm5,%k5{%k7}",},
1017{{0x62, 0xf3, 0x5d, 0x0f, 0x27, 0xac, 0xc8, 0x23, 0x01, 0x00, 0x00, 0x5b, }, 12, 0, "", "",
1018"62 f3 5d 0f 27 ac c8 23 01 00 00 5b \tvgetmantss $0x5b,0x123(%eax,%ecx,8),%xmm4,%xmm5{%k7}",},
9 1019{{0xf3, 0x0f, 0x1b, 0x00, }, 4, 0, "", "",
10 1020"f3 0f 1b 00 \tbndmk (%eax),%bnd0",},
11 1021{{0xf3, 0x0f, 0x1b, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
@@ -309,19 +1319,19 @@
309 1319{{0x0f, 0x1b, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
310 1320"0f 1b 84 08 78 56 34 12 \tbndstx %bnd0,0x12345678(%eax,%ecx,1)",},
311 1321{{0xf2, 0xe8, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "call", "unconditional",
312"f2 e8 fc ff ff ff \tbnd call 3c3 <main+0x3c3>",},
1322"f2 e8 fc ff ff ff \tbnd call fce <main+0xfce>",},
313 1323{{0xf2, 0xff, 0x10, }, 3, 0, "call", "indirect",
314 1324"f2 ff 10 \tbnd call *(%eax)",},
315 1325{{0xf2, 0xc3, }, 2, 0, "ret", "indirect",
316 1326"f2 c3 \tbnd ret ",},
317 1327{{0xf2, 0xe9, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "jmp", "unconditional",
318"f2 e9 fc ff ff ff \tbnd jmp 3ce <main+0x3ce>",},
1328"f2 e9 fc ff ff ff \tbnd jmp fd9 <main+0xfd9>",},
319 1329{{0xf2, 0xe9, 0xfc, 0xff, 0xff, 0xff, }, 6, 0xfffffffc, "jmp", "unconditional",
320"f2 e9 fc ff ff ff \tbnd jmp 3d4 <main+0x3d4>",},
1330"f2 e9 fc ff ff ff \tbnd jmp fdf <main+0xfdf>",},
321 1331{{0xf2, 0xff, 0x21, }, 3, 0, "jmp", "indirect",
322 1332"f2 ff 21 \tbnd jmp *(%ecx)",},
323 1333{{0xf2, 0x0f, 0x85, 0xfc, 0xff, 0xff, 0xff, }, 7, 0xfffffffc, "jcc", "conditional",
324"f2 0f 85 fc ff ff ff \tbnd jne 3de <main+0x3de>",},
1334"f2 0f 85 fc ff ff ff \tbnd jne fe9 <main+0xfe9>",},
325 1335{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
326 1336"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
327 1337{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
diff --git a/tools/perf/arch/x86/tests/insn-x86-dat-64.c b/tools/perf/arch/x86/tests/insn-x86-dat-64.c
index 4fe7cce179c4..9c8c61e06d5a 100644
--- a/tools/perf/arch/x86/tests/insn-x86-dat-64.c
+++ b/tools/perf/arch/x86/tests/insn-x86-dat-64.c
@@ -6,6 +6,938 @@
6 6
7 7{{0x0f, 0x31, }, 2, 0, "", "",
8 8"0f 31 \trdtsc ",},
9{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
10"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
11{{0x48, 0x0f, 0x41, 0xd8, }, 4, 0, "", "",
12"48 0f 41 d8 \tcmovno %rax,%rbx",},
13{{0x48, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
14"48 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%rax),%rcx",},
15{{0x66, 0x0f, 0x41, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
16"66 0f 41 88 78 56 34 12 \tcmovno 0x12345678(%rax),%cx",},
17{{0x48, 0x0f, 0x44, 0xd8, }, 4, 0, "", "",
18"48 0f 44 d8 \tcmove %rax,%rbx",},
19{{0x48, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
20"48 0f 44 88 78 56 34 12 \tcmove 0x12345678(%rax),%rcx",},
21{{0x66, 0x0f, 0x44, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
22"66 0f 44 88 78 56 34 12 \tcmove 0x12345678(%rax),%cx",},
23{{0x0f, 0x90, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
24"0f 90 80 78 56 34 12 \tseto 0x12345678(%rax)",},
25{{0x0f, 0x91, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
26"0f 91 80 78 56 34 12 \tsetno 0x12345678(%rax)",},
27{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
28"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
29{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
30"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
31{{0x0f, 0x92, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
32"0f 92 80 78 56 34 12 \tsetb 0x12345678(%rax)",},
33{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
34"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
35{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
36"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
37{{0x0f, 0x93, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
38"0f 93 80 78 56 34 12 \tsetae 0x12345678(%rax)",},
39{{0x0f, 0x98, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
40"0f 98 80 78 56 34 12 \tsets 0x12345678(%rax)",},
41{{0x0f, 0x99, 0x80, 0x78, 0x56, 0x34, 0x12, }, 7, 0, "", "",
42"0f 99 80 78 56 34 12 \tsetns 0x12345678(%rax)",},
43{{0xc5, 0xcc, 0x41, 0xef, }, 4, 0, "", "",
44"c5 cc 41 ef \tkandw %k7,%k6,%k5",},
45{{0xc4, 0xe1, 0xcc, 0x41, 0xef, }, 5, 0, "", "",
46"c4 e1 cc 41 ef \tkandq %k7,%k6,%k5",},
47{{0xc5, 0xcd, 0x41, 0xef, }, 4, 0, "", "",
48"c5 cd 41 ef \tkandb %k7,%k6,%k5",},
49{{0xc4, 0xe1, 0xcd, 0x41, 0xef, }, 5, 0, "", "",
50"c4 e1 cd 41 ef \tkandd %k7,%k6,%k5",},
51{{0xc5, 0xcc, 0x42, 0xef, }, 4, 0, "", "",
52"c5 cc 42 ef \tkandnw %k7,%k6,%k5",},
53{{0xc4, 0xe1, 0xcc, 0x42, 0xef, }, 5, 0, "", "",
54"c4 e1 cc 42 ef \tkandnq %k7,%k6,%k5",},
55{{0xc5, 0xcd, 0x42, 0xef, }, 4, 0, "", "",
56"c5 cd 42 ef \tkandnb %k7,%k6,%k5",},
57{{0xc4, 0xe1, 0xcd, 0x42, 0xef, }, 5, 0, "", "",
58"c4 e1 cd 42 ef \tkandnd %k7,%k6,%k5",},
59{{0xc5, 0xf8, 0x44, 0xf7, }, 4, 0, "", "",
60"c5 f8 44 f7 \tknotw %k7,%k6",},
61{{0xc4, 0xe1, 0xf8, 0x44, 0xf7, }, 5, 0, "", "",
62"c4 e1 f8 44 f7 \tknotq %k7,%k6",},
63{{0xc5, 0xf9, 0x44, 0xf7, }, 4, 0, "", "",
64"c5 f9 44 f7 \tknotb %k7,%k6",},
65{{0xc4, 0xe1, 0xf9, 0x44, 0xf7, }, 5, 0, "", "",
66"c4 e1 f9 44 f7 \tknotd %k7,%k6",},
67{{0xc5, 0xcc, 0x45, 0xef, }, 4, 0, "", "",
68"c5 cc 45 ef \tkorw %k7,%k6,%k5",},
69{{0xc4, 0xe1, 0xcc, 0x45, 0xef, }, 5, 0, "", "",
70"c4 e1 cc 45 ef \tkorq %k7,%k6,%k5",},
71{{0xc5, 0xcd, 0x45, 0xef, }, 4, 0, "", "",
72"c5 cd 45 ef \tkorb %k7,%k6,%k5",},
73{{0xc4, 0xe1, 0xcd, 0x45, 0xef, }, 5, 0, "", "",
74"c4 e1 cd 45 ef \tkord %k7,%k6,%k5",},
75{{0xc5, 0xcc, 0x46, 0xef, }, 4, 0, "", "",
76"c5 cc 46 ef \tkxnorw %k7,%k6,%k5",},
77{{0xc4, 0xe1, 0xcc, 0x46, 0xef, }, 5, 0, "", "",
78"c4 e1 cc 46 ef \tkxnorq %k7,%k6,%k5",},
79{{0xc5, 0xcd, 0x46, 0xef, }, 4, 0, "", "",
80"c5 cd 46 ef \tkxnorb %k7,%k6,%k5",},
81{{0xc4, 0xe1, 0xcd, 0x46, 0xef, }, 5, 0, "", "",
82"c4 e1 cd 46 ef \tkxnord %k7,%k6,%k5",},
83{{0xc5, 0xcc, 0x47, 0xef, }, 4, 0, "", "",
84"c5 cc 47 ef \tkxorw %k7,%k6,%k5",},
85{{0xc4, 0xe1, 0xcc, 0x47, 0xef, }, 5, 0, "", "",
86"c4 e1 cc 47 ef \tkxorq %k7,%k6,%k5",},
87{{0xc5, 0xcd, 0x47, 0xef, }, 4, 0, "", "",
88"c5 cd 47 ef \tkxorb %k7,%k6,%k5",},
89{{0xc4, 0xe1, 0xcd, 0x47, 0xef, }, 5, 0, "", "",
90"c4 e1 cd 47 ef \tkxord %k7,%k6,%k5",},
91{{0xc5, 0xcc, 0x4a, 0xef, }, 4, 0, "", "",
92"c5 cc 4a ef \tkaddw %k7,%k6,%k5",},
93{{0xc4, 0xe1, 0xcc, 0x4a, 0xef, }, 5, 0, "", "",
94"c4 e1 cc 4a ef \tkaddq %k7,%k6,%k5",},
95{{0xc5, 0xcd, 0x4a, 0xef, }, 4, 0, "", "",
96"c5 cd 4a ef \tkaddb %k7,%k6,%k5",},
97{{0xc4, 0xe1, 0xcd, 0x4a, 0xef, }, 5, 0, "", "",
98"c4 e1 cd 4a ef \tkaddd %k7,%k6,%k5",},
99{{0xc5, 0xcd, 0x4b, 0xef, }, 4, 0, "", "",
100"c5 cd 4b ef \tkunpckbw %k7,%k6,%k5",},
101{{0xc5, 0xcc, 0x4b, 0xef, }, 4, 0, "", "",
102"c5 cc 4b ef \tkunpckwd %k7,%k6,%k5",},
103{{0xc4, 0xe1, 0xcc, 0x4b, 0xef, }, 5, 0, "", "",
104"c4 e1 cc 4b ef \tkunpckdq %k7,%k6,%k5",},
105{{0xc5, 0xf8, 0x90, 0xee, }, 4, 0, "", "",
106"c5 f8 90 ee \tkmovw %k6,%k5",},
107{{0xc5, 0xf8, 0x90, 0x29, }, 4, 0, "", "",
108"c5 f8 90 29 \tkmovw (%rcx),%k5",},
109{{0xc4, 0xa1, 0x78, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
110"c4 a1 78 90 ac f0 23 01 00 00 \tkmovw 0x123(%rax,%r14,8),%k5",},
111{{0xc5, 0xf8, 0x91, 0x29, }, 4, 0, "", "",
112"c5 f8 91 29 \tkmovw %k5,(%rcx)",},
113{{0xc4, 0xa1, 0x78, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
114"c4 a1 78 91 ac f0 23 01 00 00 \tkmovw %k5,0x123(%rax,%r14,8)",},
115{{0xc5, 0xf8, 0x92, 0xe8, }, 4, 0, "", "",
116"c5 f8 92 e8 \tkmovw %eax,%k5",},
117{{0xc5, 0xf8, 0x92, 0xed, }, 4, 0, "", "",
118"c5 f8 92 ed \tkmovw %ebp,%k5",},
119{{0xc4, 0xc1, 0x78, 0x92, 0xed, }, 5, 0, "", "",
120"c4 c1 78 92 ed \tkmovw %r13d,%k5",},
121{{0xc5, 0xf8, 0x93, 0xc5, }, 4, 0, "", "",
122"c5 f8 93 c5 \tkmovw %k5,%eax",},
123{{0xc5, 0xf8, 0x93, 0xed, }, 4, 0, "", "",
124"c5 f8 93 ed \tkmovw %k5,%ebp",},
125{{0xc5, 0x78, 0x93, 0xed, }, 4, 0, "", "",
126"c5 78 93 ed \tkmovw %k5,%r13d",},
127{{0xc4, 0xe1, 0xf8, 0x90, 0xee, }, 5, 0, "", "",
128"c4 e1 f8 90 ee \tkmovq %k6,%k5",},
129{{0xc4, 0xe1, 0xf8, 0x90, 0x29, }, 5, 0, "", "",
130"c4 e1 f8 90 29 \tkmovq (%rcx),%k5",},
131{{0xc4, 0xa1, 0xf8, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
132"c4 a1 f8 90 ac f0 23 01 00 00 \tkmovq 0x123(%rax,%r14,8),%k5",},
133{{0xc4, 0xe1, 0xf8, 0x91, 0x29, }, 5, 0, "", "",
134"c4 e1 f8 91 29 \tkmovq %k5,(%rcx)",},
135{{0xc4, 0xa1, 0xf8, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
136"c4 a1 f8 91 ac f0 23 01 00 00 \tkmovq %k5,0x123(%rax,%r14,8)",},
137{{0xc4, 0xe1, 0xfb, 0x92, 0xe8, }, 5, 0, "", "",
138"c4 e1 fb 92 e8 \tkmovq %rax,%k5",},
139{{0xc4, 0xe1, 0xfb, 0x92, 0xed, }, 5, 0, "", "",
140"c4 e1 fb 92 ed \tkmovq %rbp,%k5",},
141{{0xc4, 0xc1, 0xfb, 0x92, 0xed, }, 5, 0, "", "",
142"c4 c1 fb 92 ed \tkmovq %r13,%k5",},
143{{0xc4, 0xe1, 0xfb, 0x93, 0xc5, }, 5, 0, "", "",
144"c4 e1 fb 93 c5 \tkmovq %k5,%rax",},
145{{0xc4, 0xe1, 0xfb, 0x93, 0xed, }, 5, 0, "", "",
146"c4 e1 fb 93 ed \tkmovq %k5,%rbp",},
147{{0xc4, 0x61, 0xfb, 0x93, 0xed, }, 5, 0, "", "",
148"c4 61 fb 93 ed \tkmovq %k5,%r13",},
149{{0xc5, 0xf9, 0x90, 0xee, }, 4, 0, "", "",
150"c5 f9 90 ee \tkmovb %k6,%k5",},
151{{0xc5, 0xf9, 0x90, 0x29, }, 4, 0, "", "",
152"c5 f9 90 29 \tkmovb (%rcx),%k5",},
153{{0xc4, 0xa1, 0x79, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
154"c4 a1 79 90 ac f0 23 01 00 00 \tkmovb 0x123(%rax,%r14,8),%k5",},
155{{0xc5, 0xf9, 0x91, 0x29, }, 4, 0, "", "",
156"c5 f9 91 29 \tkmovb %k5,(%rcx)",},
157{{0xc4, 0xa1, 0x79, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
158"c4 a1 79 91 ac f0 23 01 00 00 \tkmovb %k5,0x123(%rax,%r14,8)",},
159{{0xc5, 0xf9, 0x92, 0xe8, }, 4, 0, "", "",
160"c5 f9 92 e8 \tkmovb %eax,%k5",},
161{{0xc5, 0xf9, 0x92, 0xed, }, 4, 0, "", "",
162"c5 f9 92 ed \tkmovb %ebp,%k5",},
163{{0xc4, 0xc1, 0x79, 0x92, 0xed, }, 5, 0, "", "",
164"c4 c1 79 92 ed \tkmovb %r13d,%k5",},
165{{0xc5, 0xf9, 0x93, 0xc5, }, 4, 0, "", "",
166"c5 f9 93 c5 \tkmovb %k5,%eax",},
167{{0xc5, 0xf9, 0x93, 0xed, }, 4, 0, "", "",
168"c5 f9 93 ed \tkmovb %k5,%ebp",},
169{{0xc5, 0x79, 0x93, 0xed, }, 4, 0, "", "",
170"c5 79 93 ed \tkmovb %k5,%r13d",},
171{{0xc4, 0xe1, 0xf9, 0x90, 0xee, }, 5, 0, "", "",
172"c4 e1 f9 90 ee \tkmovd %k6,%k5",},
173{{0xc4, 0xe1, 0xf9, 0x90, 0x29, }, 5, 0, "", "",
174"c4 e1 f9 90 29 \tkmovd (%rcx),%k5",},
175{{0xc4, 0xa1, 0xf9, 0x90, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
176"c4 a1 f9 90 ac f0 23 01 00 00 \tkmovd 0x123(%rax,%r14,8),%k5",},
177{{0xc4, 0xe1, 0xf9, 0x91, 0x29, }, 5, 0, "", "",
178"c4 e1 f9 91 29 \tkmovd %k5,(%rcx)",},
179{{0xc4, 0xa1, 0xf9, 0x91, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 10, 0, "", "",
180"c4 a1 f9 91 ac f0 23 01 00 00 \tkmovd %k5,0x123(%rax,%r14,8)",},
181{{0xc5, 0xfb, 0x92, 0xe8, }, 4, 0, "", "",
182"c5 fb 92 e8 \tkmovd %eax,%k5",},
183{{0xc5, 0xfb, 0x92, 0xed, }, 4, 0, "", "",
184"c5 fb 92 ed \tkmovd %ebp,%k5",},
185{{0xc4, 0xc1, 0x7b, 0x92, 0xed, }, 5, 0, "", "",
186"c4 c1 7b 92 ed \tkmovd %r13d,%k5",},
187{{0xc5, 0xfb, 0x93, 0xc5, }, 4, 0, "", "",
188"c5 fb 93 c5 \tkmovd %k5,%eax",},
189{{0xc5, 0xfb, 0x93, 0xed, }, 4, 0, "", "",
190"c5 fb 93 ed \tkmovd %k5,%ebp",},
191{{0xc5, 0x7b, 0x93, 0xed, }, 4, 0, "", "",
192"c5 7b 93 ed \tkmovd %k5,%r13d",},
193{{0xc5, 0xf8, 0x98, 0xee, }, 4, 0, "", "",
194"c5 f8 98 ee \tkortestw %k6,%k5",},
195{{0xc4, 0xe1, 0xf8, 0x98, 0xee, }, 5, 0, "", "",
196"c4 e1 f8 98 ee \tkortestq %k6,%k5",},
197{{0xc5, 0xf9, 0x98, 0xee, }, 4, 0, "", "",
198"c5 f9 98 ee \tkortestb %k6,%k5",},
199{{0xc4, 0xe1, 0xf9, 0x98, 0xee, }, 5, 0, "", "",
200"c4 e1 f9 98 ee \tkortestd %k6,%k5",},
201{{0xc5, 0xf8, 0x99, 0xee, }, 4, 0, "", "",
202"c5 f8 99 ee \tktestw %k6,%k5",},
203{{0xc4, 0xe1, 0xf8, 0x99, 0xee, }, 5, 0, "", "",
204"c4 e1 f8 99 ee \tktestq %k6,%k5",},
205{{0xc5, 0xf9, 0x99, 0xee, }, 4, 0, "", "",
206"c5 f9 99 ee \tktestb %k6,%k5",},
207{{0xc4, 0xe1, 0xf9, 0x99, 0xee, }, 5, 0, "", "",
208"c4 e1 f9 99 ee \tktestd %k6,%k5",},
209{{0xc4, 0xe3, 0xf9, 0x30, 0xee, 0x12, }, 6, 0, "", "",
210"c4 e3 f9 30 ee 12 \tkshiftrw $0x12,%k6,%k5",},
211{{0xc4, 0xe3, 0xf9, 0x31, 0xee, 0x5b, }, 6, 0, "", "",
212"c4 e3 f9 31 ee 5b \tkshiftrq $0x5b,%k6,%k5",},
213{{0xc4, 0xe3, 0xf9, 0x32, 0xee, 0x12, }, 6, 0, "", "",
214"c4 e3 f9 32 ee 12 \tkshiftlw $0x12,%k6,%k5",},
215{{0xc4, 0xe3, 0xf9, 0x33, 0xee, 0x5b, }, 6, 0, "", "",
216"c4 e3 f9 33 ee 5b \tkshiftlq $0x5b,%k6,%k5",},
217{{0xc5, 0xf8, 0x5b, 0xf5, }, 4, 0, "", "",
218"c5 f8 5b f5 \tvcvtdq2ps %xmm5,%xmm6",},
219{{0x62, 0x91, 0xfc, 0x4f, 0x5b, 0xf5, }, 6, 0, "", "",
220"62 91 fc 4f 5b f5 \tvcvtqq2ps %zmm29,%ymm6{%k7}",},
221{{0xc5, 0xf9, 0x5b, 0xf5, }, 4, 0, "", "",
222"c5 f9 5b f5 \tvcvtps2dq %xmm5,%xmm6",},
223{{0xc5, 0xfa, 0x5b, 0xf5, }, 4, 0, "", "",
224"c5 fa 5b f5 \tvcvttps2dq %xmm5,%xmm6",},
225{{0x0f, 0x6f, 0xe0, }, 3, 0, "", "",
226"0f 6f e0 \tmovq %mm0,%mm4",},
227{{0xc5, 0xfd, 0x6f, 0xf4, }, 4, 0, "", "",
228"c5 fd 6f f4 \tvmovdqa %ymm4,%ymm6",},
229{{0x62, 0x01, 0x7d, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
230"62 01 7d 48 6f d1 \tvmovdqa32 %zmm25,%zmm26",},
231{{0x62, 0x01, 0xfd, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
232"62 01 fd 48 6f d1 \tvmovdqa64 %zmm25,%zmm26",},
233{{0xc5, 0xfe, 0x6f, 0xf4, }, 4, 0, "", "",
234"c5 fe 6f f4 \tvmovdqu %ymm4,%ymm6",},
235{{0x62, 0x01, 0x7e, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
236"62 01 7e 48 6f f5 \tvmovdqu32 %zmm29,%zmm30",},
237{{0x62, 0x01, 0xfe, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
238"62 01 fe 48 6f d1 \tvmovdqu64 %zmm25,%zmm26",},
239{{0x62, 0x01, 0x7f, 0x48, 0x6f, 0xf5, }, 6, 0, "", "",
240"62 01 7f 48 6f f5 \tvmovdqu8 %zmm29,%zmm30",},
241{{0x62, 0x01, 0xff, 0x48, 0x6f, 0xd1, }, 6, 0, "", "",
242"62 01 ff 48 6f d1 \tvmovdqu16 %zmm25,%zmm26",},
243{{0x0f, 0x78, 0xc3, }, 3, 0, "", "",
244"0f 78 c3 \tvmread %rax,%rbx",},
245{{0x62, 0x01, 0x7c, 0x48, 0x78, 0xd1, }, 6, 0, "", "",
246"62 01 7c 48 78 d1 \tvcvttps2udq %zmm25,%zmm26",},
247{{0x62, 0x91, 0xfc, 0x4f, 0x78, 0xf5, }, 6, 0, "", "",
248"62 91 fc 4f 78 f5 \tvcvttpd2udq %zmm29,%ymm6{%k7}",},
249{{0x62, 0xf1, 0xff, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
250"62 f1 ff 08 78 c6 \tvcvttsd2usi %xmm6,%rax",},
251{{0x62, 0xf1, 0xfe, 0x08, 0x78, 0xc6, }, 6, 0, "", "",
252"62 f1 fe 08 78 c6 \tvcvttss2usi %xmm6,%rax",},
253{{0x62, 0x61, 0x7d, 0x4f, 0x78, 0xd5, }, 6, 0, "", "",
254"62 61 7d 4f 78 d5 \tvcvttps2uqq %ymm5,%zmm26{%k7}",},
255{{0x62, 0x01, 0xfd, 0x48, 0x78, 0xf5, }, 6, 0, "", "",
256"62 01 fd 48 78 f5 \tvcvttpd2uqq %zmm29,%zmm30",},
257{{0x0f, 0x79, 0xd8, }, 3, 0, "", "",
258"0f 79 d8 \tvmwrite %rax,%rbx",},
259{{0x62, 0x01, 0x7c, 0x48, 0x79, 0xd1, }, 6, 0, "", "",
260"62 01 7c 48 79 d1 \tvcvtps2udq %zmm25,%zmm26",},
261{{0x62, 0x91, 0xfc, 0x4f, 0x79, 0xf5, }, 6, 0, "", "",
262"62 91 fc 4f 79 f5 \tvcvtpd2udq %zmm29,%ymm6{%k7}",},
263{{0x62, 0xf1, 0xff, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
264"62 f1 ff 08 79 c6 \tvcvtsd2usi %xmm6,%rax",},
265{{0x62, 0xf1, 0xfe, 0x08, 0x79, 0xc6, }, 6, 0, "", "",
266"62 f1 fe 08 79 c6 \tvcvtss2usi %xmm6,%rax",},
267{{0x62, 0x61, 0x7d, 0x4f, 0x79, 0xd5, }, 6, 0, "", "",
268"62 61 7d 4f 79 d5 \tvcvtps2uqq %ymm5,%zmm26{%k7}",},
269{{0x62, 0x01, 0xfd, 0x48, 0x79, 0xf5, }, 6, 0, "", "",
270"62 01 fd 48 79 f5 \tvcvtpd2uqq %zmm29,%zmm30",},
271{{0x62, 0x61, 0x7e, 0x4f, 0x7a, 0xed, }, 6, 0, "", "",
272"62 61 7e 4f 7a ed \tvcvtudq2pd %ymm5,%zmm29{%k7}",},
273{{0x62, 0x01, 0xfe, 0x48, 0x7a, 0xd1, }, 6, 0, "", "",
274"62 01 fe 48 7a d1 \tvcvtuqq2pd %zmm25,%zmm26",},
275{{0x62, 0x01, 0x7f, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
276"62 01 7f 48 7a f5 \tvcvtudq2ps %zmm29,%zmm30",},
277{{0x62, 0x01, 0xff, 0x4f, 0x7a, 0xd1, }, 6, 0, "", "",
278"62 01 ff 4f 7a d1 \tvcvtuqq2ps %zmm25,%ymm26{%k7}",},
279{{0x62, 0x01, 0x7d, 0x4f, 0x7a, 0xd1, }, 6, 0, "", "",
280"62 01 7d 4f 7a d1 \tvcvttps2qq %ymm25,%zmm26{%k7}",},
281{{0x62, 0x01, 0xfd, 0x48, 0x7a, 0xf5, }, 6, 0, "", "",
282"62 01 fd 48 7a f5 \tvcvttpd2qq %zmm29,%zmm30",},
283{{0x62, 0xf1, 0x57, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
284"62 f1 57 08 7b f0 \tvcvtusi2sd %eax,%xmm5,%xmm6",},
285{{0x62, 0xf1, 0x56, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
286"62 f1 56 08 7b f0 \tvcvtusi2ss %eax,%xmm5,%xmm6",},
287{{0x62, 0x61, 0x7d, 0x4f, 0x7b, 0xd5, }, 6, 0, "", "",
288"62 61 7d 4f 7b d5 \tvcvtps2qq %ymm5,%zmm26{%k7}",},
289{{0x62, 0x01, 0xfd, 0x48, 0x7b, 0xf5, }, 6, 0, "", "",
290"62 01 fd 48 7b f5 \tvcvtpd2qq %zmm29,%zmm30",},
291{{0x0f, 0x7f, 0xc4, }, 3, 0, "", "",
292"0f 7f c4 \tmovq %mm0,%mm4",},
293{{0xc5, 0x7d, 0x7f, 0xc6, }, 4, 0, "", "",
294"c5 7d 7f c6 \tvmovdqa %ymm8,%ymm6",},
295{{0x62, 0x01, 0x7d, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
296"62 01 7d 48 7f ca \tvmovdqa32 %zmm25,%zmm26",},
297{{0x62, 0x01, 0xfd, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
298"62 01 fd 48 7f ca \tvmovdqa64 %zmm25,%zmm26",},
299{{0xc5, 0x7e, 0x7f, 0xc6, }, 4, 0, "", "",
300"c5 7e 7f c6 \tvmovdqu %ymm8,%ymm6",},
301{{0x62, 0x01, 0x7e, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
302"62 01 7e 48 7f ca \tvmovdqu32 %zmm25,%zmm26",},
303{{0x62, 0x01, 0xfe, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
304"62 01 fe 48 7f ca \tvmovdqu64 %zmm25,%zmm26",},
305{{0x62, 0x61, 0x7f, 0x48, 0x7f, 0x31, }, 6, 0, "", "",
306"62 61 7f 48 7f 31 \tvmovdqu8 %zmm30,(%rcx)",},
307{{0x62, 0x01, 0xff, 0x48, 0x7f, 0xca, }, 6, 0, "", "",
308"62 01 ff 48 7f ca \tvmovdqu16 %zmm25,%zmm26",},
309{{0x0f, 0xdb, 0xd1, }, 3, 0, "", "",
310"0f db d1 \tpand %mm1,%mm2",},
311{{0x66, 0x0f, 0xdb, 0xd1, }, 4, 0, "", "",
312"66 0f db d1 \tpand %xmm1,%xmm2",},
313{{0xc5, 0xcd, 0xdb, 0xd4, }, 4, 0, "", "",
314"c5 cd db d4 \tvpand %ymm4,%ymm6,%ymm2",},
315{{0x62, 0x01, 0x35, 0x40, 0xdb, 0xd0, }, 6, 0, "", "",
316"62 01 35 40 db d0 \tvpandd %zmm24,%zmm25,%zmm26",},
317{{0x62, 0x01, 0xb5, 0x40, 0xdb, 0xd0, }, 6, 0, "", "",
318"62 01 b5 40 db d0 \tvpandq %zmm24,%zmm25,%zmm26",},
319{{0x0f, 0xdf, 0xd1, }, 3, 0, "", "",
320"0f df d1 \tpandn %mm1,%mm2",},
321{{0x66, 0x0f, 0xdf, 0xd1, }, 4, 0, "", "",
322"66 0f df d1 \tpandn %xmm1,%xmm2",},
323{{0xc5, 0xcd, 0xdf, 0xd4, }, 4, 0, "", "",
324"c5 cd df d4 \tvpandn %ymm4,%ymm6,%ymm2",},
325{{0x62, 0x01, 0x35, 0x40, 0xdf, 0xd0, }, 6, 0, "", "",
326"62 01 35 40 df d0 \tvpandnd %zmm24,%zmm25,%zmm26",},
327{{0x62, 0x01, 0xb5, 0x40, 0xdf, 0xd0, }, 6, 0, "", "",
328"62 01 b5 40 df d0 \tvpandnq %zmm24,%zmm25,%zmm26",},
329{{0xc5, 0xf9, 0xe6, 0xd1, }, 4, 0, "", "",
330"c5 f9 e6 d1 \tvcvttpd2dq %xmm1,%xmm2",},
331{{0xc5, 0xfa, 0xe6, 0xf5, }, 4, 0, "", "",
332"c5 fa e6 f5 \tvcvtdq2pd %xmm5,%xmm6",},
333{{0x62, 0x61, 0x7e, 0x4f, 0xe6, 0xd5, }, 6, 0, "", "",
334"62 61 7e 4f e6 d5 \tvcvtdq2pd %ymm5,%zmm26{%k7}",},
335{{0x62, 0x01, 0xfe, 0x48, 0xe6, 0xd1, }, 6, 0, "", "",
336"62 01 fe 48 e6 d1 \tvcvtqq2pd %zmm25,%zmm26",},
337{{0xc5, 0xfb, 0xe6, 0xd1, }, 4, 0, "", "",
338"c5 fb e6 d1 \tvcvtpd2dq %xmm1,%xmm2",},
339{{0x0f, 0xeb, 0xf4, }, 3, 0, "", "",
340"0f eb f4 \tpor %mm4,%mm6",},
341{{0xc5, 0xcd, 0xeb, 0xd4, }, 4, 0, "", "",
342"c5 cd eb d4 \tvpor %ymm4,%ymm6,%ymm2",},
343{{0x62, 0x01, 0x35, 0x40, 0xeb, 0xd0, }, 6, 0, "", "",
344"62 01 35 40 eb d0 \tvpord %zmm24,%zmm25,%zmm26",},
345{{0x62, 0x01, 0xb5, 0x40, 0xeb, 0xd0, }, 6, 0, "", "",
346"62 01 b5 40 eb d0 \tvporq %zmm24,%zmm25,%zmm26",},
347{{0x0f, 0xef, 0xf4, }, 3, 0, "", "",
348"0f ef f4 \tpxor %mm4,%mm6",},
349{{0xc5, 0xcd, 0xef, 0xd4, }, 4, 0, "", "",
350"c5 cd ef d4 \tvpxor %ymm4,%ymm6,%ymm2",},
351{{0x62, 0x01, 0x35, 0x40, 0xef, 0xd0, }, 6, 0, "", "",
352"62 01 35 40 ef d0 \tvpxord %zmm24,%zmm25,%zmm26",},
353{{0x62, 0x01, 0xb5, 0x40, 0xef, 0xd0, }, 6, 0, "", "",
354"62 01 b5 40 ef d0 \tvpxorq %zmm24,%zmm25,%zmm26",},
355{{0x66, 0x0f, 0x38, 0x10, 0xc1, }, 5, 0, "", "",
356"66 0f 38 10 c1 \tpblendvb %xmm0,%xmm1,%xmm0",},
357{{0x62, 0x02, 0x9d, 0x40, 0x10, 0xeb, }, 6, 0, "", "",
358"62 02 9d 40 10 eb \tvpsrlvw %zmm27,%zmm28,%zmm29",},
359{{0x62, 0x62, 0x7e, 0x4f, 0x10, 0xe6, }, 6, 0, "", "",
360"62 62 7e 4f 10 e6 \tvpmovuswb %zmm28,%ymm6{%k7}",},
361{{0x62, 0x62, 0x7e, 0x4f, 0x11, 0xe6, }, 6, 0, "", "",
362"62 62 7e 4f 11 e6 \tvpmovusdb %zmm28,%xmm6{%k7}",},
363{{0x62, 0x02, 0x9d, 0x40, 0x11, 0xeb, }, 6, 0, "", "",
364"62 02 9d 40 11 eb \tvpsravw %zmm27,%zmm28,%zmm29",},
365{{0x62, 0x62, 0x7e, 0x4f, 0x12, 0xde, }, 6, 0, "", "",
366"62 62 7e 4f 12 de \tvpmovusqb %zmm27,%xmm6{%k7}",},
367{{0x62, 0x02, 0x9d, 0x40, 0x12, 0xeb, }, 6, 0, "", "",
368"62 02 9d 40 12 eb \tvpsllvw %zmm27,%zmm28,%zmm29",},
369{{0xc4, 0xe2, 0x7d, 0x13, 0xeb, }, 5, 0, "", "",
370"c4 e2 7d 13 eb \tvcvtph2ps %xmm3,%ymm5",},
371{{0x62, 0x62, 0x7d, 0x4f, 0x13, 0xdd, }, 6, 0, "", "",
372"62 62 7d 4f 13 dd \tvcvtph2ps %ymm5,%zmm27{%k7}",},
373{{0x62, 0x62, 0x7e, 0x4f, 0x13, 0xde, }, 6, 0, "", "",
374"62 62 7e 4f 13 de \tvpmovusdw %zmm27,%ymm6{%k7}",},
375{{0x66, 0x0f, 0x38, 0x14, 0xc1, }, 5, 0, "", "",
376"66 0f 38 14 c1 \tblendvps %xmm0,%xmm1,%xmm0",},
377{{0x62, 0x62, 0x7e, 0x4f, 0x14, 0xde, }, 6, 0, "", "",
378"62 62 7e 4f 14 de \tvpmovusqw %zmm27,%xmm6{%k7}",},
379{{0x62, 0x02, 0x1d, 0x40, 0x14, 0xeb, }, 6, 0, "", "",
380"62 02 1d 40 14 eb \tvprorvd %zmm27,%zmm28,%zmm29",},
381{{0x62, 0x02, 0x9d, 0x40, 0x14, 0xeb, }, 6, 0, "", "",
382"62 02 9d 40 14 eb \tvprorvq %zmm27,%zmm28,%zmm29",},
383{{0x66, 0x0f, 0x38, 0x15, 0xc1, }, 5, 0, "", "",
384"66 0f 38 15 c1 \tblendvpd %xmm0,%xmm1,%xmm0",},
385{{0x62, 0x62, 0x7e, 0x4f, 0x15, 0xde, }, 6, 0, "", "",
386"62 62 7e 4f 15 de \tvpmovusqd %zmm27,%ymm6{%k7}",},
387{{0x62, 0x02, 0x1d, 0x40, 0x15, 0xeb, }, 6, 0, "", "",
388"62 02 1d 40 15 eb \tvprolvd %zmm27,%zmm28,%zmm29",},
389{{0x62, 0x02, 0x9d, 0x40, 0x15, 0xeb, }, 6, 0, "", "",
390"62 02 9d 40 15 eb \tvprolvq %zmm27,%zmm28,%zmm29",},
391{{0xc4, 0xe2, 0x4d, 0x16, 0xd4, }, 5, 0, "", "",
392"c4 e2 4d 16 d4 \tvpermps %ymm4,%ymm6,%ymm2",},
393{{0x62, 0x82, 0x2d, 0x27, 0x16, 0xf0, }, 6, 0, "", "",
394"62 82 2d 27 16 f0 \tvpermps %ymm24,%ymm26,%ymm22{%k7}",},
395{{0x62, 0x82, 0xad, 0x27, 0x16, 0xf0, }, 6, 0, "", "",
396"62 82 ad 27 16 f0 \tvpermpd %ymm24,%ymm26,%ymm22{%k7}",},
397{{0xc4, 0xe2, 0x7d, 0x19, 0xf4, }, 5, 0, "", "",
398"c4 e2 7d 19 f4 \tvbroadcastsd %xmm4,%ymm6",},
399{{0x62, 0x02, 0x7d, 0x48, 0x19, 0xd3, }, 6, 0, "", "",
400"62 02 7d 48 19 d3 \tvbroadcastf32x2 %xmm27,%zmm26",},
401{{0xc4, 0xe2, 0x7d, 0x1a, 0x21, }, 5, 0, "", "",
402"c4 e2 7d 1a 21 \tvbroadcastf128 (%rcx),%ymm4",},
403{{0x62, 0x62, 0x7d, 0x48, 0x1a, 0x11, }, 6, 0, "", "",
404"62 62 7d 48 1a 11 \tvbroadcastf32x4 (%rcx),%zmm26",},
405{{0x62, 0x62, 0xfd, 0x48, 0x1a, 0x11, }, 6, 0, "", "",
406"62 62 fd 48 1a 11 \tvbroadcastf64x2 (%rcx),%zmm26",},
407{{0x62, 0x62, 0x7d, 0x48, 0x1b, 0x19, }, 6, 0, "", "",
408"62 62 7d 48 1b 19 \tvbroadcastf32x8 (%rcx),%zmm27",},
409{{0x62, 0x62, 0xfd, 0x48, 0x1b, 0x11, }, 6, 0, "", "",
410"62 62 fd 48 1b 11 \tvbroadcastf64x4 (%rcx),%zmm26",},
411{{0x62, 0x02, 0xfd, 0x48, 0x1f, 0xe3, }, 6, 0, "", "",
412"62 02 fd 48 1f e3 \tvpabsq %zmm27,%zmm28",},
413{{0xc4, 0xe2, 0x79, 0x20, 0xec, }, 5, 0, "", "",
414"c4 e2 79 20 ec \tvpmovsxbw %xmm4,%xmm5",},
415{{0x62, 0x62, 0x7e, 0x4f, 0x20, 0xde, }, 6, 0, "", "",
416"62 62 7e 4f 20 de \tvpmovswb %zmm27,%ymm6{%k7}",},
417{{0xc4, 0xe2, 0x7d, 0x21, 0xf4, }, 5, 0, "", "",
418"c4 e2 7d 21 f4 \tvpmovsxbd %xmm4,%ymm6",},
419{{0x62, 0x62, 0x7e, 0x4f, 0x21, 0xde, }, 6, 0, "", "",
420"62 62 7e 4f 21 de \tvpmovsdb %zmm27,%xmm6{%k7}",},
421{{0xc4, 0xe2, 0x7d, 0x22, 0xe4, }, 5, 0, "", "",
422"c4 e2 7d 22 e4 \tvpmovsxbq %xmm4,%ymm4",},
423{{0x62, 0x62, 0x7e, 0x4f, 0x22, 0xde, }, 6, 0, "", "",
424"62 62 7e 4f 22 de \tvpmovsqb %zmm27,%xmm6{%k7}",},
425{{0xc4, 0xe2, 0x7d, 0x23, 0xe4, }, 5, 0, "", "",
426"c4 e2 7d 23 e4 \tvpmovsxwd %xmm4,%ymm4",},
427{{0x62, 0x62, 0x7e, 0x4f, 0x23, 0xde, }, 6, 0, "", "",
428"62 62 7e 4f 23 de \tvpmovsdw %zmm27,%ymm6{%k7}",},
429{{0xc4, 0xe2, 0x7d, 0x24, 0xf4, }, 5, 0, "", "",
430"c4 e2 7d 24 f4 \tvpmovsxwq %xmm4,%ymm6",},
431{{0x62, 0x62, 0x7e, 0x4f, 0x24, 0xde, }, 6, 0, "", "",
432"62 62 7e 4f 24 de \tvpmovsqw %zmm27,%xmm6{%k7}",},
433{{0xc4, 0xe2, 0x7d, 0x25, 0xe4, }, 5, 0, "", "",
434"c4 e2 7d 25 e4 \tvpmovsxdq %xmm4,%ymm4",},
435{{0x62, 0x62, 0x7e, 0x4f, 0x25, 0xde, }, 6, 0, "", "",
436"62 62 7e 4f 25 de \tvpmovsqd %zmm27,%ymm6{%k7}",},
437{{0x62, 0x92, 0x1d, 0x40, 0x26, 0xeb, }, 6, 0, "", "",
438"62 92 1d 40 26 eb \tvptestmb %zmm27,%zmm28,%k5",},
439{{0x62, 0x92, 0x9d, 0x40, 0x26, 0xeb, }, 6, 0, "", "",
440"62 92 9d 40 26 eb \tvptestmw %zmm27,%zmm28,%k5",},
441{{0x62, 0x92, 0x26, 0x40, 0x26, 0xea, }, 6, 0, "", "",
442"62 92 26 40 26 ea \tvptestnmb %zmm26,%zmm27,%k5",},
443{{0x62, 0x92, 0xa6, 0x40, 0x26, 0xea, }, 6, 0, "", "",
444"62 92 a6 40 26 ea \tvptestnmw %zmm26,%zmm27,%k5",},
445{{0x62, 0x92, 0x1d, 0x40, 0x27, 0xeb, }, 6, 0, "", "",
446"62 92 1d 40 27 eb \tvptestmd %zmm27,%zmm28,%k5",},
447{{0x62, 0x92, 0x9d, 0x40, 0x27, 0xeb, }, 6, 0, "", "",
448"62 92 9d 40 27 eb \tvptestmq %zmm27,%zmm28,%k5",},
449{{0x62, 0x92, 0x26, 0x40, 0x27, 0xea, }, 6, 0, "", "",
450"62 92 26 40 27 ea \tvptestnmd %zmm26,%zmm27,%k5",},
451{{0x62, 0x92, 0xa6, 0x40, 0x27, 0xea, }, 6, 0, "", "",
452"62 92 a6 40 27 ea \tvptestnmq %zmm26,%zmm27,%k5",},
453{{0xc4, 0xe2, 0x4d, 0x28, 0xd4, }, 5, 0, "", "",
454"c4 e2 4d 28 d4 \tvpmuldq %ymm4,%ymm6,%ymm2",},
455{{0x62, 0x62, 0x7e, 0x48, 0x28, 0xe5, }, 6, 0, "", "",
456"62 62 7e 48 28 e5 \tvpmovm2b %k5,%zmm28",},
457{{0x62, 0x62, 0xfe, 0x48, 0x28, 0xe5, }, 6, 0, "", "",
458"62 62 fe 48 28 e5 \tvpmovm2w %k5,%zmm28",},
459{{0xc4, 0xe2, 0x4d, 0x29, 0xd4, }, 5, 0, "", "",
460"c4 e2 4d 29 d4 \tvpcmpeqq %ymm4,%ymm6,%ymm2",},
461{{0x62, 0x92, 0x7e, 0x48, 0x29, 0xec, }, 6, 0, "", "",
462"62 92 7e 48 29 ec \tvpmovb2m %zmm28,%k5",},
463{{0x62, 0x92, 0xfe, 0x48, 0x29, 0xec, }, 6, 0, "", "",
464"62 92 fe 48 29 ec \tvpmovw2m %zmm28,%k5",},
465{{0xc4, 0xe2, 0x7d, 0x2a, 0x21, }, 5, 0, "", "",
466"c4 e2 7d 2a 21 \tvmovntdqa (%rcx),%ymm4",},
467{{0x62, 0x62, 0xfe, 0x48, 0x2a, 0xf6, }, 6, 0, "", "",
468"62 62 fe 48 2a f6 \tvpbroadcastmb2q %k6,%zmm30",},
469{{0xc4, 0xe2, 0x5d, 0x2c, 0x31, }, 5, 0, "", "",
470"c4 e2 5d 2c 31 \tvmaskmovps (%rcx),%ymm4,%ymm6",},
471{{0x62, 0x02, 0x35, 0x40, 0x2c, 0xd0, }, 6, 0, "", "",
472"62 02 35 40 2c d0 \tvscalefps %zmm24,%zmm25,%zmm26",},
473{{0x62, 0x02, 0xb5, 0x40, 0x2c, 0xd0, }, 6, 0, "", "",
474"62 02 b5 40 2c d0 \tvscalefpd %zmm24,%zmm25,%zmm26",},
475{{0xc4, 0xe2, 0x5d, 0x2d, 0x31, }, 5, 0, "", "",
476"c4 e2 5d 2d 31 \tvmaskmovpd (%rcx),%ymm4,%ymm6",},
477{{0x62, 0x02, 0x35, 0x07, 0x2d, 0xd0, }, 6, 0, "", "",
478"62 02 35 07 2d d0 \tvscalefss %xmm24,%xmm25,%xmm26{%k7}",},
479{{0x62, 0x02, 0xb5, 0x07, 0x2d, 0xd0, }, 6, 0, "", "",
480"62 02 b5 07 2d d0 \tvscalefsd %xmm24,%xmm25,%xmm26{%k7}",},
481{{0xc4, 0xe2, 0x7d, 0x30, 0xe4, }, 5, 0, "", "",
482"c4 e2 7d 30 e4 \tvpmovzxbw %xmm4,%ymm4",},
483{{0x62, 0x62, 0x7e, 0x4f, 0x30, 0xde, }, 6, 0, "", "",
484"62 62 7e 4f 30 de \tvpmovwb %zmm27,%ymm6{%k7}",},
485{{0xc4, 0xe2, 0x7d, 0x31, 0xf4, }, 5, 0, "", "",
486"c4 e2 7d 31 f4 \tvpmovzxbd %xmm4,%ymm6",},
487{{0x62, 0x62, 0x7e, 0x4f, 0x31, 0xde, }, 6, 0, "", "",
488"62 62 7e 4f 31 de \tvpmovdb %zmm27,%xmm6{%k7}",},
489{{0xc4, 0xe2, 0x7d, 0x32, 0xe4, }, 5, 0, "", "",
490"c4 e2 7d 32 e4 \tvpmovzxbq %xmm4,%ymm4",},
491{{0x62, 0x62, 0x7e, 0x4f, 0x32, 0xde, }, 6, 0, "", "",
492"62 62 7e 4f 32 de \tvpmovqb %zmm27,%xmm6{%k7}",},
493{{0xc4, 0xe2, 0x7d, 0x33, 0xe4, }, 5, 0, "", "",
494"c4 e2 7d 33 e4 \tvpmovzxwd %xmm4,%ymm4",},
495{{0x62, 0x62, 0x7e, 0x4f, 0x33, 0xde, }, 6, 0, "", "",
496"62 62 7e 4f 33 de \tvpmovdw %zmm27,%ymm6{%k7}",},
497{{0xc4, 0xe2, 0x7d, 0x34, 0xf4, }, 5, 0, "", "",
498"c4 e2 7d 34 f4 \tvpmovzxwq %xmm4,%ymm6",},
499{{0x62, 0x62, 0x7e, 0x4f, 0x34, 0xde, }, 6, 0, "", "",
500"62 62 7e 4f 34 de \tvpmovqw %zmm27,%xmm6{%k7}",},
501{{0xc4, 0xe2, 0x7d, 0x35, 0xe4, }, 5, 0, "", "",
502"c4 e2 7d 35 e4 \tvpmovzxdq %xmm4,%ymm4",},
503{{0x62, 0x62, 0x7e, 0x4f, 0x35, 0xde, }, 6, 0, "", "",
504"62 62 7e 4f 35 de \tvpmovqd %zmm27,%ymm6{%k7}",},
505{{0xc4, 0xe2, 0x4d, 0x36, 0xd4, }, 5, 0, "", "",
506"c4 e2 4d 36 d4 \tvpermd %ymm4,%ymm6,%ymm2",},
507{{0x62, 0x82, 0x2d, 0x27, 0x36, 0xf0, }, 6, 0, "", "",
508"62 82 2d 27 36 f0 \tvpermd %ymm24,%ymm26,%ymm22{%k7}",},
509{{0x62, 0x82, 0xad, 0x27, 0x36, 0xf0, }, 6, 0, "", "",
510"62 82 ad 27 36 f0 \tvpermq %ymm24,%ymm26,%ymm22{%k7}",},
511{{0xc4, 0xe2, 0x4d, 0x38, 0xd4, }, 5, 0, "", "",
512"c4 e2 4d 38 d4 \tvpminsb %ymm4,%ymm6,%ymm2",},
513{{0x62, 0x62, 0x7e, 0x48, 0x38, 0xe5, }, 6, 0, "", "",
514"62 62 7e 48 38 e5 \tvpmovm2d %k5,%zmm28",},
515{{0x62, 0x62, 0xfe, 0x48, 0x38, 0xe5, }, 6, 0, "", "",
516"62 62 fe 48 38 e5 \tvpmovm2q %k5,%zmm28",},
517{{0xc4, 0xe2, 0x69, 0x39, 0xd9, }, 5, 0, "", "",
518"c4 e2 69 39 d9 \tvpminsd %xmm1,%xmm2,%xmm3",},
519{{0x62, 0x02, 0x35, 0x40, 0x39, 0xd0, }, 6, 0, "", "",
520"62 02 35 40 39 d0 \tvpminsd %zmm24,%zmm25,%zmm26",},
521{{0x62, 0x02, 0xb5, 0x40, 0x39, 0xd0, }, 6, 0, "", "",
522"62 02 b5 40 39 d0 \tvpminsq %zmm24,%zmm25,%zmm26",},
523{{0x62, 0x92, 0x7e, 0x48, 0x39, 0xec, }, 6, 0, "", "",
524"62 92 7e 48 39 ec \tvpmovd2m %zmm28,%k5",},
525{{0x62, 0x92, 0xfe, 0x48, 0x39, 0xec, }, 6, 0, "", "",
526"62 92 fe 48 39 ec \tvpmovq2m %zmm28,%k5",},
527{{0xc4, 0xe2, 0x4d, 0x3a, 0xd4, }, 5, 0, "", "",
528"c4 e2 4d 3a d4 \tvpminuw %ymm4,%ymm6,%ymm2",},
529{{0x62, 0x62, 0x7e, 0x48, 0x3a, 0xe6, }, 6, 0, "", "",
530"62 62 7e 48 3a e6 \tvpbroadcastmw2d %k6,%zmm28",},
531{{0xc4, 0xe2, 0x4d, 0x3b, 0xd4, }, 5, 0, "", "",
532"c4 e2 4d 3b d4 \tvpminud %ymm4,%ymm6,%ymm2",},
533{{0x62, 0x02, 0x35, 0x40, 0x3b, 0xd0, }, 6, 0, "", "",
534"62 02 35 40 3b d0 \tvpminud %zmm24,%zmm25,%zmm26",},
535{{0x62, 0x02, 0xb5, 0x40, 0x3b, 0xd0, }, 6, 0, "", "",
536"62 02 b5 40 3b d0 \tvpminuq %zmm24,%zmm25,%zmm26",},
537{{0xc4, 0xe2, 0x4d, 0x3d, 0xd4, }, 5, 0, "", "",
538"c4 e2 4d 3d d4 \tvpmaxsd %ymm4,%ymm6,%ymm2",},
539{{0x62, 0x02, 0x35, 0x40, 0x3d, 0xd0, }, 6, 0, "", "",
540"62 02 35 40 3d d0 \tvpmaxsd %zmm24,%zmm25,%zmm26",},
541{{0x62, 0x02, 0xb5, 0x40, 0x3d, 0xd0, }, 6, 0, "", "",
542"62 02 b5 40 3d d0 \tvpmaxsq %zmm24,%zmm25,%zmm26",},
543{{0xc4, 0xe2, 0x4d, 0x3f, 0xd4, }, 5, 0, "", "",
544"c4 e2 4d 3f d4 \tvpmaxud %ymm4,%ymm6,%ymm2",},
545{{0x62, 0x02, 0x35, 0x40, 0x3f, 0xd0, }, 6, 0, "", "",
546"62 02 35 40 3f d0 \tvpmaxud %zmm24,%zmm25,%zmm26",},
547{{0x62, 0x02, 0xb5, 0x40, 0x3f, 0xd0, }, 6, 0, "", "",
548"62 02 b5 40 3f d0 \tvpmaxuq %zmm24,%zmm25,%zmm26",},
549{{0xc4, 0xe2, 0x4d, 0x40, 0xd4, }, 5, 0, "", "",
550"c4 e2 4d 40 d4 \tvpmulld %ymm4,%ymm6,%ymm2",},
551{{0x62, 0x02, 0x35, 0x40, 0x40, 0xd0, }, 6, 0, "", "",
552"62 02 35 40 40 d0 \tvpmulld %zmm24,%zmm25,%zmm26",},
553{{0x62, 0x02, 0xb5, 0x40, 0x40, 0xd0, }, 6, 0, "", "",
554"62 02 b5 40 40 d0 \tvpmullq %zmm24,%zmm25,%zmm26",},
555{{0x62, 0x02, 0x7d, 0x48, 0x42, 0xd1, }, 6, 0, "", "",
556"62 02 7d 48 42 d1 \tvgetexpps %zmm25,%zmm26",},
557{{0x62, 0x02, 0xfd, 0x48, 0x42, 0xe3, }, 6, 0, "", "",
558"62 02 fd 48 42 e3 \tvgetexppd %zmm27,%zmm28",},
559{{0x62, 0x02, 0x35, 0x07, 0x43, 0xd0, }, 6, 0, "", "",
560"62 02 35 07 43 d0 \tvgetexpss %xmm24,%xmm25,%xmm26{%k7}",},
561{{0x62, 0x02, 0x95, 0x07, 0x43, 0xf4, }, 6, 0, "", "",
562"62 02 95 07 43 f4 \tvgetexpsd %xmm28,%xmm29,%xmm30{%k7}",},
563{{0x62, 0x02, 0x7d, 0x48, 0x44, 0xe3, }, 6, 0, "", "",
564"62 02 7d 48 44 e3 \tvplzcntd %zmm27,%zmm28",},
565{{0x62, 0x02, 0xfd, 0x48, 0x44, 0xe3, }, 6, 0, "", "",
566"62 02 fd 48 44 e3 \tvplzcntq %zmm27,%zmm28",},
567{{0xc4, 0xe2, 0x4d, 0x46, 0xd4, }, 5, 0, "", "",
568"c4 e2 4d 46 d4 \tvpsravd %ymm4,%ymm6,%ymm2",},
569{{0x62, 0x02, 0x35, 0x40, 0x46, 0xd0, }, 6, 0, "", "",
570"62 02 35 40 46 d0 \tvpsravd %zmm24,%zmm25,%zmm26",},
571{{0x62, 0x02, 0xb5, 0x40, 0x46, 0xd0, }, 6, 0, "", "",
572"62 02 b5 40 46 d0 \tvpsravq %zmm24,%zmm25,%zmm26",},
573{{0x62, 0x02, 0x7d, 0x48, 0x4c, 0xd1, }, 6, 0, "", "",
574"62 02 7d 48 4c d1 \tvrcp14ps %zmm25,%zmm26",},
575{{0x62, 0x02, 0xfd, 0x48, 0x4c, 0xe3, }, 6, 0, "", "",
576"62 02 fd 48 4c e3 \tvrcp14pd %zmm27,%zmm28",},
577{{0x62, 0x02, 0x35, 0x07, 0x4d, 0xd0, }, 6, 0, "", "",
578"62 02 35 07 4d d0 \tvrcp14ss %xmm24,%xmm25,%xmm26{%k7}",},
579{{0x62, 0x02, 0xb5, 0x07, 0x4d, 0xd0, }, 6, 0, "", "",
580"62 02 b5 07 4d d0 \tvrcp14sd %xmm24,%xmm25,%xmm26{%k7}",},
581{{0x62, 0x02, 0x7d, 0x48, 0x4e, 0xd1, }, 6, 0, "", "",
582"62 02 7d 48 4e d1 \tvrsqrt14ps %zmm25,%zmm26",},
583{{0x62, 0x02, 0xfd, 0x48, 0x4e, 0xe3, }, 6, 0, "", "",
584"62 02 fd 48 4e e3 \tvrsqrt14pd %zmm27,%zmm28",},
585{{0x62, 0x02, 0x35, 0x07, 0x4f, 0xd0, }, 6, 0, "", "",
586"62 02 35 07 4f d0 \tvrsqrt14ss %xmm24,%xmm25,%xmm26{%k7}",},
587{{0x62, 0x02, 0xb5, 0x07, 0x4f, 0xd0, }, 6, 0, "", "",
588"62 02 b5 07 4f d0 \tvrsqrt14sd %xmm24,%xmm25,%xmm26{%k7}",},
589{{0xc4, 0xe2, 0x79, 0x59, 0xf4, }, 5, 0, "", "",
590"c4 e2 79 59 f4 \tvpbroadcastq %xmm4,%xmm6",},
591{{0x62, 0x02, 0x7d, 0x48, 0x59, 0xd3, }, 6, 0, "", "",
592"62 02 7d 48 59 d3 \tvbroadcasti32x2 %xmm27,%zmm26",},
593{{0xc4, 0xe2, 0x7d, 0x5a, 0x21, }, 5, 0, "", "",
594"c4 e2 7d 5a 21 \tvbroadcasti128 (%rcx),%ymm4",},
595{{0x62, 0x62, 0x7d, 0x48, 0x5a, 0x11, }, 6, 0, "", "",
596"62 62 7d 48 5a 11 \tvbroadcasti32x4 (%rcx),%zmm26",},
597{{0x62, 0x62, 0xfd, 0x48, 0x5a, 0x11, }, 6, 0, "", "",
598"62 62 fd 48 5a 11 \tvbroadcasti64x2 (%rcx),%zmm26",},
599{{0x62, 0x62, 0x7d, 0x48, 0x5b, 0x21, }, 6, 0, "", "",
600"62 62 7d 48 5b 21 \tvbroadcasti32x8 (%rcx),%zmm28",},
601{{0x62, 0x62, 0xfd, 0x48, 0x5b, 0x11, }, 6, 0, "", "",
602"62 62 fd 48 5b 11 \tvbroadcasti64x4 (%rcx),%zmm26",},
603{{0x62, 0x02, 0x25, 0x40, 0x64, 0xe2, }, 6, 0, "", "",
604"62 02 25 40 64 e2 \tvpblendmd %zmm26,%zmm27,%zmm28",},
605{{0x62, 0x02, 0xa5, 0x40, 0x64, 0xe2, }, 6, 0, "", "",
606"62 02 a5 40 64 e2 \tvpblendmq %zmm26,%zmm27,%zmm28",},
607{{0x62, 0x02, 0x35, 0x40, 0x65, 0xd0, }, 6, 0, "", "",
608"62 02 35 40 65 d0 \tvblendmps %zmm24,%zmm25,%zmm26",},
609{{0x62, 0x02, 0xa5, 0x40, 0x65, 0xe2, }, 6, 0, "", "",
610"62 02 a5 40 65 e2 \tvblendmpd %zmm26,%zmm27,%zmm28",},
611{{0x62, 0x02, 0x25, 0x40, 0x66, 0xe2, }, 6, 0, "", "",
612"62 02 25 40 66 e2 \tvpblendmb %zmm26,%zmm27,%zmm28",},
613{{0x62, 0x02, 0xa5, 0x40, 0x66, 0xe2, }, 6, 0, "", "",
614"62 02 a5 40 66 e2 \tvpblendmw %zmm26,%zmm27,%zmm28",},
615{{0x62, 0x02, 0x35, 0x40, 0x75, 0xd0, }, 6, 0, "", "",
616"62 02 35 40 75 d0 \tvpermi2b %zmm24,%zmm25,%zmm26",},
617{{0x62, 0x02, 0xa5, 0x40, 0x75, 0xe2, }, 6, 0, "", "",
618"62 02 a5 40 75 e2 \tvpermi2w %zmm26,%zmm27,%zmm28",},
619{{0x62, 0x02, 0x25, 0x40, 0x76, 0xe2, }, 6, 0, "", "",
620"62 02 25 40 76 e2 \tvpermi2d %zmm26,%zmm27,%zmm28",},
621{{0x62, 0x02, 0xa5, 0x40, 0x76, 0xe2, }, 6, 0, "", "",
622"62 02 a5 40 76 e2 \tvpermi2q %zmm26,%zmm27,%zmm28",},
623{{0x62, 0x02, 0x25, 0x40, 0x77, 0xe2, }, 6, 0, "", "",
624"62 02 25 40 77 e2 \tvpermi2ps %zmm26,%zmm27,%zmm28",},
625{{0x62, 0x02, 0xa5, 0x40, 0x77, 0xe2, }, 6, 0, "", "",
626"62 02 a5 40 77 e2 \tvpermi2pd %zmm26,%zmm27,%zmm28",},
627{{0x62, 0x62, 0x7d, 0x08, 0x7a, 0xf0, }, 6, 0, "", "",
628"62 62 7d 08 7a f0 \tvpbroadcastb %eax,%xmm30",},
629{{0x62, 0x62, 0x7d, 0x08, 0x7b, 0xf0, }, 6, 0, "", "",
630"62 62 7d 08 7b f0 \tvpbroadcastw %eax,%xmm30",},
631{{0x62, 0x62, 0x7d, 0x08, 0x7c, 0xf0, }, 6, 0, "", "",
632"62 62 7d 08 7c f0 \tvpbroadcastd %eax,%xmm30",},
633{{0x62, 0x62, 0xfd, 0x48, 0x7c, 0xf0, }, 6, 0, "", "",
634"62 62 fd 48 7c f0 \tvpbroadcastq %rax,%zmm30",},
635{{0x62, 0x02, 0x25, 0x40, 0x7d, 0xe2, }, 6, 0, "", "",
636"62 02 25 40 7d e2 \tvpermt2b %zmm26,%zmm27,%zmm28",},
637{{0x62, 0x02, 0xa5, 0x40, 0x7d, 0xe2, }, 6, 0, "", "",
638"62 02 a5 40 7d e2 \tvpermt2w %zmm26,%zmm27,%zmm28",},
639{{0x62, 0x02, 0x25, 0x40, 0x7e, 0xe2, }, 6, 0, "", "",
640"62 02 25 40 7e e2 \tvpermt2d %zmm26,%zmm27,%zmm28",},
641{{0x62, 0x02, 0xa5, 0x40, 0x7e, 0xe2, }, 6, 0, "", "",
642"62 02 a5 40 7e e2 \tvpermt2q %zmm26,%zmm27,%zmm28",},
643{{0x62, 0x02, 0x25, 0x40, 0x7f, 0xe2, }, 6, 0, "", "",
644"62 02 25 40 7f e2 \tvpermt2ps %zmm26,%zmm27,%zmm28",},
645{{0x62, 0x02, 0xa5, 0x40, 0x7f, 0xe2, }, 6, 0, "", "",
646"62 02 a5 40 7f e2 \tvpermt2pd %zmm26,%zmm27,%zmm28",},
647{{0x62, 0x02, 0xa5, 0x40, 0x83, 0xe2, }, 6, 0, "", "",
648"62 02 a5 40 83 e2 \tvpmultishiftqb %zmm26,%zmm27,%zmm28",},
649{{0x62, 0x62, 0x7d, 0x48, 0x88, 0x11, }, 6, 0, "", "",
650"62 62 7d 48 88 11 \tvexpandps (%rcx),%zmm26",},
651{{0x62, 0x62, 0xfd, 0x48, 0x88, 0x21, }, 6, 0, "", "",
652"62 62 fd 48 88 21 \tvexpandpd (%rcx),%zmm28",},
653{{0x62, 0x62, 0x7d, 0x48, 0x89, 0x21, }, 6, 0, "", "",
654"62 62 7d 48 89 21 \tvpexpandd (%rcx),%zmm28",},
655{{0x62, 0x62, 0xfd, 0x48, 0x89, 0x11, }, 6, 0, "", "",
656"62 62 fd 48 89 11 \tvpexpandq (%rcx),%zmm26",},
657{{0x62, 0x62, 0x7d, 0x48, 0x8a, 0x21, }, 6, 0, "", "",
658"62 62 7d 48 8a 21 \tvcompressps %zmm28,(%rcx)",},
659{{0x62, 0x62, 0xfd, 0x48, 0x8a, 0x21, }, 6, 0, "", "",
660"62 62 fd 48 8a 21 \tvcompresspd %zmm28,(%rcx)",},
661{{0x62, 0x62, 0x7d, 0x48, 0x8b, 0x21, }, 6, 0, "", "",
662"62 62 7d 48 8b 21 \tvpcompressd %zmm28,(%rcx)",},
663{{0x62, 0x62, 0xfd, 0x48, 0x8b, 0x11, }, 6, 0, "", "",
664"62 62 fd 48 8b 11 \tvpcompressq %zmm26,(%rcx)",},
665{{0x62, 0x02, 0x25, 0x40, 0x8d, 0xe2, }, 6, 0, "", "",
666"62 02 25 40 8d e2 \tvpermb %zmm26,%zmm27,%zmm28",},
667{{0x62, 0x02, 0xa5, 0x40, 0x8d, 0xe2, }, 6, 0, "", "",
668"62 02 a5 40 8d e2 \tvpermw %zmm26,%zmm27,%zmm28",},
669{{0xc4, 0xe2, 0x69, 0x90, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
670"c4 e2 69 90 4c 7d 02 \tvpgatherdd %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
671{{0xc4, 0xe2, 0xe9, 0x90, 0x4c, 0x7d, 0x04, }, 7, 0, "", "",
672"c4 e2 e9 90 4c 7d 04 \tvpgatherdq %xmm2,0x4(%rbp,%xmm7,2),%xmm1",},
673{{0x62, 0x22, 0x7d, 0x41, 0x90, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
674"62 22 7d 41 90 94 dd 7b 00 00 00 \tvpgatherdd 0x7b(%rbp,%zmm27,8),%zmm26{%k1}",},
675{{0x62, 0x22, 0xfd, 0x41, 0x90, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
676"62 22 fd 41 90 94 dd 7b 00 00 00 \tvpgatherdq 0x7b(%rbp,%ymm27,8),%zmm26{%k1}",},
677{{0xc4, 0xe2, 0x69, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
678"c4 e2 69 91 4c 7d 02 \tvpgatherqd %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
679{{0xc4, 0xe2, 0xe9, 0x91, 0x4c, 0x7d, 0x02, }, 7, 0, "", "",
680"c4 e2 e9 91 4c 7d 02 \tvpgatherqq %xmm2,0x2(%rbp,%xmm7,2),%xmm1",},
681{{0x62, 0x22, 0x7d, 0x41, 0x91, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
682"62 22 7d 41 91 94 dd 7b 00 00 00 \tvpgatherqd 0x7b(%rbp,%zmm27,8),%ymm26{%k1}",},
683{{0x62, 0x22, 0xfd, 0x41, 0x91, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
684"62 22 fd 41 91 94 dd 7b 00 00 00 \tvpgatherqq 0x7b(%rbp,%zmm27,8),%zmm26{%k1}",},
685{{0x62, 0x22, 0x7d, 0x41, 0xa0, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
686"62 22 7d 41 a0 a4 ed 7b 00 00 00 \tvpscatterdd %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
687{{0x62, 0x22, 0xfd, 0x41, 0xa0, 0x94, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
688"62 22 fd 41 a0 94 dd 7b 00 00 00 \tvpscatterdq %zmm26,0x7b(%rbp,%ymm27,8){%k1}",},
689{{0x62, 0xb2, 0x7d, 0x41, 0xa1, 0xb4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
690"62 b2 7d 41 a1 b4 ed 7b 00 00 00 \tvpscatterqd %ymm6,0x7b(%rbp,%zmm29,8){%k1}",},
691{{0x62, 0xb2, 0xfd, 0x21, 0xa1, 0xb4, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
692"62 b2 fd 21 a1 b4 dd 7b 00 00 00 \tvpscatterqq %ymm6,0x7b(%rbp,%ymm27,8){%k1}",},
693{{0x62, 0x22, 0x7d, 0x41, 0xa2, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
694"62 22 7d 41 a2 a4 ed 7b 00 00 00 \tvscatterdps %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
695{{0x62, 0x22, 0xfd, 0x41, 0xa2, 0xa4, 0xdd, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
696"62 22 fd 41 a2 a4 dd 7b 00 00 00 \tvscatterdpd %zmm28,0x7b(%rbp,%ymm27,8){%k1}",},
697{{0x62, 0xb2, 0x7d, 0x41, 0xa3, 0xb4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
698"62 b2 7d 41 a3 b4 ed 7b 00 00 00 \tvscatterqps %ymm6,0x7b(%rbp,%zmm29,8){%k1}",},
699{{0x62, 0x22, 0xfd, 0x41, 0xa3, 0xa4, 0xed, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
700"62 22 fd 41 a3 a4 ed 7b 00 00 00 \tvscatterqpd %zmm28,0x7b(%rbp,%zmm29,8){%k1}",},
701{{0x62, 0x02, 0xa5, 0x40, 0xb4, 0xe2, }, 6, 0, "", "",
702"62 02 a5 40 b4 e2 \tvpmadd52luq %zmm26,%zmm27,%zmm28",},
703{{0x62, 0x02, 0xa5, 0x40, 0xb5, 0xe2, }, 6, 0, "", "",
704"62 02 a5 40 b5 e2 \tvpmadd52huq %zmm26,%zmm27,%zmm28",},
705{{0x62, 0x02, 0x7d, 0x48, 0xc4, 0xda, }, 6, 0, "", "",
706"62 02 7d 48 c4 da \tvpconflictd %zmm26,%zmm27",},
707{{0x62, 0x02, 0xfd, 0x48, 0xc4, 0xda, }, 6, 0, "", "",
708"62 02 fd 48 c4 da \tvpconflictq %zmm26,%zmm27",},
709{{0x62, 0x02, 0x7d, 0x48, 0xc8, 0xf5, }, 6, 0, "", "",
710"62 02 7d 48 c8 f5 \tvexp2ps %zmm29,%zmm30",},
711{{0x62, 0x02, 0xfd, 0x48, 0xc8, 0xda, }, 6, 0, "", "",
712"62 02 fd 48 c8 da \tvexp2pd %zmm26,%zmm27",},
713{{0x62, 0x02, 0x7d, 0x48, 0xca, 0xf5, }, 6, 0, "", "",
714"62 02 7d 48 ca f5 \tvrcp28ps %zmm29,%zmm30",},
715{{0x62, 0x02, 0xfd, 0x48, 0xca, 0xda, }, 6, 0, "", "",
716"62 02 fd 48 ca da \tvrcp28pd %zmm26,%zmm27",},
717{{0x62, 0x02, 0x15, 0x07, 0xcb, 0xf4, }, 6, 0, "", "",
718"62 02 15 07 cb f4 \tvrcp28ss %xmm28,%xmm29,%xmm30{%k7}",},
719{{0x62, 0x02, 0xad, 0x07, 0xcb, 0xd9, }, 6, 0, "", "",
720"62 02 ad 07 cb d9 \tvrcp28sd %xmm25,%xmm26,%xmm27{%k7}",},
721{{0x62, 0x02, 0x7d, 0x48, 0xcc, 0xf5, }, 6, 0, "", "",
722"62 02 7d 48 cc f5 \tvrsqrt28ps %zmm29,%zmm30",},
723{{0x62, 0x02, 0xfd, 0x48, 0xcc, 0xda, }, 6, 0, "", "",
724"62 02 fd 48 cc da \tvrsqrt28pd %zmm26,%zmm27",},
725{{0x62, 0x02, 0x15, 0x07, 0xcd, 0xf4, }, 6, 0, "", "",
726"62 02 15 07 cd f4 \tvrsqrt28ss %xmm28,%xmm29,%xmm30{%k7}",},
727{{0x62, 0x02, 0xad, 0x07, 0xcd, 0xd9, }, 6, 0, "", "",
728"62 02 ad 07 cd d9 \tvrsqrt28sd %xmm25,%xmm26,%xmm27{%k7}",},
729{{0x62, 0x03, 0x15, 0x40, 0x03, 0xf4, 0x12, }, 7, 0, "", "",
730"62 03 15 40 03 f4 12 \tvalignd $0x12,%zmm28,%zmm29,%zmm30",},
731{{0x62, 0x03, 0xad, 0x40, 0x03, 0xd9, 0x12, }, 7, 0, "", "",
732"62 03 ad 40 03 d9 12 \tvalignq $0x12,%zmm25,%zmm26,%zmm27",},
733{{0xc4, 0xe3, 0x7d, 0x08, 0xd6, 0x05, }, 6, 0, "", "",
734"c4 e3 7d 08 d6 05 \tvroundps $0x5,%ymm6,%ymm2",},
735{{0x62, 0x03, 0x7d, 0x48, 0x08, 0xd1, 0x12, }, 7, 0, "", "",
736"62 03 7d 48 08 d1 12 \tvrndscaleps $0x12,%zmm25,%zmm26",},
737{{0xc4, 0xe3, 0x7d, 0x09, 0xd6, 0x05, }, 6, 0, "", "",
738"c4 e3 7d 09 d6 05 \tvroundpd $0x5,%ymm6,%ymm2",},
739{{0x62, 0x03, 0xfd, 0x48, 0x09, 0xd1, 0x12, }, 7, 0, "", "",
740"62 03 fd 48 09 d1 12 \tvrndscalepd $0x12,%zmm25,%zmm26",},
741{{0xc4, 0xe3, 0x49, 0x0a, 0xd4, 0x05, }, 6, 0, "", "",
742"c4 e3 49 0a d4 05 \tvroundss $0x5,%xmm4,%xmm6,%xmm2",},
743{{0x62, 0x03, 0x35, 0x07, 0x0a, 0xd0, 0x12, }, 7, 0, "", "",
744"62 03 35 07 0a d0 12 \tvrndscaless $0x12,%xmm24,%xmm25,%xmm26{%k7}",},
745{{0xc4, 0xe3, 0x49, 0x0b, 0xd4, 0x05, }, 6, 0, "", "",
746"c4 e3 49 0b d4 05 \tvroundsd $0x5,%xmm4,%xmm6,%xmm2",},
747{{0x62, 0x03, 0xb5, 0x07, 0x0b, 0xd0, 0x12, }, 7, 0, "", "",
748"62 03 b5 07 0b d0 12 \tvrndscalesd $0x12,%xmm24,%xmm25,%xmm26{%k7}",},
749{{0xc4, 0xe3, 0x5d, 0x18, 0xf4, 0x05, }, 6, 0, "", "",
750"c4 e3 5d 18 f4 05 \tvinsertf128 $0x5,%xmm4,%ymm4,%ymm6",},
751{{0x62, 0x03, 0x35, 0x47, 0x18, 0xd0, 0x12, }, 7, 0, "", "",
752"62 03 35 47 18 d0 12 \tvinsertf32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
753{{0x62, 0x03, 0xb5, 0x47, 0x18, 0xd0, 0x12, }, 7, 0, "", "",
754"62 03 b5 47 18 d0 12 \tvinsertf64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
755{{0xc4, 0xe3, 0x7d, 0x19, 0xe4, 0x05, }, 6, 0, "", "",
756"c4 e3 7d 19 e4 05 \tvextractf128 $0x5,%ymm4,%xmm4",},
757{{0x62, 0x03, 0x7d, 0x4f, 0x19, 0xca, 0x12, }, 7, 0, "", "",
758"62 03 7d 4f 19 ca 12 \tvextractf32x4 $0x12,%zmm25,%xmm26{%k7}",},
759{{0x62, 0x03, 0xfd, 0x4f, 0x19, 0xca, 0x12, }, 7, 0, "", "",
760"62 03 fd 4f 19 ca 12 \tvextractf64x2 $0x12,%zmm25,%xmm26{%k7}",},
761{{0x62, 0x03, 0x2d, 0x47, 0x1a, 0xd9, 0x12, }, 7, 0, "", "",
762"62 03 2d 47 1a d9 12 \tvinsertf32x8 $0x12,%ymm25,%zmm26,%zmm27{%k7}",},
763{{0x62, 0x03, 0x95, 0x47, 0x1a, 0xf4, 0x12, }, 7, 0, "", "",
764"62 03 95 47 1a f4 12 \tvinsertf64x4 $0x12,%ymm28,%zmm29,%zmm30{%k7}",},
765{{0x62, 0x03, 0x7d, 0x4f, 0x1b, 0xee, 0x12, }, 7, 0, "", "",
766"62 03 7d 4f 1b ee 12 \tvextractf32x8 $0x12,%zmm29,%ymm30{%k7}",},
767{{0x62, 0x03, 0xfd, 0x4f, 0x1b, 0xd3, 0x12, }, 7, 0, "", "",
768"62 03 fd 4f 1b d3 12 \tvextractf64x4 $0x12,%zmm26,%ymm27{%k7}",},
769{{0x62, 0x93, 0x0d, 0x40, 0x1e, 0xed, 0x12, }, 7, 0, "", "",
770"62 93 0d 40 1e ed 12 \tvpcmpud $0x12,%zmm29,%zmm30,%k5",},
771{{0x62, 0x93, 0xa5, 0x40, 0x1e, 0xea, 0x12, }, 7, 0, "", "",
772"62 93 a5 40 1e ea 12 \tvpcmpuq $0x12,%zmm26,%zmm27,%k5",},
773{{0x62, 0x93, 0x0d, 0x40, 0x1f, 0xed, 0x12, }, 7, 0, "", "",
774"62 93 0d 40 1f ed 12 \tvpcmpd $0x12,%zmm29,%zmm30,%k5",},
775{{0x62, 0x93, 0xa5, 0x40, 0x1f, 0xea, 0x12, }, 7, 0, "", "",
776"62 93 a5 40 1f ea 12 \tvpcmpq $0x12,%zmm26,%zmm27,%k5",},
777{{0x62, 0x03, 0x15, 0x40, 0x23, 0xf4, 0x12, }, 7, 0, "", "",
778"62 03 15 40 23 f4 12 \tvshuff32x4 $0x12,%zmm28,%zmm29,%zmm30",},
779{{0x62, 0x03, 0xad, 0x40, 0x23, 0xd9, 0x12, }, 7, 0, "", "",
780"62 03 ad 40 23 d9 12 \tvshuff64x2 $0x12,%zmm25,%zmm26,%zmm27",},
781{{0x62, 0x03, 0x15, 0x40, 0x25, 0xf4, 0x12, }, 7, 0, "", "",
782"62 03 15 40 25 f4 12 \tvpternlogd $0x12,%zmm28,%zmm29,%zmm30",},
783{{0x62, 0x03, 0x95, 0x40, 0x25, 0xf4, 0x12, }, 7, 0, "", "",
784"62 03 95 40 25 f4 12 \tvpternlogq $0x12,%zmm28,%zmm29,%zmm30",},
785{{0x62, 0x03, 0x7d, 0x48, 0x26, 0xda, 0x12, }, 7, 0, "", "",
786"62 03 7d 48 26 da 12 \tvgetmantps $0x12,%zmm26,%zmm27",},
787{{0x62, 0x03, 0xfd, 0x48, 0x26, 0xf5, 0x12, }, 7, 0, "", "",
788"62 03 fd 48 26 f5 12 \tvgetmantpd $0x12,%zmm29,%zmm30",},
789{{0x62, 0x03, 0x2d, 0x07, 0x27, 0xd9, 0x12, }, 7, 0, "", "",
790"62 03 2d 07 27 d9 12 \tvgetmantss $0x12,%xmm25,%xmm26,%xmm27{%k7}",},
791{{0x62, 0x03, 0x95, 0x07, 0x27, 0xf4, 0x12, }, 7, 0, "", "",
792"62 03 95 07 27 f4 12 \tvgetmantsd $0x12,%xmm28,%xmm29,%xmm30{%k7}",},
793{{0xc4, 0xe3, 0x5d, 0x38, 0xf4, 0x05, }, 6, 0, "", "",
794"c4 e3 5d 38 f4 05 \tvinserti128 $0x5,%xmm4,%ymm4,%ymm6",},
795{{0x62, 0x03, 0x35, 0x47, 0x38, 0xd0, 0x12, }, 7, 0, "", "",
796"62 03 35 47 38 d0 12 \tvinserti32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
797{{0x62, 0x03, 0xb5, 0x47, 0x38, 0xd0, 0x12, }, 7, 0, "", "",
798"62 03 b5 47 38 d0 12 \tvinserti64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}",},
799{{0xc4, 0xe3, 0x7d, 0x39, 0xe6, 0x05, }, 6, 0, "", "",
800"c4 e3 7d 39 e6 05 \tvextracti128 $0x5,%ymm4,%xmm6",},
801{{0x62, 0x03, 0x7d, 0x4f, 0x39, 0xca, 0x12, }, 7, 0, "", "",
802"62 03 7d 4f 39 ca 12 \tvextracti32x4 $0x12,%zmm25,%xmm26{%k7}",},
803{{0x62, 0x03, 0xfd, 0x4f, 0x39, 0xca, 0x12, }, 7, 0, "", "",
804"62 03 fd 4f 39 ca 12 \tvextracti64x2 $0x12,%zmm25,%xmm26{%k7}",},
805{{0x62, 0x03, 0x15, 0x47, 0x3a, 0xf4, 0x12, }, 7, 0, "", "",
806"62 03 15 47 3a f4 12 \tvinserti32x8 $0x12,%ymm28,%zmm29,%zmm30{%k7}",},
807{{0x62, 0x03, 0xad, 0x47, 0x3a, 0xd9, 0x12, }, 7, 0, "", "",
808"62 03 ad 47 3a d9 12 \tvinserti64x4 $0x12,%ymm25,%zmm26,%zmm27{%k7}",},
809{{0x62, 0x03, 0x7d, 0x4f, 0x3b, 0xee, 0x12, }, 7, 0, "", "",
810"62 03 7d 4f 3b ee 12 \tvextracti32x8 $0x12,%zmm29,%ymm30{%k7}",},
811{{0x62, 0x03, 0xfd, 0x4f, 0x3b, 0xd3, 0x12, }, 7, 0, "", "",
812"62 03 fd 4f 3b d3 12 \tvextracti64x4 $0x12,%zmm26,%ymm27{%k7}",},
813{{0x62, 0x93, 0x0d, 0x40, 0x3e, 0xed, 0x12, }, 7, 0, "", "",
814"62 93 0d 40 3e ed 12 \tvpcmpub $0x12,%zmm29,%zmm30,%k5",},
815{{0x62, 0x93, 0xa5, 0x40, 0x3e, 0xea, 0x12, }, 7, 0, "", "",
816"62 93 a5 40 3e ea 12 \tvpcmpuw $0x12,%zmm26,%zmm27,%k5",},
817{{0x62, 0x93, 0x0d, 0x40, 0x3f, 0xed, 0x12, }, 7, 0, "", "",
818"62 93 0d 40 3f ed 12 \tvpcmpb $0x12,%zmm29,%zmm30,%k5",},
819{{0x62, 0x93, 0xa5, 0x40, 0x3f, 0xea, 0x12, }, 7, 0, "", "",
820"62 93 a5 40 3f ea 12 \tvpcmpw $0x12,%zmm26,%zmm27,%k5",},
821{{0xc4, 0xe3, 0x4d, 0x42, 0xd4, 0x05, }, 6, 0, "", "",
822"c4 e3 4d 42 d4 05 \tvmpsadbw $0x5,%ymm4,%ymm6,%ymm2",},
823{{0x62, 0xf3, 0x55, 0x48, 0x42, 0xf4, 0x12, }, 7, 0, "", "",
824"62 f3 55 48 42 f4 12 \tvdbpsadbw $0x12,%zmm4,%zmm5,%zmm6",},
825{{0x62, 0x03, 0x2d, 0x40, 0x43, 0xd9, 0x12, }, 7, 0, "", "",
826"62 03 2d 40 43 d9 12 \tvshufi32x4 $0x12,%zmm25,%zmm26,%zmm27",},
827{{0x62, 0x03, 0x95, 0x40, 0x43, 0xf4, 0x12, }, 7, 0, "", "",
828"62 03 95 40 43 f4 12 \tvshufi64x2 $0x12,%zmm28,%zmm29,%zmm30",},
829{{0x62, 0x03, 0x2d, 0x40, 0x50, 0xd9, 0x12, }, 7, 0, "", "",
830"62 03 2d 40 50 d9 12 \tvrangeps $0x12,%zmm25,%zmm26,%zmm27",},
831{{0x62, 0x03, 0x95, 0x40, 0x50, 0xf4, 0x12, }, 7, 0, "", "",
832"62 03 95 40 50 f4 12 \tvrangepd $0x12,%zmm28,%zmm29,%zmm30",},
833{{0x62, 0x03, 0x2d, 0x00, 0x51, 0xd9, 0x12, }, 7, 0, "", "",
834"62 03 2d 00 51 d9 12 \tvrangess $0x12,%xmm25,%xmm26,%xmm27",},
835{{0x62, 0x03, 0x95, 0x00, 0x51, 0xf4, 0x12, }, 7, 0, "", "",
836"62 03 95 00 51 f4 12 \tvrangesd $0x12,%xmm28,%xmm29,%xmm30",},
837{{0x62, 0x03, 0x15, 0x40, 0x54, 0xf4, 0x12, }, 7, 0, "", "",
838"62 03 15 40 54 f4 12 \tvfixupimmps $0x12,%zmm28,%zmm29,%zmm30",},
839{{0x62, 0x03, 0xad, 0x40, 0x54, 0xd9, 0x12, }, 7, 0, "", "",
840"62 03 ad 40 54 d9 12 \tvfixupimmpd $0x12,%zmm25,%zmm26,%zmm27",},
841{{0x62, 0x03, 0x15, 0x07, 0x55, 0xf4, 0x12, }, 7, 0, "", "",
842"62 03 15 07 55 f4 12 \tvfixupimmss $0x12,%xmm28,%xmm29,%xmm30{%k7}",},
843{{0x62, 0x03, 0xad, 0x07, 0x55, 0xd9, 0x12, }, 7, 0, "", "",
844"62 03 ad 07 55 d9 12 \tvfixupimmsd $0x12,%xmm25,%xmm26,%xmm27{%k7}",},
845{{0x62, 0x03, 0x7d, 0x48, 0x56, 0xda, 0x12, }, 7, 0, "", "",
846"62 03 7d 48 56 da 12 \tvreduceps $0x12,%zmm26,%zmm27",},
847{{0x62, 0x03, 0xfd, 0x48, 0x56, 0xf5, 0x12, }, 7, 0, "", "",
848"62 03 fd 48 56 f5 12 \tvreducepd $0x12,%zmm29,%zmm30",},
849{{0x62, 0x03, 0x2d, 0x00, 0x57, 0xd9, 0x12, }, 7, 0, "", "",
850"62 03 2d 00 57 d9 12 \tvreducess $0x12,%xmm25,%xmm26,%xmm27",},
851{{0x62, 0x03, 0x95, 0x00, 0x57, 0xf4, 0x12, }, 7, 0, "", "",
852"62 03 95 00 57 f4 12 \tvreducesd $0x12,%xmm28,%xmm29,%xmm30",},
853{{0x62, 0x93, 0x7d, 0x48, 0x66, 0xeb, 0x12, }, 7, 0, "", "",
854"62 93 7d 48 66 eb 12 \tvfpclassps $0x12,%zmm27,%k5",},
855{{0x62, 0x93, 0xfd, 0x48, 0x66, 0xee, 0x12, }, 7, 0, "", "",
856"62 93 fd 48 66 ee 12 \tvfpclasspd $0x12,%zmm30,%k5",},
857{{0x62, 0x93, 0x7d, 0x08, 0x67, 0xeb, 0x12, }, 7, 0, "", "",
858"62 93 7d 08 67 eb 12 \tvfpclassss $0x12,%xmm27,%k5",},
859{{0x62, 0x93, 0xfd, 0x08, 0x67, 0xee, 0x12, }, 7, 0, "", "",
860"62 93 fd 08 67 ee 12 \tvfpclasssd $0x12,%xmm30,%k5",},
861{{0x62, 0x91, 0x2d, 0x40, 0x72, 0xc1, 0x12, }, 7, 0, "", "",
862"62 91 2d 40 72 c1 12 \tvprord $0x12,%zmm25,%zmm26",},
863{{0x62, 0x91, 0xad, 0x40, 0x72, 0xc1, 0x12, }, 7, 0, "", "",
864"62 91 ad 40 72 c1 12 \tvprorq $0x12,%zmm25,%zmm26",},
865{{0x62, 0x91, 0x0d, 0x40, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
866"62 91 0d 40 72 cd 12 \tvprold $0x12,%zmm29,%zmm30",},
867{{0x62, 0x91, 0x8d, 0x40, 0x72, 0xcd, 0x12, }, 7, 0, "", "",
868"62 91 8d 40 72 cd 12 \tvprolq $0x12,%zmm29,%zmm30",},
869{{0x0f, 0x72, 0xe6, 0x02, }, 4, 0, "", "",
870"0f 72 e6 02 \tpsrad $0x2,%mm6",},
871{{0xc5, 0xed, 0x72, 0xe6, 0x05, }, 5, 0, "", "",
872"c5 ed 72 e6 05 \tvpsrad $0x5,%ymm6,%ymm2",},
873{{0x62, 0x91, 0x4d, 0x40, 0x72, 0xe2, 0x05, }, 7, 0, "", "",
874"62 91 4d 40 72 e2 05 \tvpsrad $0x5,%zmm26,%zmm22",},
875{{0x62, 0x91, 0xcd, 0x40, 0x72, 0xe2, 0x05, }, 7, 0, "", "",
876"62 91 cd 40 72 e2 05 \tvpsraq $0x5,%zmm26,%zmm22",},
877{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
878"62 92 7d 41 c6 8c fe 7b 00 00 00 \tvgatherpf0dps 0x7b(%r14,%zmm31,8){%k1}",},
879{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
880"62 92 fd 41 c6 8c fe 7b 00 00 00 \tvgatherpf0dpd 0x7b(%r14,%ymm31,8){%k1}",},
881{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
882"62 92 7d 41 c6 94 fe 7b 00 00 00 \tvgatherpf1dps 0x7b(%r14,%zmm31,8){%k1}",},
883{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
884"62 92 fd 41 c6 94 fe 7b 00 00 00 \tvgatherpf1dpd 0x7b(%r14,%ymm31,8){%k1}",},
885{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
886"62 92 7d 41 c6 ac fe 7b 00 00 00 \tvscatterpf0dps 0x7b(%r14,%zmm31,8){%k1}",},
887{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
888"62 92 fd 41 c6 ac fe 7b 00 00 00 \tvscatterpf0dpd 0x7b(%r14,%ymm31,8){%k1}",},
889{{0x62, 0x92, 0x7d, 0x41, 0xc6, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
890"62 92 7d 41 c6 b4 fe 7b 00 00 00 \tvscatterpf1dps 0x7b(%r14,%zmm31,8){%k1}",},
891{{0x62, 0x92, 0xfd, 0x41, 0xc6, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
892"62 92 fd 41 c6 b4 fe 7b 00 00 00 \tvscatterpf1dpd 0x7b(%r14,%ymm31,8){%k1}",},
893{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
894"62 92 7d 41 c7 8c fe 7b 00 00 00 \tvgatherpf0qps 0x7b(%r14,%zmm31,8){%k1}",},
895{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0x8c, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
896"62 92 fd 41 c7 8c fe 7b 00 00 00 \tvgatherpf0qpd 0x7b(%r14,%zmm31,8){%k1}",},
897{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
898"62 92 7d 41 c7 94 fe 7b 00 00 00 \tvgatherpf1qps 0x7b(%r14,%zmm31,8){%k1}",},
899{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0x94, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
900"62 92 fd 41 c7 94 fe 7b 00 00 00 \tvgatherpf1qpd 0x7b(%r14,%zmm31,8){%k1}",},
901{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
902"62 92 7d 41 c7 ac fe 7b 00 00 00 \tvscatterpf0qps 0x7b(%r14,%zmm31,8){%k1}",},
903{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0xac, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
904"62 92 fd 41 c7 ac fe 7b 00 00 00 \tvscatterpf0qpd 0x7b(%r14,%zmm31,8){%k1}",},
905{{0x62, 0x92, 0x7d, 0x41, 0xc7, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
906"62 92 7d 41 c7 b4 fe 7b 00 00 00 \tvscatterpf1qps 0x7b(%r14,%zmm31,8){%k1}",},
907{{0x62, 0x92, 0xfd, 0x41, 0xc7, 0xb4, 0xfe, 0x7b, 0x00, 0x00, 0x00, }, 11, 0, "", "",
908"62 92 fd 41 c7 b4 fe 7b 00 00 00 \tvscatterpf1qpd 0x7b(%r14,%zmm31,8){%k1}",},
909{{0x62, 0x01, 0x95, 0x40, 0x58, 0xf4, }, 6, 0, "", "",
910"62 01 95 40 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30",},
911{{0x62, 0x01, 0x95, 0x47, 0x58, 0xf4, }, 6, 0, "", "",
912"62 01 95 47 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30{%k7}",},
913{{0x62, 0x01, 0x95, 0xc7, 0x58, 0xf4, }, 6, 0, "", "",
914"62 01 95 c7 58 f4 \tvaddpd %zmm28,%zmm29,%zmm30{%k7}{z}",},
915{{0x62, 0x01, 0x95, 0x10, 0x58, 0xf4, }, 6, 0, "", "",
916"62 01 95 10 58 f4 \tvaddpd {rn-sae},%zmm28,%zmm29,%zmm30",},
917{{0x62, 0x01, 0x95, 0x50, 0x58, 0xf4, }, 6, 0, "", "",
918"62 01 95 50 58 f4 \tvaddpd {ru-sae},%zmm28,%zmm29,%zmm30",},
919{{0x62, 0x01, 0x95, 0x30, 0x58, 0xf4, }, 6, 0, "", "",
920"62 01 95 30 58 f4 \tvaddpd {rd-sae},%zmm28,%zmm29,%zmm30",},
921{{0x62, 0x01, 0x95, 0x70, 0x58, 0xf4, }, 6, 0, "", "",
922"62 01 95 70 58 f4 \tvaddpd {rz-sae},%zmm28,%zmm29,%zmm30",},
923{{0x62, 0x61, 0x95, 0x40, 0x58, 0x31, }, 6, 0, "", "",
924"62 61 95 40 58 31 \tvaddpd (%rcx),%zmm29,%zmm30",},
925{{0x62, 0x21, 0x95, 0x40, 0x58, 0xb4, 0xf0, 0x23, 0x01, 0x00, 0x00, }, 11, 0, "", "",
926"62 21 95 40 58 b4 f0 23 01 00 00 \tvaddpd 0x123(%rax,%r14,8),%zmm29,%zmm30",},
927{{0x62, 0x61, 0x95, 0x50, 0x58, 0x31, }, 6, 0, "", "",
928"62 61 95 50 58 31 \tvaddpd (%rcx){1to8},%zmm29,%zmm30",},
929{{0x62, 0x61, 0x95, 0x40, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
930"62 61 95 40 58 72 7f \tvaddpd 0x1fc0(%rdx),%zmm29,%zmm30",},
931{{0x62, 0x61, 0x95, 0x50, 0x58, 0x72, 0x7f, }, 7, 0, "", "",
932"62 61 95 50 58 72 7f \tvaddpd 0x3f8(%rdx){1to8},%zmm29,%zmm30",},
933{{0x62, 0xf1, 0x0c, 0x50, 0xc2, 0x6a, 0x7f, 0x08, }, 8, 0, "", "",
934"62 f1 0c 50 c2 6a 7f 08 \tvcmpeq_uqps 0x1fc(%rdx){1to16},%zmm30,%k5",},
935{{0x62, 0xb1, 0x97, 0x07, 0xc2, 0xac, 0xf0, 0x23, 0x01, 0x00, 0x00, 0x01, }, 12, 0, "", "",
936"62 b1 97 07 c2 ac f0 23 01 00 00 01 \tvcmpltsd 0x123(%rax,%r14,8),%xmm29,%k5{%k7}",},
937{{0x62, 0x91, 0x97, 0x17, 0xc2, 0xec, 0x02, }, 7, 0, "", "",
938"62 91 97 17 c2 ec 02 \tvcmplesd {sae},%xmm28,%xmm29,%k5{%k7}",},
939{{0x62, 0x23, 0x15, 0x07, 0x27, 0xb4, 0xf0, 0x23, 0x01, 0x00, 0x00, 0x5b, }, 12, 0, "", "",
940"62 23 15 07 27 b4 f0 23 01 00 00 5b \tvgetmantss $0x5b,0x123(%rax,%r14,8),%xmm29,%xmm30{%k7}",},
9{{0xf3, 0x0f, 0x1b, 0x00, }, 4, 0, "", "", 941{{0xf3, 0x0f, 0x1b, 0x00, }, 4, 0, "", "",
10"f3 0f 1b 00 \tbndmk (%rax),%bnd0",}, 942"f3 0f 1b 00 \tbndmk (%rax),%bnd0",},
11{{0xf3, 0x41, 0x0f, 0x1b, 0x00, }, 5, 0, "", "", 943{{0xf3, 0x41, 0x0f, 0x1b, 0x00, }, 5, 0, "", "",
@@ -325,19 +1257,19 @@
325{{0x0f, 0x1b, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "", 1257{{0x0f, 0x1b, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
326"0f 1b 84 08 78 56 34 12 \tbndstx %bnd0,0x12345678(%rax,%rcx,1)",}, 1258"0f 1b 84 08 78 56 34 12 \tbndstx %bnd0,0x12345678(%rax,%rcx,1)",},
327{{0xf2, 0xe8, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "call", "unconditional", 1259{{0xf2, 0xe8, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "call", "unconditional",
328"f2 e8 00 00 00 00 \tbnd callq 3f6 <main+0x3f6>",}, 1260"f2 e8 00 00 00 00 \tbnd callq f22 <main+0xf22>",},
329{{0x67, 0xf2, 0xff, 0x10, }, 4, 0, "call", "indirect", 1261{{0x67, 0xf2, 0xff, 0x10, }, 4, 0, "call", "indirect",
330"67 f2 ff 10 \tbnd callq *(%eax)",}, 1262"67 f2 ff 10 \tbnd callq *(%eax)",},
331{{0xf2, 0xc3, }, 2, 0, "ret", "indirect", 1263{{0xf2, 0xc3, }, 2, 0, "ret", "indirect",
332"f2 c3 \tbnd retq ",}, 1264"f2 c3 \tbnd retq ",},
333{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional", 1265{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional",
334"f2 e9 00 00 00 00 \tbnd jmpq 402 <main+0x402>",}, 1266"f2 e9 00 00 00 00 \tbnd jmpq f2e <main+0xf2e>",},
335{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional", 1267{{0xf2, 0xe9, 0x00, 0x00, 0x00, 0x00, }, 6, 0, "jmp", "unconditional",
336"f2 e9 00 00 00 00 \tbnd jmpq 408 <main+0x408>",}, 1268"f2 e9 00 00 00 00 \tbnd jmpq f34 <main+0xf34>",},
337{{0x67, 0xf2, 0xff, 0x21, }, 4, 0, "jmp", "indirect", 1269{{0x67, 0xf2, 0xff, 0x21, }, 4, 0, "jmp", "indirect",
338"67 f2 ff 21 \tbnd jmpq *(%ecx)",}, 1270"67 f2 ff 21 \tbnd jmpq *(%ecx)",},
339{{0xf2, 0x0f, 0x85, 0x00, 0x00, 0x00, 0x00, }, 7, 0, "jcc", "conditional", 1271{{0xf2, 0x0f, 0x85, 0x00, 0x00, 0x00, 0x00, }, 7, 0, "jcc", "conditional",
340"f2 0f 85 00 00 00 00 \tbnd jne 413 <main+0x413>",}, 1272"f2 0f 85 00 00 00 00 \tbnd jne f3f <main+0xf3f>",},
341{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "", 1273{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
342"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",}, 1274"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
343{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "", 1275{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
diff --git a/tools/perf/arch/x86/tests/insn-x86-dat-src.c b/tools/perf/arch/x86/tests/insn-x86-dat-src.c
index 41b1b1c62660..76e0ec379c8b 100644
--- a/tools/perf/arch/x86/tests/insn-x86-dat-src.c
+++ b/tools/perf/arch/x86/tests/insn-x86-dat-src.c
@@ -19,8 +19,882 @@ int main(void)
19 /* Following line is a marker for the awk script - do not change */ 19 /* Following line is a marker for the awk script - do not change */
20 asm volatile("rdtsc"); /* Start here */ 20 asm volatile("rdtsc"); /* Start here */
21 21
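	/* Editor's note, not part of the patch: every asm statement below is
	 * round-tripped by the companion gen-insn-x86-dat.sh/.awk scripts in
	 * this directory -- the file is assembled, objdump'd, and each
	 * instruction becomes one table entry of the form {opcode bytes},
	 * length, rel, "op", "branch", "expected disassembly" in
	 * insn-x86-dat-32.c/insn-x86-dat-64.c above; the insn-x86 test then
	 * checks the kernel's instruction decoder against those entries. */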
22 /* Test fix for vcvtph2ps in x86-opcode-map.txt */
23
24 asm volatile("vcvtph2ps %xmm3,%ymm5");
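	/* Editor's note, not part of the patch: unlike its inverse vcvtps2ph
	 * (0f 3a 1d), vcvtph2ps (0f 38 13) takes no immediate byte, so a bad
	 * opcode-map entry makes the decoder miscompute the instruction
	 * length; this statement pins down the corrected decoding. */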
25
22#ifdef __x86_64__ 26#ifdef __x86_64__
23 27
28 /* AVX-512: Instructions with the same op codes as Mask Instructions */
29
30 asm volatile("cmovno %rax,%rbx");
31 asm volatile("cmovno 0x12345678(%rax),%rcx");
32 asm volatile("cmovno 0x12345678(%rax),%cx");
33
34 asm volatile("cmove %rax,%rbx");
35 asm volatile("cmove 0x12345678(%rax),%rcx");
36 asm volatile("cmove 0x12345678(%rax),%cx");
37
38 asm volatile("seto 0x12345678(%rax)");
39 asm volatile("setno 0x12345678(%rax)");
40 asm volatile("setb 0x12345678(%rax)");
41 asm volatile("setc 0x12345678(%rax)");
42 asm volatile("setnae 0x12345678(%rax)");
43 asm volatile("setae 0x12345678(%rax)");
44 asm volatile("setnb 0x12345678(%rax)");
45 asm volatile("setnc 0x12345678(%rax)");
46 asm volatile("sets 0x12345678(%rax)");
47 asm volatile("setns 0x12345678(%rax)");
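	/* Editor's note, not part of the patch: these legacy forms share
	 * opcode bytes with the mask instructions below -- cmovno and kandw
	 * are both 0f 41, cmove and knotw are both 0f 44, and setcc 0f 90..99
	 * overlaps kmov/kortest/ktest -- the k-forms carry a VEX prefix,
	 * which is all the decoder has to tell them apart. */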
48
49 /* AVX-512: Mask Instructions */
50
51 asm volatile("kandw %k7,%k6,%k5");
52 asm volatile("kandq %k7,%k6,%k5");
53 asm volatile("kandb %k7,%k6,%k5");
54 asm volatile("kandd %k7,%k6,%k5");
55
56 asm volatile("kandnw %k7,%k6,%k5");
57 asm volatile("kandnq %k7,%k6,%k5");
58 asm volatile("kandnb %k7,%k6,%k5");
59 asm volatile("kandnd %k7,%k6,%k5");
60
61 asm volatile("knotw %k7,%k6");
62 asm volatile("knotq %k7,%k6");
63 asm volatile("knotb %k7,%k6");
64 asm volatile("knotd %k7,%k6");
65
66 asm volatile("korw %k7,%k6,%k5");
67 asm volatile("korq %k7,%k6,%k5");
68 asm volatile("korb %k7,%k6,%k5");
69 asm volatile("kord %k7,%k6,%k5");
70
71 asm volatile("kxnorw %k7,%k6,%k5");
72 asm volatile("kxnorq %k7,%k6,%k5");
73 asm volatile("kxnorb %k7,%k6,%k5");
74 asm volatile("kxnord %k7,%k6,%k5");
75
76 asm volatile("kxorw %k7,%k6,%k5");
77 asm volatile("kxorq %k7,%k6,%k5");
78 asm volatile("kxorb %k7,%k6,%k5");
79 asm volatile("kxord %k7,%k6,%k5");
80
81 asm volatile("kaddw %k7,%k6,%k5");
82 asm volatile("kaddq %k7,%k6,%k5");
83 asm volatile("kaddb %k7,%k6,%k5");
84 asm volatile("kaddd %k7,%k6,%k5");
85
86 asm volatile("kunpckbw %k7,%k6,%k5");
87 asm volatile("kunpckwd %k7,%k6,%k5");
88 asm volatile("kunpckdq %k7,%k6,%k5");
89
90 asm volatile("kmovw %k6,%k5");
91 asm volatile("kmovw (%rcx),%k5");
92 asm volatile("kmovw 0x123(%rax,%r14,8),%k5");
93 asm volatile("kmovw %k5,(%rcx)");
94 asm volatile("kmovw %k5,0x123(%rax,%r14,8)");
95 asm volatile("kmovw %eax,%k5");
96 asm volatile("kmovw %ebp,%k5");
97 asm volatile("kmovw %r13d,%k5");
98 asm volatile("kmovw %k5,%eax");
99 asm volatile("kmovw %k5,%ebp");
100 asm volatile("kmovw %k5,%r13d");
101
102 asm volatile("kmovq %k6,%k5");
103 asm volatile("kmovq (%rcx),%k5");
104 asm volatile("kmovq 0x123(%rax,%r14,8),%k5");
105 asm volatile("kmovq %k5,(%rcx)");
106 asm volatile("kmovq %k5,0x123(%rax,%r14,8)");
107 asm volatile("kmovq %rax,%k5");
108 asm volatile("kmovq %rbp,%k5");
109 asm volatile("kmovq %r13,%k5");
110 asm volatile("kmovq %k5,%rax");
111 asm volatile("kmovq %k5,%rbp");
112 asm volatile("kmovq %k5,%r13");
113
114 asm volatile("kmovb %k6,%k5");
115 asm volatile("kmovb (%rcx),%k5");
116 asm volatile("kmovb 0x123(%rax,%r14,8),%k5");
117 asm volatile("kmovb %k5,(%rcx)");
118 asm volatile("kmovb %k5,0x123(%rax,%r14,8)");
119 asm volatile("kmovb %eax,%k5");
120 asm volatile("kmovb %ebp,%k5");
121 asm volatile("kmovb %r13d,%k5");
122 asm volatile("kmovb %k5,%eax");
123 asm volatile("kmovb %k5,%ebp");
124 asm volatile("kmovb %k5,%r13d");
125
126 asm volatile("kmovd %k6,%k5");
127 asm volatile("kmovd (%rcx),%k5");
128 asm volatile("kmovd 0x123(%rax,%r14,8),%k5");
129 asm volatile("kmovd %k5,(%rcx)");
130 asm volatile("kmovd %k5,0x123(%rax,%r14,8)");
131 asm volatile("kmovd %eax,%k5");
132 asm volatile("kmovd %ebp,%k5");
133 asm volatile("kmovd %r13d,%k5");
134 asm volatile("kmovd %k5,%eax");
135 asm volatile("kmovd %k5,%ebp");
136 asm volatile("kmovd %k5,%r13d");
137
138 asm volatile("kortestw %k6,%k5");
139 asm volatile("kortestq %k6,%k5");
140 asm volatile("kortestb %k6,%k5");
141 asm volatile("kortestd %k6,%k5");
142
143 asm volatile("ktestw %k6,%k5");
144 asm volatile("ktestq %k6,%k5");
145 asm volatile("ktestb %k6,%k5");
146 asm volatile("ktestd %k6,%k5");
147
148 asm volatile("kshiftrw $0x12,%k6,%k5");
149 asm volatile("kshiftrq $0x5b,%k6,%k5");
150 asm volatile("kshiftlw $0x12,%k6,%k5");
151 asm volatile("kshiftlq $0x5b,%k6,%k5");
152
153 /* AVX-512: Op code 0f 5b */
154 asm volatile("vcvtdq2ps %xmm5,%xmm6");
155 asm volatile("vcvtqq2ps %zmm29,%ymm6{%k7}");
156 asm volatile("vcvtps2dq %xmm5,%xmm6");
157 asm volatile("vcvttps2dq %xmm5,%xmm6");
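	/* Editor's note, not part of the patch: each "Op code" group collects
	 * the mnemonics sharing that byte -- for 0f 5b, no prefix decodes as
	 * vcvtdq2ps, 66 as vcvtps2dq, f3 as vcvttps2dq, and the EVEX.W=1
	 * no-prefix form as vcvtqq2ps -- so every group below exercises the
	 * decoder's prefix and EVEX handling for one opcode. */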
158
159 /* AVX-512: Op code 0f 6f */
160
161 asm volatile("movq %mm0,%mm4");
162 asm volatile("vmovdqa %ymm4,%ymm6");
163 asm volatile("vmovdqa32 %zmm25,%zmm26");
164 asm volatile("vmovdqa64 %zmm25,%zmm26");
165 asm volatile("vmovdqu %ymm4,%ymm6");
166 asm volatile("vmovdqu32 %zmm29,%zmm30");
167 asm volatile("vmovdqu64 %zmm25,%zmm26");
168 asm volatile("vmovdqu8 %zmm29,%zmm30");
169 asm volatile("vmovdqu16 %zmm25,%zmm26");
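	/* Editor's note, not part of the patch: vmovdqa32/64 and
	 * vmovdqu8/16/32/64 are the EVEX-only forms of vmovdqa/vmovdqu;
	 * the 66/f3/f2 prefix picks the base mnemonic and EVEX.W selects
	 * the element width, e.g. f2+W0 is vmovdqu8, f2+W1 vmovdqu16. */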
170
171 /* AVX-512: Op code 0f 78 */
172
173 asm volatile("vmread %rax,%rbx");
174 asm volatile("vcvttps2udq %zmm25,%zmm26");
175 asm volatile("vcvttpd2udq %zmm29,%ymm6{%k7}");
176 asm volatile("vcvttsd2usi %xmm6,%rax");
177 asm volatile("vcvttss2usi %xmm6,%rax");
178 asm volatile("vcvttps2uqq %ymm5,%zmm26{%k7}");
179 asm volatile("vcvttpd2uqq %zmm29,%zmm30");
180
181 /* AVX-512: Op code 0f 79 */
182
183 asm volatile("vmwrite %rax,%rbx");
184 asm volatile("vcvtps2udq %zmm25,%zmm26");
185 asm volatile("vcvtpd2udq %zmm29,%ymm6{%k7}");
186 asm volatile("vcvtsd2usi %xmm6,%rax");
187 asm volatile("vcvtss2usi %xmm6,%rax");
188 asm volatile("vcvtps2uqq %ymm5,%zmm26{%k7}");
189 asm volatile("vcvtpd2uqq %zmm29,%zmm30");
190
191 /* AVX-512: Op code 0f 7a */
192
193 asm volatile("vcvtudq2pd %ymm5,%zmm29{%k7}");
194 asm volatile("vcvtuqq2pd %zmm25,%zmm26");
195 asm volatile("vcvtudq2ps %zmm29,%zmm30");
196 asm volatile("vcvtuqq2ps %zmm25,%ymm26{%k7}");
197 asm volatile("vcvttps2qq %ymm25,%zmm26{%k7}");
198 asm volatile("vcvttpd2qq %zmm29,%zmm30");
199
200 /* AVX-512: Op code 0f 7b */
201
202 asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
203 asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
204 asm volatile("vcvtps2qq %ymm5,%zmm26{%k7}");
205 asm volatile("vcvtpd2qq %zmm29,%zmm30");
206
207 /* AVX-512: Op code 0f 7f */
208
209 asm volatile("movq.s %mm0,%mm4");
210 asm volatile("vmovdqa %ymm8,%ymm6");
211 asm volatile("vmovdqa32.s %zmm25,%zmm26");
212 asm volatile("vmovdqa64.s %zmm25,%zmm26");
213 asm volatile("vmovdqu %ymm8,%ymm6");
214 asm volatile("vmovdqu32.s %zmm25,%zmm26");
215 asm volatile("vmovdqu64.s %zmm25,%zmm26");
216 asm volatile("vmovdqu8.s %zmm30,(%rcx)");
217 asm volatile("vmovdqu16.s %zmm25,%zmm26");
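	/* Editor's note, not part of the patch: the gas ".s" suffix forces
	 * the swapped (store-form) encoding, so these assemble to the 0f 7f
	 * opcode under test instead of the 0f 6f load form covered above. */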
218
219 /* AVX-512: Op code 0f db */
220
221 asm volatile("pand %mm1,%mm2");
222 asm volatile("pand %xmm1,%xmm2");
223 asm volatile("vpand %ymm4,%ymm6,%ymm2");
224 asm volatile("vpandd %zmm24,%zmm25,%zmm26");
225 asm volatile("vpandq %zmm24,%zmm25,%zmm26");
226
227 /* AVX-512: Op code 0f df */
228
229 asm volatile("pandn %mm1,%mm2");
230 asm volatile("pandn %xmm1,%xmm2");
231 asm volatile("vpandn %ymm4,%ymm6,%ymm2");
232 asm volatile("vpandnd %zmm24,%zmm25,%zmm26");
233 asm volatile("vpandnq %zmm24,%zmm25,%zmm26");
234
235 /* AVX-512: Op code 0f e6 */
236
237 asm volatile("vcvttpd2dq %xmm1,%xmm2");
238 asm volatile("vcvtdq2pd %xmm5,%xmm6");
239 asm volatile("vcvtdq2pd %ymm5,%zmm26{%k7}");
240 asm volatile("vcvtqq2pd %zmm25,%zmm26");
241 asm volatile("vcvtpd2dq %xmm1,%xmm2");
242
243 /* AVX-512: Op code 0f eb */
244
245 asm volatile("por %mm4,%mm6");
246 asm volatile("vpor %ymm4,%ymm6,%ymm2");
247 asm volatile("vpord %zmm24,%zmm25,%zmm26");
248 asm volatile("vporq %zmm24,%zmm25,%zmm26");
249
250 /* AVX-512: Op code 0f ef */
251
252 asm volatile("pxor %mm4,%mm6");
253 asm volatile("vpxor %ymm4,%ymm6,%ymm2");
254 asm volatile("vpxord %zmm24,%zmm25,%zmm26");
255 asm volatile("vpxorq %zmm24,%zmm25,%zmm26");
256
257 /* AVX-512: Op code 0f 38 10 */
258
259 asm volatile("pblendvb %xmm1,%xmm0");
260 asm volatile("vpsrlvw %zmm27,%zmm28,%zmm29");
261 asm volatile("vpmovuswb %zmm28,%ymm6{%k7}");
262
263 /* AVX-512: Op code 0f 38 11 */
264
265 asm volatile("vpmovusdb %zmm28,%xmm6{%k7}");
266 asm volatile("vpsravw %zmm27,%zmm28,%zmm29");
267
268 /* AVX-512: Op code 0f 38 12 */
269
270 asm volatile("vpmovusqb %zmm27,%xmm6{%k7}");
271 asm volatile("vpsllvw %zmm27,%zmm28,%zmm29");
272
273 /* AVX-512: Op code 0f 38 13 */
274
275 asm volatile("vcvtph2ps %xmm3,%ymm5");
276 asm volatile("vcvtph2ps %ymm5,%zmm27{%k7}");
277 asm volatile("vpmovusdw %zmm27,%ymm6{%k7}");
278
279 /* AVX-512: Op code 0f 38 14 */
280
281 asm volatile("blendvps %xmm1,%xmm0");
282 asm volatile("vpmovusqw %zmm27,%xmm6{%k7}");
283 asm volatile("vprorvd %zmm27,%zmm28,%zmm29");
284 asm volatile("vprorvq %zmm27,%zmm28,%zmm29");
285
286 /* AVX-512: Op code 0f 38 15 */
287
288 asm volatile("blendvpd %xmm1,%xmm0");
289 asm volatile("vpmovusqd %zmm27,%ymm6{%k7}");
290 asm volatile("vprolvd %zmm27,%zmm28,%zmm29");
291 asm volatile("vprolvq %zmm27,%zmm28,%zmm29");
292
293 /* AVX-512: Op code 0f 38 16 */
294
295 asm volatile("vpermps %ymm4,%ymm6,%ymm2");
296 asm volatile("vpermps %ymm24,%ymm26,%ymm22{%k7}");
297 asm volatile("vpermpd %ymm24,%ymm26,%ymm22{%k7}");
298
299 /* AVX-512: Op code 0f 38 19 */
300
301 asm volatile("vbroadcastsd %xmm4,%ymm6");
302 asm volatile("vbroadcastf32x2 %xmm27,%zmm26");
303
304 /* AVX-512: Op code 0f 38 1a */
305
306 asm volatile("vbroadcastf128 (%rcx),%ymm4");
307 asm volatile("vbroadcastf32x4 (%rcx),%zmm26");
308 asm volatile("vbroadcastf64x2 (%rcx),%zmm26");
309
310 /* AVX-512: Op code 0f 38 1b */
311
312 asm volatile("vbroadcastf32x8 (%rcx),%zmm27");
313 asm volatile("vbroadcastf64x4 (%rcx),%zmm26");
314
315 /* AVX-512: Op code 0f 38 1f */
316
317 asm volatile("vpabsq %zmm27,%zmm28");
318
319 /* AVX-512: Op code 0f 38 20 */
320
321 asm volatile("vpmovsxbw %xmm4,%xmm5");
322 asm volatile("vpmovswb %zmm27,%ymm6{%k7}");
323
324 /* AVX-512: Op code 0f 38 21 */
325
326 asm volatile("vpmovsxbd %xmm4,%ymm6");
327 asm volatile("vpmovsdb %zmm27,%xmm6{%k7}");
328
329 /* AVX-512: Op code 0f 38 22 */
330
331 asm volatile("vpmovsxbq %xmm4,%ymm4");
332 asm volatile("vpmovsqb %zmm27,%xmm6{%k7}");
333
334 /* AVX-512: Op code 0f 38 23 */
335
336 asm volatile("vpmovsxwd %xmm4,%ymm4");
337 asm volatile("vpmovsdw %zmm27,%ymm6{%k7}");
338
339 /* AVX-512: Op code 0f 38 24 */
340
341 asm volatile("vpmovsxwq %xmm4,%ymm6");
342 asm volatile("vpmovsqw %zmm27,%xmm6{%k7}");
343
344 /* AVX-512: Op code 0f 38 25 */
345
346 asm volatile("vpmovsxdq %xmm4,%ymm4");
347 asm volatile("vpmovsqd %zmm27,%ymm6{%k7}");
348
349 /* AVX-512: Op code 0f 38 26 */
350
351 asm volatile("vptestmb %zmm27,%zmm28,%k5");
352 asm volatile("vptestmw %zmm27,%zmm28,%k5");
353 asm volatile("vptestnmb %zmm26,%zmm27,%k5");
354 asm volatile("vptestnmw %zmm26,%zmm27,%k5");
355
356 /* AVX-512: Op code 0f 38 27 */
357
358 asm volatile("vptestmd %zmm27,%zmm28,%k5");
359 asm volatile("vptestmq %zmm27,%zmm28,%k5");
360 asm volatile("vptestnmd %zmm26,%zmm27,%k5");
361 asm volatile("vptestnmq %zmm26,%zmm27,%k5");
362
363 /* AVX-512: Op code 0f 38 28 */
364
365 asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
366 asm volatile("vpmovm2b %k5,%zmm28");
367 asm volatile("vpmovm2w %k5,%zmm28");
368
369 /* AVX-512: Op code 0f 38 29 */
370
371 asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
372 asm volatile("vpmovb2m %zmm28,%k5");
373 asm volatile("vpmovw2m %zmm28,%k5");
374
375 /* AVX-512: Op code 0f 38 2a */
376
377 asm volatile("vmovntdqa (%rcx),%ymm4");
378 asm volatile("vpbroadcastmb2q %k6,%zmm30");
379
380 /* AVX-512: Op code 0f 38 2c */
381
382 asm volatile("vmaskmovps (%rcx),%ymm4,%ymm6");
383 asm volatile("vscalefps %zmm24,%zmm25,%zmm26");
384 asm volatile("vscalefpd %zmm24,%zmm25,%zmm26");
385
386 /* AVX-512: Op code 0f 38 2d */
387
388 asm volatile("vmaskmovpd (%rcx),%ymm4,%ymm6");
389 asm volatile("vscalefss %xmm24,%xmm25,%xmm26{%k7}");
390 asm volatile("vscalefsd %xmm24,%xmm25,%xmm26{%k7}");
391
392 /* AVX-512: Op code 0f 38 30 */
393
394 asm volatile("vpmovzxbw %xmm4,%ymm4");
395 asm volatile("vpmovwb %zmm27,%ymm6{%k7}");
396
397 /* AVX-512: Op code 0f 38 31 */
398
399 asm volatile("vpmovzxbd %xmm4,%ymm6");
400 asm volatile("vpmovdb %zmm27,%xmm6{%k7}");
401
402 /* AVX-512: Op code 0f 38 32 */
403
404 asm volatile("vpmovzxbq %xmm4,%ymm4");
405 asm volatile("vpmovqb %zmm27,%xmm6{%k7}");
406
407 /* AVX-512: Op code 0f 38 33 */
408
409 asm volatile("vpmovzxwd %xmm4,%ymm4");
410 asm volatile("vpmovdw %zmm27,%ymm6{%k7}");
411
412 /* AVX-512: Op code 0f 38 34 */
413
414 asm volatile("vpmovzxwq %xmm4,%ymm6");
415 asm volatile("vpmovqw %zmm27,%xmm6{%k7}");
416
417 /* AVX-512: Op code 0f 38 35 */
418
419 asm volatile("vpmovzxdq %xmm4,%ymm4");
420 asm volatile("vpmovqd %zmm27,%ymm6{%k7}");
421
422 /* AVX-512: Op code 0f 38 36 */
423
424 asm volatile("vpermd %ymm4,%ymm6,%ymm2");
425 asm volatile("vpermd %ymm24,%ymm26,%ymm22{%k7}");
426 asm volatile("vpermq %ymm24,%ymm26,%ymm22{%k7}");
427
428 /* AVX-512: Op code 0f 38 38 */
429
430 asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
431 asm volatile("vpmovm2d %k5,%zmm28");
432 asm volatile("vpmovm2q %k5,%zmm28");
433
434 /* AVX-512: Op code 0f 38 39 */
435
436 asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
437 asm volatile("vpminsd %zmm24,%zmm25,%zmm26");
438 asm volatile("vpminsq %zmm24,%zmm25,%zmm26");
439 asm volatile("vpmovd2m %zmm28,%k5");
440 asm volatile("vpmovq2m %zmm28,%k5");
441
442 /* AVX-512: Op code 0f 38 3a */
443
444 asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
445 asm volatile("vpbroadcastmw2d %k6,%zmm28");
446
447 /* AVX-512: Op code 0f 38 3b */
448
449 asm volatile("vpminud %ymm4,%ymm6,%ymm2");
450 asm volatile("vpminud %zmm24,%zmm25,%zmm26");
451 asm volatile("vpminuq %zmm24,%zmm25,%zmm26");
452
453 /* AVX-512: Op code 0f 38 3d */
454
455 asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
456 asm volatile("vpmaxsd %zmm24,%zmm25,%zmm26");
457 asm volatile("vpmaxsq %zmm24,%zmm25,%zmm26");
458
459 /* AVX-512: Op code 0f 38 3f */
460
461 asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
462 asm volatile("vpmaxud %zmm24,%zmm25,%zmm26");
463 asm volatile("vpmaxuq %zmm24,%zmm25,%zmm26");
464
465 /* AVX-512: Op code 0f 38 40 */
466
467 asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
468 asm volatile("vpmulld %zmm24,%zmm25,%zmm26");
469 asm volatile("vpmullq %zmm24,%zmm25,%zmm26");
470
471 /* AVX-512: Op code 0f 38 42 */
472
473 asm volatile("vgetexpps %zmm25,%zmm26");
474 asm volatile("vgetexppd %zmm27,%zmm28");
475
476 /* AVX-512: Op code 0f 38 43 */
477
478 asm volatile("vgetexpss %xmm24,%xmm25,%xmm26{%k7}");
479 asm volatile("vgetexpsd %xmm28,%xmm29,%xmm30{%k7}");
480
481 /* AVX-512: Op code 0f 38 44 */
482
483 asm volatile("vplzcntd %zmm27,%zmm28");
484 asm volatile("vplzcntq %zmm27,%zmm28");
485
486 /* AVX-512: Op code 0f 38 46 */
487
488 asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
489 asm volatile("vpsravd %zmm24,%zmm25,%zmm26");
490 asm volatile("vpsravq %zmm24,%zmm25,%zmm26");
491
492 /* AVX-512: Op code 0f 38 4c */
493
494 asm volatile("vrcp14ps %zmm25,%zmm26");
495 asm volatile("vrcp14pd %zmm27,%zmm28");
496
497 /* AVX-512: Op code 0f 38 4d */
498
499 asm volatile("vrcp14ss %xmm24,%xmm25,%xmm26{%k7}");
500 asm volatile("vrcp14sd %xmm24,%xmm25,%xmm26{%k7}");
501
502 /* AVX-512: Op code 0f 38 4e */
503
504 asm volatile("vrsqrt14ps %zmm25,%zmm26");
505 asm volatile("vrsqrt14pd %zmm27,%zmm28");
506
507 /* AVX-512: Op code 0f 38 4f */
508
509 asm volatile("vrsqrt14ss %xmm24,%xmm25,%xmm26{%k7}");
510 asm volatile("vrsqrt14sd %xmm24,%xmm25,%xmm26{%k7}");
511
512 /* AVX-512: Op code 0f 38 59 */
513
514 asm volatile("vpbroadcastq %xmm4,%xmm6");
515 asm volatile("vbroadcasti32x2 %xmm27,%zmm26");
516
517 /* AVX-512: Op code 0f 38 5a */
518
519 asm volatile("vbroadcasti128 (%rcx),%ymm4");
520 asm volatile("vbroadcasti32x4 (%rcx),%zmm26");
521 asm volatile("vbroadcasti64x2 (%rcx),%zmm26");
522
523 /* AVX-512: Op code 0f 38 5b */
524
525 asm volatile("vbroadcasti32x8 (%rcx),%zmm28");
526 asm volatile("vbroadcasti64x4 (%rcx),%zmm26");
527
528 /* AVX-512: Op code 0f 38 64 */
529
530 asm volatile("vpblendmd %zmm26,%zmm27,%zmm28");
531 asm volatile("vpblendmq %zmm26,%zmm27,%zmm28");
532
533 /* AVX-512: Op code 0f 38 65 */
534
535 asm volatile("vblendmps %zmm24,%zmm25,%zmm26");
536 asm volatile("vblendmpd %zmm26,%zmm27,%zmm28");
537
538 /* AVX-512: Op code 0f 38 66 */
539
540 asm volatile("vpblendmb %zmm26,%zmm27,%zmm28");
541 asm volatile("vpblendmw %zmm26,%zmm27,%zmm28");
542
543 /* AVX-512: Op code 0f 38 75 */
544
545 asm volatile("vpermi2b %zmm24,%zmm25,%zmm26");
546 asm volatile("vpermi2w %zmm26,%zmm27,%zmm28");
547
548 /* AVX-512: Op code 0f 38 76 */
549
550 asm volatile("vpermi2d %zmm26,%zmm27,%zmm28");
551 asm volatile("vpermi2q %zmm26,%zmm27,%zmm28");
552
553 /* AVX-512: Op code 0f 38 77 */
554
555 asm volatile("vpermi2ps %zmm26,%zmm27,%zmm28");
556 asm volatile("vpermi2pd %zmm26,%zmm27,%zmm28");
557
558 /* AVX-512: Op code 0f 38 7a */
559
560 asm volatile("vpbroadcastb %eax,%xmm30");
561
562 /* AVX-512: Op code 0f 38 7b */
563
564 asm volatile("vpbroadcastw %eax,%xmm30");
565
566 /* AVX-512: Op code 0f 38 7c */
567
568 asm volatile("vpbroadcastd %eax,%xmm30");
569 asm volatile("vpbroadcastq %rax,%zmm30");
570
571 /* AVX-512: Op code 0f 38 7d */
572
573 asm volatile("vpermt2b %zmm26,%zmm27,%zmm28");
574 asm volatile("vpermt2w %zmm26,%zmm27,%zmm28");
575
576 /* AVX-512: Op code 0f 38 7e */
577
578 asm volatile("vpermt2d %zmm26,%zmm27,%zmm28");
579 asm volatile("vpermt2q %zmm26,%zmm27,%zmm28");
580
581 /* AVX-512: Op code 0f 38 7f */
582
583 asm volatile("vpermt2ps %zmm26,%zmm27,%zmm28");
584 asm volatile("vpermt2pd %zmm26,%zmm27,%zmm28");
585
586 /* AVX-512: Op code 0f 38 83 */
587
588 asm volatile("vpmultishiftqb %zmm26,%zmm27,%zmm28");
589
590 /* AVX-512: Op code 0f 38 88 */
591
592 asm volatile("vexpandps (%rcx),%zmm26");
593 asm volatile("vexpandpd (%rcx),%zmm28");
594
595 /* AVX-512: Op code 0f 38 89 */
596
597 asm volatile("vpexpandd (%rcx),%zmm28");
598 asm volatile("vpexpandq (%rcx),%zmm26");
599
600 /* AVX-512: Op code 0f 38 8a */
601
602 asm volatile("vcompressps %zmm28,(%rcx)");
603 asm volatile("vcompresspd %zmm28,(%rcx)");
604
605 /* AVX-512: Op code 0f 38 8b */
606
607 asm volatile("vpcompressd %zmm28,(%rcx)");
608 asm volatile("vpcompressq %zmm26,(%rcx)");
609
610 /* AVX-512: Op code 0f 38 8d */
611
612 asm volatile("vpermb %zmm26,%zmm27,%zmm28");
613 asm volatile("vpermw %zmm26,%zmm27,%zmm28");
614
615 /* AVX-512: Op code 0f 38 90 */
616
617 asm volatile("vpgatherdd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
618 asm volatile("vpgatherdq %xmm2,0x04(%rbp,%xmm7,2),%xmm1");
619 asm volatile("vpgatherdd 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
620 asm volatile("vpgatherdq 0x7b(%rbp,%ymm27,8),%zmm26{%k1}");
621
622 /* AVX-512: Op code 0f 38 91 */
623
624 asm volatile("vpgatherqd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
625 asm volatile("vpgatherqq %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
626 asm volatile("vpgatherqd 0x7b(%rbp,%zmm27,8),%ymm26{%k1}");
627 asm volatile("vpgatherqq 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
628
629 /* AVX-512: Op code 0f 38 a0 */
630
631 asm volatile("vpscatterdd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
632 asm volatile("vpscatterdq %zmm26,0x7b(%rbp,%ymm27,8){%k1}");
633
634 /* AVX-512: Op code 0f 38 a1 */
635
636 asm volatile("vpscatterqd %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
637 asm volatile("vpscatterqq %ymm6,0x7b(%rbp,%ymm27,8){%k1}");
638
639 /* AVX-512: Op code 0f 38 a2 */
640
641 asm volatile("vscatterdps %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
642 asm volatile("vscatterdpd %zmm28,0x7b(%rbp,%ymm27,8){%k1}");
643
644 /* AVX-512: Op code 0f 38 a3 */
645
646 asm volatile("vscatterqps %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
647 asm volatile("vscatterqpd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
648
649 /* AVX-512: Op code 0f 38 b4 */
650
651 asm volatile("vpmadd52luq %zmm26,%zmm27,%zmm28");
652
653 /* AVX-512: Op code 0f 38 b5 */
654
655 asm volatile("vpmadd52huq %zmm26,%zmm27,%zmm28");
656
657 /* AVX-512: Op code 0f 38 c4 */
658
659 asm volatile("vpconflictd %zmm26,%zmm27");
660 asm volatile("vpconflictq %zmm26,%zmm27");
661
662 /* AVX-512: Op code 0f 38 c8 */
663
664 asm volatile("vexp2ps %zmm29,%zmm30");
665 asm volatile("vexp2pd %zmm26,%zmm27");
666
667 /* AVX-512: Op code 0f 38 ca */
668
669 asm volatile("vrcp28ps %zmm29,%zmm30");
670 asm volatile("vrcp28pd %zmm26,%zmm27");
671
672 /* AVX-512: Op code 0f 38 cb */
673
674 asm volatile("vrcp28ss %xmm28,%xmm29,%xmm30{%k7}");
675 asm volatile("vrcp28sd %xmm25,%xmm26,%xmm27{%k7}");
676
677 /* AVX-512: Op code 0f 38 cc */
678
679 asm volatile("vrsqrt28ps %zmm29,%zmm30");
680 asm volatile("vrsqrt28pd %zmm26,%zmm27");
681
682 /* AVX-512: Op code 0f 38 cd */
683
684 asm volatile("vrsqrt28ss %xmm28,%xmm29,%xmm30{%k7}");
685 asm volatile("vrsqrt28sd %xmm25,%xmm26,%xmm27{%k7}");
686
687 /* AVX-512: Op code 0f 3a 03 */
688
689 asm volatile("valignd $0x12,%zmm28,%zmm29,%zmm30");
690 asm volatile("valignq $0x12,%zmm25,%zmm26,%zmm27");
691
692 /* AVX-512: Op code 0f 3a 08 */
693
694 asm volatile("vroundps $0x5,%ymm6,%ymm2");
695 asm volatile("vrndscaleps $0x12,%zmm25,%zmm26");
696
697 /* AVX-512: Op code 0f 3a 09 */
698
699 asm volatile("vroundpd $0x5,%ymm6,%ymm2");
700 asm volatile("vrndscalepd $0x12,%zmm25,%zmm26");
701
702	/* AVX-512: Op code 0f 3a 0a */
703
704 asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
705 asm volatile("vrndscaless $0x12,%xmm24,%xmm25,%xmm26{%k7}");
706
707 /* AVX-512: Op code 0f 3a 0b */
708
709 asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
710 asm volatile("vrndscalesd $0x12,%xmm24,%xmm25,%xmm26{%k7}");
711
712 /* AVX-512: Op code 0f 3a 18 */
713
714 asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
715 asm volatile("vinsertf32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
716 asm volatile("vinsertf64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
717
718 /* AVX-512: Op code 0f 3a 19 */
719
720 asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
721 asm volatile("vextractf32x4 $0x12,%zmm25,%xmm26{%k7}");
722 asm volatile("vextractf64x2 $0x12,%zmm25,%xmm26{%k7}");
723
724 /* AVX-512: Op code 0f 3a 1a */
725
726 asm volatile("vinsertf32x8 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
727 asm volatile("vinsertf64x4 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
728
729 /* AVX-512: Op code 0f 3a 1b */
730
731 asm volatile("vextractf32x8 $0x12,%zmm29,%ymm30{%k7}");
732 asm volatile("vextractf64x4 $0x12,%zmm26,%ymm27{%k7}");
733
734 /* AVX-512: Op code 0f 3a 1e */
735
736 asm volatile("vpcmpud $0x12,%zmm29,%zmm30,%k5");
737 asm volatile("vpcmpuq $0x12,%zmm26,%zmm27,%k5");
738
739 /* AVX-512: Op code 0f 3a 1f */
740
741 asm volatile("vpcmpd $0x12,%zmm29,%zmm30,%k5");
742 asm volatile("vpcmpq $0x12,%zmm26,%zmm27,%k5");
743
744 /* AVX-512: Op code 0f 3a 23 */
745
746 asm volatile("vshuff32x4 $0x12,%zmm28,%zmm29,%zmm30");
747 asm volatile("vshuff64x2 $0x12,%zmm25,%zmm26,%zmm27");
748
749 /* AVX-512: Op code 0f 3a 25 */
750
751 asm volatile("vpternlogd $0x12,%zmm28,%zmm29,%zmm30");
752 asm volatile("vpternlogq $0x12,%zmm28,%zmm29,%zmm30");
753
754 /* AVX-512: Op code 0f 3a 26 */
755
756 asm volatile("vgetmantps $0x12,%zmm26,%zmm27");
757 asm volatile("vgetmantpd $0x12,%zmm29,%zmm30");
758
759 /* AVX-512: Op code 0f 3a 27 */
760
761 asm volatile("vgetmantss $0x12,%xmm25,%xmm26,%xmm27{%k7}");
762 asm volatile("vgetmantsd $0x12,%xmm28,%xmm29,%xmm30{%k7}");
763
764 /* AVX-512: Op code 0f 3a 38 */
765
766 asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
767 asm volatile("vinserti32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
768 asm volatile("vinserti64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
769
770 /* AVX-512: Op code 0f 3a 39 */
771
772 asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
773 asm volatile("vextracti32x4 $0x12,%zmm25,%xmm26{%k7}");
774 asm volatile("vextracti64x2 $0x12,%zmm25,%xmm26{%k7}");
775
776 /* AVX-512: Op code 0f 3a 3a */
777
778 asm volatile("vinserti32x8 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
779 asm volatile("vinserti64x4 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
780
781 /* AVX-512: Op code 0f 3a 3b */
782
783 asm volatile("vextracti32x8 $0x12,%zmm29,%ymm30{%k7}");
784 asm volatile("vextracti64x4 $0x12,%zmm26,%ymm27{%k7}");
785
786 /* AVX-512: Op code 0f 3a 3e */
787
788 asm volatile("vpcmpub $0x12,%zmm29,%zmm30,%k5");
789 asm volatile("vpcmpuw $0x12,%zmm26,%zmm27,%k5");
790
791 /* AVX-512: Op code 0f 3a 3f */
792
793 asm volatile("vpcmpb $0x12,%zmm29,%zmm30,%k5");
794 asm volatile("vpcmpw $0x12,%zmm26,%zmm27,%k5");
795
796	/* AVX-512: Op code 0f 3a 42 */
797
798 asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
799 asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
800
801 /* AVX-512: Op code 0f 3a 43 */
802
803 asm volatile("vshufi32x4 $0x12,%zmm25,%zmm26,%zmm27");
804 asm volatile("vshufi64x2 $0x12,%zmm28,%zmm29,%zmm30");
805
806 /* AVX-512: Op code 0f 3a 50 */
807
808 asm volatile("vrangeps $0x12,%zmm25,%zmm26,%zmm27");
809 asm volatile("vrangepd $0x12,%zmm28,%zmm29,%zmm30");
810
811 /* AVX-512: Op code 0f 3a 51 */
812
813 asm volatile("vrangess $0x12,%xmm25,%xmm26,%xmm27");
814 asm volatile("vrangesd $0x12,%xmm28,%xmm29,%xmm30");
815
816 /* AVX-512: Op code 0f 3a 54 */
817
818 asm volatile("vfixupimmps $0x12,%zmm28,%zmm29,%zmm30");
819 asm volatile("vfixupimmpd $0x12,%zmm25,%zmm26,%zmm27");
820
821 /* AVX-512: Op code 0f 3a 55 */
822
823 asm volatile("vfixupimmss $0x12,%xmm28,%xmm29,%xmm30{%k7}");
824 asm volatile("vfixupimmsd $0x12,%xmm25,%xmm26,%xmm27{%k7}");
825
826 /* AVX-512: Op code 0f 3a 56 */
827
828 asm volatile("vreduceps $0x12,%zmm26,%zmm27");
829 asm volatile("vreducepd $0x12,%zmm29,%zmm30");
830
831 /* AVX-512: Op code 0f 3a 57 */
832
833 asm volatile("vreducess $0x12,%xmm25,%xmm26,%xmm27");
834 asm volatile("vreducesd $0x12,%xmm28,%xmm29,%xmm30");
835
836 /* AVX-512: Op code 0f 3a 66 */
837
838 asm volatile("vfpclassps $0x12,%zmm27,%k5");
839 asm volatile("vfpclasspd $0x12,%zmm30,%k5");
840
841 /* AVX-512: Op code 0f 3a 67 */
842
843 asm volatile("vfpclassss $0x12,%xmm27,%k5");
844 asm volatile("vfpclasssd $0x12,%xmm30,%k5");
845
846 /* AVX-512: Op code 0f 72 (Grp13) */
847
848 asm volatile("vprord $0x12,%zmm25,%zmm26");
849 asm volatile("vprorq $0x12,%zmm25,%zmm26");
850 asm volatile("vprold $0x12,%zmm29,%zmm30");
851 asm volatile("vprolq $0x12,%zmm29,%zmm30");
852 asm volatile("psrad $0x2,%mm6");
853 asm volatile("vpsrad $0x5,%ymm6,%ymm2");
854 asm volatile("vpsrad $0x5,%zmm26,%zmm22");
855 asm volatile("vpsraq $0x5,%zmm26,%zmm22");
856
857 /* AVX-512: Op code 0f 38 c6 (Grp18) */
858
859 asm volatile("vgatherpf0dps 0x7b(%r14,%zmm31,8){%k1}");
860 asm volatile("vgatherpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
861 asm volatile("vgatherpf1dps 0x7b(%r14,%zmm31,8){%k1}");
862 asm volatile("vgatherpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
863 asm volatile("vscatterpf0dps 0x7b(%r14,%zmm31,8){%k1}");
864 asm volatile("vscatterpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
865 asm volatile("vscatterpf1dps 0x7b(%r14,%zmm31,8){%k1}");
866 asm volatile("vscatterpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
867
868 /* AVX-512: Op code 0f 38 c7 (Grp19) */
869
870 asm volatile("vgatherpf0qps 0x7b(%r14,%zmm31,8){%k1}");
871 asm volatile("vgatherpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
872 asm volatile("vgatherpf1qps 0x7b(%r14,%zmm31,8){%k1}");
873 asm volatile("vgatherpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
874 asm volatile("vscatterpf0qps 0x7b(%r14,%zmm31,8){%k1}");
875 asm volatile("vscatterpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
876 asm volatile("vscatterpf1qps 0x7b(%r14,%zmm31,8){%k1}");
877 asm volatile("vscatterpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
878
879 /* AVX-512: Examples */
880
881 asm volatile("vaddpd %zmm28,%zmm29,%zmm30");
882 asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}");
883 asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}{z}");
884 asm volatile("vaddpd {rn-sae},%zmm28,%zmm29,%zmm30");
885 asm volatile("vaddpd {ru-sae},%zmm28,%zmm29,%zmm30");
886 asm volatile("vaddpd {rd-sae},%zmm28,%zmm29,%zmm30");
887 asm volatile("vaddpd {rz-sae},%zmm28,%zmm29,%zmm30");
888 asm volatile("vaddpd (%rcx),%zmm29,%zmm30");
889 asm volatile("vaddpd 0x123(%rax,%r14,8),%zmm29,%zmm30");
890 asm volatile("vaddpd (%rcx){1to8},%zmm29,%zmm30");
891 asm volatile("vaddpd 0x1fc0(%rdx),%zmm29,%zmm30");
892 asm volatile("vaddpd 0x3f8(%rdx){1to8},%zmm29,%zmm30");
893 asm volatile("vcmpeq_uqps 0x1fc(%rdx){1to16},%zmm30,%k5");
894 asm volatile("vcmpltsd 0x123(%rax,%r14,8),%xmm29,%k5{%k7}");
895 asm volatile("vcmplesd {sae},%xmm28,%xmm29,%k5{%k7}");
896 asm volatile("vgetmantss $0x5b,0x123(%rax,%r14,8),%xmm29,%xmm30{%k7}");
897
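	/*
	 * Editorial aside, not part of the test source: the suffixes the
	 * examples above exercise map onto EVEX prefix fields as follows.
	 *
	 *   {%k7}              aaa bits: opmask register select (merging)
	 *   {z}                z bit:    zeroing- instead of merging-masking
	 *   {rn/ru/rd/rz-sae}  b=1, with L'L reused as static rounding control
	 *   {sae}              b=1: suppress-all-exceptions, default rounding
	 *   {1to8}/{1to16}     b=1 on a memory operand: embedded broadcast
	 */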
24 /* bndmk m64, bnd */ 898 /* bndmk m64, bnd */
25 899
26 asm volatile("bndmk (%rax), %bnd0"); 900 asm volatile("bndmk (%rax), %bnd0");
@@ -471,6 +1345,921 @@ int main(void)
471 1345
472#else /* #ifdef __x86_64__ */ 1346#else /* #ifdef __x86_64__ */
473 1347
1348 /* bound r32, mem (same op code as EVEX prefix) */
1349
1350 asm volatile("bound %eax, 0x12345678(%ecx)");
1351 asm volatile("bound %ecx, 0x12345678(%eax)");
1352 asm volatile("bound %edx, 0x12345678(%eax)");
1353 asm volatile("bound %ebx, 0x12345678(%eax)");
1354 asm volatile("bound %esp, 0x12345678(%eax)");
1355 asm volatile("bound %ebp, 0x12345678(%eax)");
1356 asm volatile("bound %esi, 0x12345678(%eax)");
1357 asm volatile("bound %edi, 0x12345678(%eax)");
1358 asm volatile("bound %ecx, (%eax)");
1359 asm volatile("bound %eax, (0x12345678)");
1360 asm volatile("bound %edx, (%ecx,%eax,1)");
1361 asm volatile("bound %edx, 0x12345678(,%eax,1)");
1362 asm volatile("bound %edx, (%eax,%ecx,1)");
1363 asm volatile("bound %edx, (%eax,%ecx,8)");
1364 asm volatile("bound %edx, 0x12(%eax)");
1365 asm volatile("bound %edx, 0x12(%ebp)");
1366 asm volatile("bound %edx, 0x12(%ecx,%eax,1)");
1367 asm volatile("bound %edx, 0x12(%ebp,%eax,1)");
1368 asm volatile("bound %edx, 0x12(%eax,%ecx,1)");
1369 asm volatile("bound %edx, 0x12(%eax,%ecx,8)");
1370 asm volatile("bound %edx, 0x12345678(%eax)");
1371 asm volatile("bound %edx, 0x12345678(%ebp)");
1372 asm volatile("bound %edx, 0x12345678(%ecx,%eax,1)");
1373 asm volatile("bound %edx, 0x12345678(%ebp,%eax,1)");
1374 asm volatile("bound %edx, 0x12345678(%eax,%ecx,1)");
1375 asm volatile("bound %edx, 0x12345678(%eax,%ecx,8)");
1376
1377 /* bound r16, mem (same op code as EVEX prefix) */
1378
1379 asm volatile("bound %ax, 0x12345678(%ecx)");
1380 asm volatile("bound %cx, 0x12345678(%eax)");
1381 asm volatile("bound %dx, 0x12345678(%eax)");
1382 asm volatile("bound %bx, 0x12345678(%eax)");
1383 asm volatile("bound %sp, 0x12345678(%eax)");
1384 asm volatile("bound %bp, 0x12345678(%eax)");
1385 asm volatile("bound %si, 0x12345678(%eax)");
1386 asm volatile("bound %di, 0x12345678(%eax)");
1387 asm volatile("bound %cx, (%eax)");
1388 asm volatile("bound %ax, (0x12345678)");
1389 asm volatile("bound %dx, (%ecx,%eax,1)");
1390 asm volatile("bound %dx, 0x12345678(,%eax,1)");
1391 asm volatile("bound %dx, (%eax,%ecx,1)");
1392 asm volatile("bound %dx, (%eax,%ecx,8)");
1393 asm volatile("bound %dx, 0x12(%eax)");
1394 asm volatile("bound %dx, 0x12(%ebp)");
1395 asm volatile("bound %dx, 0x12(%ecx,%eax,1)");
1396 asm volatile("bound %dx, 0x12(%ebp,%eax,1)");
1397 asm volatile("bound %dx, 0x12(%eax,%ecx,1)");
1398 asm volatile("bound %dx, 0x12(%eax,%ecx,8)");
1399 asm volatile("bound %dx, 0x12345678(%eax)");
1400 asm volatile("bound %dx, 0x12345678(%ebp)");
1401 asm volatile("bound %dx, 0x12345678(%ecx,%eax,1)");
1402 asm volatile("bound %dx, 0x12345678(%ebp,%eax,1)");
1403 asm volatile("bound %dx, 0x12345678(%eax,%ecx,1)");
1404 asm volatile("bound %dx, 0x12345678(%eax,%ecx,8)");
1405
1406 /* AVX-512: Instructions with the same op codes as Mask Instructions */
1407
1408 asm volatile("cmovno %eax,%ebx");
1409 asm volatile("cmovno 0x12345678(%eax),%ecx");
1410 asm volatile("cmovno 0x12345678(%eax),%cx");
1411
1412 asm volatile("cmove %eax,%ebx");
1413 asm volatile("cmove 0x12345678(%eax),%ecx");
1414 asm volatile("cmove 0x12345678(%eax),%cx");
1415
1416 asm volatile("seto 0x12345678(%eax)");
1417 asm volatile("setno 0x12345678(%eax)");
1418 asm volatile("setb 0x12345678(%eax)");
1419 asm volatile("setc 0x12345678(%eax)");
1420 asm volatile("setnae 0x12345678(%eax)");
1421 asm volatile("setae 0x12345678(%eax)");
1422 asm volatile("setnb 0x12345678(%eax)");
1423 asm volatile("setnc 0x12345678(%eax)");
1424 asm volatile("sets 0x12345678(%eax)");
1425 asm volatile("setns 0x12345678(%eax)");
1426
1427 /* AVX-512: Mask Instructions */
1428
1429 asm volatile("kandw %k7,%k6,%k5");
1430 asm volatile("kandq %k7,%k6,%k5");
1431 asm volatile("kandb %k7,%k6,%k5");
1432 asm volatile("kandd %k7,%k6,%k5");
1433
1434 asm volatile("kandnw %k7,%k6,%k5");
1435 asm volatile("kandnq %k7,%k6,%k5");
1436 asm volatile("kandnb %k7,%k6,%k5");
1437 asm volatile("kandnd %k7,%k6,%k5");
1438
1439 asm volatile("knotw %k7,%k6");
1440 asm volatile("knotq %k7,%k6");
1441 asm volatile("knotb %k7,%k6");
1442 asm volatile("knotd %k7,%k6");
1443
1444 asm volatile("korw %k7,%k6,%k5");
1445 asm volatile("korq %k7,%k6,%k5");
1446 asm volatile("korb %k7,%k6,%k5");
1447 asm volatile("kord %k7,%k6,%k5");
1448
1449 asm volatile("kxnorw %k7,%k6,%k5");
1450 asm volatile("kxnorq %k7,%k6,%k5");
1451 asm volatile("kxnorb %k7,%k6,%k5");
1452 asm volatile("kxnord %k7,%k6,%k5");
1453
1454 asm volatile("kxorw %k7,%k6,%k5");
1455 asm volatile("kxorq %k7,%k6,%k5");
1456 asm volatile("kxorb %k7,%k6,%k5");
1457 asm volatile("kxord %k7,%k6,%k5");
1458
1459 asm volatile("kaddw %k7,%k6,%k5");
1460 asm volatile("kaddq %k7,%k6,%k5");
1461 asm volatile("kaddb %k7,%k6,%k5");
1462 asm volatile("kaddd %k7,%k6,%k5");
1463
1464 asm volatile("kunpckbw %k7,%k6,%k5");
1465 asm volatile("kunpckwd %k7,%k6,%k5");
1466 asm volatile("kunpckdq %k7,%k6,%k5");
1467
1468 asm volatile("kmovw %k6,%k5");
1469 asm volatile("kmovw (%ecx),%k5");
1470 asm volatile("kmovw 0x123(%eax,%ecx,8),%k5");
1471 asm volatile("kmovw %k5,(%ecx)");
1472 asm volatile("kmovw %k5,0x123(%eax,%ecx,8)");
1473 asm volatile("kmovw %eax,%k5");
1474 asm volatile("kmovw %ebp,%k5");
1475 asm volatile("kmovw %k5,%eax");
1476 asm volatile("kmovw %k5,%ebp");
1477
1478 asm volatile("kmovq %k6,%k5");
1479 asm volatile("kmovq (%ecx),%k5");
1480 asm volatile("kmovq 0x123(%eax,%ecx,8),%k5");
1481 asm volatile("kmovq %k5,(%ecx)");
1482 asm volatile("kmovq %k5,0x123(%eax,%ecx,8)");
1483
1484 asm volatile("kmovb %k6,%k5");
1485 asm volatile("kmovb (%ecx),%k5");
1486 asm volatile("kmovb 0x123(%eax,%ecx,8),%k5");
1487 asm volatile("kmovb %k5,(%ecx)");
1488 asm volatile("kmovb %k5,0x123(%eax,%ecx,8)");
1489 asm volatile("kmovb %eax,%k5");
1490 asm volatile("kmovb %ebp,%k5");
1491 asm volatile("kmovb %k5,%eax");
1492 asm volatile("kmovb %k5,%ebp");
1493
1494 asm volatile("kmovd %k6,%k5");
1495 asm volatile("kmovd (%ecx),%k5");
1496 asm volatile("kmovd 0x123(%eax,%ecx,8),%k5");
1497 asm volatile("kmovd %k5,(%ecx)");
1498 asm volatile("kmovd %k5,0x123(%eax,%ecx,8)");
1499 asm volatile("kmovd %eax,%k5");
1500 asm volatile("kmovd %ebp,%k5");
1501 asm volatile("kmovd %k5,%eax");
1502 asm volatile("kmovd %k5,%ebp");
1503
1504 asm volatile("kortestw %k6,%k5");
1505 asm volatile("kortestq %k6,%k5");
1506 asm volatile("kortestb %k6,%k5");
1507 asm volatile("kortestd %k6,%k5");
1508
1509 asm volatile("ktestw %k6,%k5");
1510 asm volatile("ktestq %k6,%k5");
1511 asm volatile("ktestb %k6,%k5");
1512 asm volatile("ktestd %k6,%k5");
1513
1514 asm volatile("kshiftrw $0x12,%k6,%k5");
1515 asm volatile("kshiftrq $0x5b,%k6,%k5");
1516 asm volatile("kshiftlw $0x12,%k6,%k5");
1517 asm volatile("kshiftlq $0x5b,%k6,%k5");
1518
1519 /* AVX-512: Op code 0f 5b */
1520 asm volatile("vcvtdq2ps %xmm5,%xmm6");
1521 asm volatile("vcvtqq2ps %zmm5,%ymm6{%k7}");
1522 asm volatile("vcvtps2dq %xmm5,%xmm6");
1523 asm volatile("vcvttps2dq %xmm5,%xmm6");
1524
1525 /* AVX-512: Op code 0f 6f */
1526
1527 asm volatile("movq %mm0,%mm4");
1528 asm volatile("vmovdqa %ymm4,%ymm6");
1529 asm volatile("vmovdqa32 %zmm5,%zmm6");
1530 asm volatile("vmovdqa64 %zmm5,%zmm6");
1531 asm volatile("vmovdqu %ymm4,%ymm6");
1532 asm volatile("vmovdqu32 %zmm5,%zmm6");
1533 asm volatile("vmovdqu64 %zmm5,%zmm6");
1534 asm volatile("vmovdqu8 %zmm5,%zmm6");
1535 asm volatile("vmovdqu16 %zmm5,%zmm6");
1536
1537 /* AVX-512: Op code 0f 78 */
1538
1539 asm volatile("vmread %eax,%ebx");
1540 asm volatile("vcvttps2udq %zmm5,%zmm6");
1541 asm volatile("vcvttpd2udq %zmm5,%ymm6{%k7}");
1542 asm volatile("vcvttsd2usi %xmm6,%eax");
1543 asm volatile("vcvttss2usi %xmm6,%eax");
1544 asm volatile("vcvttps2uqq %ymm5,%zmm6{%k7}");
1545 asm volatile("vcvttpd2uqq %zmm5,%zmm6");
1546
1547 /* AVX-512: Op code 0f 79 */
1548
1549 asm volatile("vmwrite %eax,%ebx");
1550 asm volatile("vcvtps2udq %zmm5,%zmm6");
1551 asm volatile("vcvtpd2udq %zmm5,%ymm6{%k7}");
1552 asm volatile("vcvtsd2usi %xmm6,%eax");
1553 asm volatile("vcvtss2usi %xmm6,%eax");
1554 asm volatile("vcvtps2uqq %ymm5,%zmm6{%k7}");
1555 asm volatile("vcvtpd2uqq %zmm5,%zmm6");
1556
1557 /* AVX-512: Op code 0f 7a */
1558
1559 asm volatile("vcvtudq2pd %ymm5,%zmm6{%k7}");
1560 asm volatile("vcvtuqq2pd %zmm5,%zmm6");
1561 asm volatile("vcvtudq2ps %zmm5,%zmm6");
1562 asm volatile("vcvtuqq2ps %zmm5,%ymm6{%k7}");
1563 asm volatile("vcvttps2qq %ymm5,%zmm6{%k7}");
1564 asm volatile("vcvttpd2qq %zmm5,%zmm6");
1565
1566 /* AVX-512: Op code 0f 7b */
1567
1568 asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
1569 asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
1570 asm volatile("vcvtps2qq %ymm5,%zmm6{%k7}");
1571 asm volatile("vcvtpd2qq %zmm5,%zmm6");
1572
1573 /* AVX-512: Op code 0f 7f */
1574
1575 asm volatile("movq.s %mm0,%mm4");
1576 asm volatile("vmovdqa.s %ymm5,%ymm6");
1577 asm volatile("vmovdqa32.s %zmm5,%zmm6");
1578 asm volatile("vmovdqa64.s %zmm5,%zmm6");
1579 asm volatile("vmovdqu.s %ymm5,%ymm6");
1580 asm volatile("vmovdqu32.s %zmm5,%zmm6");
1581 asm volatile("vmovdqu64.s %zmm5,%zmm6");
1582 asm volatile("vmovdqu8.s %zmm5,%zmm6");
1583 asm volatile("vmovdqu16.s %zmm5,%zmm6");
1584
1585 /* AVX-512: Op code 0f db */
1586
1587 asm volatile("pand %mm1,%mm2");
1588 asm volatile("pand %xmm1,%xmm2");
1589 asm volatile("vpand %ymm4,%ymm6,%ymm2");
1590 asm volatile("vpandd %zmm4,%zmm5,%zmm6");
1591 asm volatile("vpandq %zmm4,%zmm5,%zmm6");
1592
1593 /* AVX-512: Op code 0f df */
1594
1595 asm volatile("pandn %mm1,%mm2");
1596 asm volatile("pandn %xmm1,%xmm2");
1597 asm volatile("vpandn %ymm4,%ymm6,%ymm2");
1598 asm volatile("vpandnd %zmm4,%zmm5,%zmm6");
1599 asm volatile("vpandnq %zmm4,%zmm5,%zmm6");
1600
1601 /* AVX-512: Op code 0f e6 */
1602
1603 asm volatile("vcvttpd2dq %xmm1,%xmm2");
1604 asm volatile("vcvtdq2pd %xmm5,%xmm6");
1605 asm volatile("vcvtdq2pd %ymm5,%zmm6{%k7}");
1606 asm volatile("vcvtqq2pd %zmm5,%zmm6");
1607 asm volatile("vcvtpd2dq %xmm1,%xmm2");
1608
1609 /* AVX-512: Op code 0f eb */
1610
1611 asm volatile("por %mm4,%mm6");
1612 asm volatile("vpor %ymm4,%ymm6,%ymm2");
1613 asm volatile("vpord %zmm4,%zmm5,%zmm6");
1614 asm volatile("vporq %zmm4,%zmm5,%zmm6");
1615
1616 /* AVX-512: Op code 0f ef */
1617
1618 asm volatile("pxor %mm4,%mm6");
1619 asm volatile("vpxor %ymm4,%ymm6,%ymm2");
1620 asm volatile("vpxord %zmm4,%zmm5,%zmm6");
1621 asm volatile("vpxorq %zmm4,%zmm5,%zmm6");
1622
1623 /* AVX-512: Op code 0f 38 10 */
1624
1625 asm volatile("pblendvb %xmm1,%xmm0");
1626 asm volatile("vpsrlvw %zmm4,%zmm5,%zmm6");
1627 asm volatile("vpmovuswb %zmm5,%ymm6{%k7}");
1628
1629 /* AVX-512: Op code 0f 38 11 */
1630
1631 asm volatile("vpmovusdb %zmm5,%xmm6{%k7}");
1632 asm volatile("vpsravw %zmm4,%zmm5,%zmm6");
1633
1634 /* AVX-512: Op code 0f 38 12 */
1635
1636 asm volatile("vpmovusqb %zmm5,%xmm6{%k7}");
1637 asm volatile("vpsllvw %zmm4,%zmm5,%zmm6");
1638
1639 /* AVX-512: Op code 0f 38 13 */
1640
1641 asm volatile("vcvtph2ps %xmm3,%ymm5");
1642 asm volatile("vcvtph2ps %ymm5,%zmm6{%k7}");
1643 asm volatile("vpmovusdw %zmm5,%ymm6{%k7}");
1644
1645 /* AVX-512: Op code 0f 38 14 */
1646
1647 asm volatile("blendvps %xmm1,%xmm0");
1648 asm volatile("vpmovusqw %zmm5,%xmm6{%k7}");
1649 asm volatile("vprorvd %zmm4,%zmm5,%zmm6");
1650 asm volatile("vprorvq %zmm4,%zmm5,%zmm6");
1651
1652 /* AVX-512: Op code 0f 38 15 */
1653
1654 asm volatile("blendvpd %xmm1,%xmm0");
1655 asm volatile("vpmovusqd %zmm5,%ymm6{%k7}");
1656 asm volatile("vprolvd %zmm4,%zmm5,%zmm6");
1657 asm volatile("vprolvq %zmm4,%zmm5,%zmm6");
1658
1659 /* AVX-512: Op code 0f 38 16 */
1660
1661 asm volatile("vpermps %ymm4,%ymm6,%ymm2");
1662 asm volatile("vpermps %ymm4,%ymm6,%ymm2{%k7}");
1663 asm volatile("vpermpd %ymm4,%ymm6,%ymm2{%k7}");
1664
1665 /* AVX-512: Op code 0f 38 19 */
1666
1667 asm volatile("vbroadcastsd %xmm4,%ymm6");
1668 asm volatile("vbroadcastf32x2 %xmm7,%zmm6");
1669
1670 /* AVX-512: Op code 0f 38 1a */
1671
1672 asm volatile("vbroadcastf128 (%ecx),%ymm4");
1673 asm volatile("vbroadcastf32x4 (%ecx),%zmm6");
1674 asm volatile("vbroadcastf64x2 (%ecx),%zmm6");
1675
1676 /* AVX-512: Op code 0f 38 1b */
1677
1678 asm volatile("vbroadcastf32x8 (%ecx),%zmm6");
1679 asm volatile("vbroadcastf64x4 (%ecx),%zmm6");
1680
1681 /* AVX-512: Op code 0f 38 1f */
1682
1683 asm volatile("vpabsq %zmm4,%zmm6");
1684
1685 /* AVX-512: Op code 0f 38 20 */
1686
1687 asm volatile("vpmovsxbw %xmm4,%xmm5");
1688 asm volatile("vpmovswb %zmm5,%ymm6{%k7}");
1689
1690 /* AVX-512: Op code 0f 38 21 */
1691
1692 asm volatile("vpmovsxbd %xmm4,%ymm6");
1693 asm volatile("vpmovsdb %zmm5,%xmm6{%k7}");
1694
1695 /* AVX-512: Op code 0f 38 22 */
1696
1697 asm volatile("vpmovsxbq %xmm4,%ymm4");
1698 asm volatile("vpmovsqb %zmm5,%xmm6{%k7}");
1699
1700 /* AVX-512: Op code 0f 38 23 */
1701
1702 asm volatile("vpmovsxwd %xmm4,%ymm4");
1703 asm volatile("vpmovsdw %zmm5,%ymm6{%k7}");
1704
1705 /* AVX-512: Op code 0f 38 24 */
1706
1707 asm volatile("vpmovsxwq %xmm4,%ymm6");
1708 asm volatile("vpmovsqw %zmm5,%xmm6{%k7}");
1709
1710 /* AVX-512: Op code 0f 38 25 */
1711
1712 asm volatile("vpmovsxdq %xmm4,%ymm4");
1713 asm volatile("vpmovsqd %zmm5,%ymm6{%k7}");
1714
1715 /* AVX-512: Op code 0f 38 26 */
1716
1717 asm volatile("vptestmb %zmm5,%zmm6,%k5");
1718 asm volatile("vptestmw %zmm5,%zmm6,%k5");
1719 asm volatile("vptestnmb %zmm4,%zmm5,%k5");
1720 asm volatile("vptestnmw %zmm4,%zmm5,%k5");
1721
1722 /* AVX-512: Op code 0f 38 27 */
1723
1724 asm volatile("vptestmd %zmm5,%zmm6,%k5");
1725 asm volatile("vptestmq %zmm5,%zmm6,%k5");
1726 asm volatile("vptestnmd %zmm4,%zmm5,%k5");
1727 asm volatile("vptestnmq %zmm4,%zmm5,%k5");
1728
1729 /* AVX-512: Op code 0f 38 28 */
1730
1731 asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
1732 asm volatile("vpmovm2b %k5,%zmm6");
1733 asm volatile("vpmovm2w %k5,%zmm6");
1734
1735 /* AVX-512: Op code 0f 38 29 */
1736
1737 asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
1738 asm volatile("vpmovb2m %zmm6,%k5");
1739 asm volatile("vpmovw2m %zmm6,%k5");
1740
1741 /* AVX-512: Op code 0f 38 2a */
1742
1743 asm volatile("vmovntdqa (%ecx),%ymm4");
1744 asm volatile("vpbroadcastmb2q %k6,%zmm1");
1745
1746 /* AVX-512: Op code 0f 38 2c */
1747
1748 asm volatile("vmaskmovps (%ecx),%ymm4,%ymm6");
1749 asm volatile("vscalefps %zmm4,%zmm5,%zmm6");
1750 asm volatile("vscalefpd %zmm4,%zmm5,%zmm6");
1751
1752 /* AVX-512: Op code 0f 38 2d */
1753
1754 asm volatile("vmaskmovpd (%ecx),%ymm4,%ymm6");
1755 asm volatile("vscalefss %xmm4,%xmm5,%xmm6{%k7}");
1756 asm volatile("vscalefsd %xmm4,%xmm5,%xmm6{%k7}");
1757
1758 /* AVX-512: Op code 0f 38 30 */
1759
1760 asm volatile("vpmovzxbw %xmm4,%ymm4");
1761 asm volatile("vpmovwb %zmm5,%ymm6{%k7}");
1762
1763 /* AVX-512: Op code 0f 38 31 */
1764
1765 asm volatile("vpmovzxbd %xmm4,%ymm6");
1766 asm volatile("vpmovdb %zmm5,%xmm6{%k7}");
1767
1768 /* AVX-512: Op code 0f 38 32 */
1769
1770 asm volatile("vpmovzxbq %xmm4,%ymm4");
1771 asm volatile("vpmovqb %zmm5,%xmm6{%k7}");
1772
1773 /* AVX-512: Op code 0f 38 33 */
1774
1775 asm volatile("vpmovzxwd %xmm4,%ymm4");
1776 asm volatile("vpmovdw %zmm5,%ymm6{%k7}");
1777
1778 /* AVX-512: Op code 0f 38 34 */
1779
1780 asm volatile("vpmovzxwq %xmm4,%ymm6");
1781 asm volatile("vpmovqw %zmm5,%xmm6{%k7}");
1782
1783 /* AVX-512: Op code 0f 38 35 */
1784
1785 asm volatile("vpmovzxdq %xmm4,%ymm4");
1786 asm volatile("vpmovqd %zmm5,%ymm6{%k7}");
1787
1788 /* AVX-512: Op code 0f 38 36 */
1789
1790 asm volatile("vpermd %ymm4,%ymm6,%ymm2");
1791 asm volatile("vpermd %ymm4,%ymm6,%ymm2{%k7}");
1792 asm volatile("vpermq %ymm4,%ymm6,%ymm2{%k7}");
1793
1794 /* AVX-512: Op code 0f 38 38 */
1795
1796 asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
1797 asm volatile("vpmovm2d %k5,%zmm6");
1798 asm volatile("vpmovm2q %k5,%zmm6");
1799
1800 /* AVX-512: Op code 0f 38 39 */
1801
1802 asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
1803 asm volatile("vpminsd %zmm4,%zmm5,%zmm6");
1804 asm volatile("vpminsq %zmm4,%zmm5,%zmm6");
1805 asm volatile("vpmovd2m %zmm6,%k5");
1806 asm volatile("vpmovq2m %zmm6,%k5");
1807
1808 /* AVX-512: Op code 0f 38 3a */
1809
1810 asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
1811 asm volatile("vpbroadcastmw2d %k6,%zmm6");
1812
1813 /* AVX-512: Op code 0f 38 3b */
1814
1815 asm volatile("vpminud %ymm4,%ymm6,%ymm2");
1816 asm volatile("vpminud %zmm4,%zmm5,%zmm6");
1817 asm volatile("vpminuq %zmm4,%zmm5,%zmm6");
1818
1819 /* AVX-512: Op code 0f 38 3d */
1820
1821 asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
1822 asm volatile("vpmaxsd %zmm4,%zmm5,%zmm6");
1823 asm volatile("vpmaxsq %zmm4,%zmm5,%zmm6");
1824
1825 /* AVX-512: Op code 0f 38 3f */
1826
1827 asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
1828 asm volatile("vpmaxud %zmm4,%zmm5,%zmm6");
1829 asm volatile("vpmaxuq %zmm4,%zmm5,%zmm6");
1830
1831 /* AVX-512: Op code 0f 38 40 */
1832
1833 asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
1834 asm volatile("vpmulld %zmm4,%zmm5,%zmm6");
1835 asm volatile("vpmullq %zmm4,%zmm5,%zmm6");
1836
1837 /* AVX-512: Op code 0f 38 42 */
1838
1839 asm volatile("vgetexpps %zmm5,%zmm6");
1840 asm volatile("vgetexppd %zmm5,%zmm6");
1841
1842 /* AVX-512: Op code 0f 38 43 */
1843
1844 asm volatile("vgetexpss %xmm4,%xmm5,%xmm6{%k7}");
1845 asm volatile("vgetexpsd %xmm2,%xmm3,%xmm4{%k7}");
1846
1847 /* AVX-512: Op code 0f 38 44 */
1848
1849 asm volatile("vplzcntd %zmm5,%zmm6");
1850 asm volatile("vplzcntq %zmm5,%zmm6");
1851
1852 /* AVX-512: Op code 0f 38 46 */
1853
1854 asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
1855 asm volatile("vpsravd %zmm4,%zmm5,%zmm6");
1856 asm volatile("vpsravq %zmm4,%zmm5,%zmm6");
1857
1858 /* AVX-512: Op code 0f 38 4c */
1859
1860 asm volatile("vrcp14ps %zmm5,%zmm6");
1861 asm volatile("vrcp14pd %zmm5,%zmm6");
1862
1863 /* AVX-512: Op code 0f 38 4d */
1864
1865 asm volatile("vrcp14ss %xmm4,%xmm5,%xmm6{%k7}");
1866 asm volatile("vrcp14sd %xmm4,%xmm5,%xmm6{%k7}");
1867
1868 /* AVX-512: Op code 0f 38 4e */
1869
1870 asm volatile("vrsqrt14ps %zmm5,%zmm6");
1871 asm volatile("vrsqrt14pd %zmm5,%zmm6");
1872
1873 /* AVX-512: Op code 0f 38 4f */
1874
1875 asm volatile("vrsqrt14ss %xmm4,%xmm5,%xmm6{%k7}");
1876 asm volatile("vrsqrt14sd %xmm4,%xmm5,%xmm6{%k7}");
1877
1878 /* AVX-512: Op code 0f 38 59 */
1879
1880 asm volatile("vpbroadcastq %xmm4,%xmm6");
1881 asm volatile("vbroadcasti32x2 %xmm7,%zmm6");
1882
1883 /* AVX-512: Op code 0f 38 5a */
1884
1885 asm volatile("vbroadcasti128 (%ecx),%ymm4");
1886 asm volatile("vbroadcasti32x4 (%ecx),%zmm6");
1887 asm volatile("vbroadcasti64x2 (%ecx),%zmm6");
1888
1889 /* AVX-512: Op code 0f 38 5b */
1890
1891 asm volatile("vbroadcasti32x8 (%ecx),%zmm6");
1892 asm volatile("vbroadcasti64x4 (%ecx),%zmm6");
1893
1894 /* AVX-512: Op code 0f 38 64 */
1895
1896 asm volatile("vpblendmd %zmm4,%zmm5,%zmm6");
1897 asm volatile("vpblendmq %zmm4,%zmm5,%zmm6");
1898
1899 /* AVX-512: Op code 0f 38 65 */
1900
1901 asm volatile("vblendmps %zmm4,%zmm5,%zmm6");
1902 asm volatile("vblendmpd %zmm4,%zmm5,%zmm6");
1903
1904 /* AVX-512: Op code 0f 38 66 */
1905
1906 asm volatile("vpblendmb %zmm4,%zmm5,%zmm6");
1907 asm volatile("vpblendmw %zmm4,%zmm5,%zmm6");
1908
1909 /* AVX-512: Op code 0f 38 75 */
1910
1911 asm volatile("vpermi2b %zmm4,%zmm5,%zmm6");
1912 asm volatile("vpermi2w %zmm4,%zmm5,%zmm6");
1913
1914 /* AVX-512: Op code 0f 38 76 */
1915
1916 asm volatile("vpermi2d %zmm4,%zmm5,%zmm6");
1917 asm volatile("vpermi2q %zmm4,%zmm5,%zmm6");
1918
1919 /* AVX-512: Op code 0f 38 77 */
1920
1921 asm volatile("vpermi2ps %zmm4,%zmm5,%zmm6");
1922 asm volatile("vpermi2pd %zmm4,%zmm5,%zmm6");
1923
1924 /* AVX-512: Op code 0f 38 7a */
1925
1926 asm volatile("vpbroadcastb %eax,%xmm3");
1927
1928 /* AVX-512: Op code 0f 38 7b */
1929
1930 asm volatile("vpbroadcastw %eax,%xmm3");
1931
1932 /* AVX-512: Op code 0f 38 7c */
1933
1934 asm volatile("vpbroadcastd %eax,%xmm3");
1935
1936 /* AVX-512: Op code 0f 38 7d */
1937
1938 asm volatile("vpermt2b %zmm4,%zmm5,%zmm6");
1939 asm volatile("vpermt2w %zmm4,%zmm5,%zmm6");
1940
1941 /* AVX-512: Op code 0f 38 7e */
1942
1943 asm volatile("vpermt2d %zmm4,%zmm5,%zmm6");
1944 asm volatile("vpermt2q %zmm4,%zmm5,%zmm6");
1945
1946 /* AVX-512: Op code 0f 38 7f */
1947
1948 asm volatile("vpermt2ps %zmm4,%zmm5,%zmm6");
1949 asm volatile("vpermt2pd %zmm4,%zmm5,%zmm6");
1950
1951 /* AVX-512: Op code 0f 38 83 */
1952
1953 asm volatile("vpmultishiftqb %zmm4,%zmm5,%zmm6");
1954
1955 /* AVX-512: Op code 0f 38 88 */
1956
1957 asm volatile("vexpandps (%ecx),%zmm6");
1958 asm volatile("vexpandpd (%ecx),%zmm6");
1959
1960 /* AVX-512: Op code 0f 38 89 */
1961
1962 asm volatile("vpexpandd (%ecx),%zmm6");
1963 asm volatile("vpexpandq (%ecx),%zmm6");
1964
1965 /* AVX-512: Op code 0f 38 8a */
1966
1967 asm volatile("vcompressps %zmm6,(%ecx)");
1968 asm volatile("vcompresspd %zmm6,(%ecx)");
1969
1970 /* AVX-512: Op code 0f 38 8b */
1971
1972 asm volatile("vpcompressd %zmm6,(%ecx)");
1973 asm volatile("vpcompressq %zmm6,(%ecx)");
1974
1975 /* AVX-512: Op code 0f 38 8d */
1976
1977 asm volatile("vpermb %zmm4,%zmm5,%zmm6");
1978 asm volatile("vpermw %zmm4,%zmm5,%zmm6");
1979
1980 /* AVX-512: Op code 0f 38 90 */
1981
1982 asm volatile("vpgatherdd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
1983 asm volatile("vpgatherdq %xmm2,0x04(%ebp,%xmm7,2),%xmm1");
1984 asm volatile("vpgatherdd 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
1985 asm volatile("vpgatherdq 0x7b(%ebp,%ymm7,8),%zmm6{%k1}");
1986
1987 /* AVX-512: Op code 0f 38 91 */
1988
1989 asm volatile("vpgatherqd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
1990 asm volatile("vpgatherqq %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
1991 asm volatile("vpgatherqd 0x7b(%ebp,%zmm7,8),%ymm6{%k1}");
1992 asm volatile("vpgatherqq 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
1993
1994 /* AVX-512: Op code 0f 38 a0 */
1995
1996 asm volatile("vpscatterdd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
1997 asm volatile("vpscatterdq %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
1998
1999 /* AVX-512: Op code 0f 38 a1 */
2000
2001 asm volatile("vpscatterqd %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
2002 asm volatile("vpscatterqq %ymm6,0x7b(%ebp,%ymm7,8){%k1}");
2003
2004 /* AVX-512: Op code 0f 38 a2 */
2005
2006 asm volatile("vscatterdps %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
2007 asm volatile("vscatterdpd %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
2008
2009 /* AVX-512: Op code 0f 38 a3 */
2010
2011 asm volatile("vscatterqps %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
2012 asm volatile("vscatterqpd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
2013
2014 /* AVX-512: Op code 0f 38 b4 */
2015
2016 asm volatile("vpmadd52luq %zmm4,%zmm5,%zmm6");
2017
2018 /* AVX-512: Op code 0f 38 b5 */
2019
2020 asm volatile("vpmadd52huq %zmm4,%zmm5,%zmm6");
2021
2022 /* AVX-512: Op code 0f 38 c4 */
2023
2024 asm volatile("vpconflictd %zmm5,%zmm6");
2025 asm volatile("vpconflictq %zmm5,%zmm6");
2026
2027 /* AVX-512: Op code 0f 38 c8 */
2028
2029 asm volatile("vexp2ps %zmm6,%zmm7");
2030 asm volatile("vexp2pd %zmm6,%zmm7");
2031
2032 /* AVX-512: Op code 0f 38 ca */
2033
2034 asm volatile("vrcp28ps %zmm6,%zmm7");
2035 asm volatile("vrcp28pd %zmm6,%zmm7");
2036
2037 /* AVX-512: Op code 0f 38 cb */
2038
2039 asm volatile("vrcp28ss %xmm5,%xmm6,%xmm7{%k7}");
2040 asm volatile("vrcp28sd %xmm5,%xmm6,%xmm7{%k7}");
2041
2042 /* AVX-512: Op code 0f 38 cc */
2043
2044 asm volatile("vrsqrt28ps %zmm6,%zmm7");
2045 asm volatile("vrsqrt28pd %zmm6,%zmm7");
2046
2047 /* AVX-512: Op code 0f 38 cd */
2048
2049 asm volatile("vrsqrt28ss %xmm5,%xmm6,%xmm7{%k7}");
2050 asm volatile("vrsqrt28sd %xmm5,%xmm6,%xmm7{%k7}");
2051
2052 /* AVX-512: Op code 0f 3a 03 */
2053
2054 asm volatile("valignd $0x12,%zmm5,%zmm6,%zmm7");
2055 asm volatile("valignq $0x12,%zmm5,%zmm6,%zmm7");
2056
2057 /* AVX-512: Op code 0f 3a 08 */
2058
2059 asm volatile("vroundps $0x5,%ymm6,%ymm2");
2060 asm volatile("vrndscaleps $0x12,%zmm5,%zmm6");
2061
2062 /* AVX-512: Op code 0f 3a 09 */
2063
2064 asm volatile("vroundpd $0x5,%ymm6,%ymm2");
2065 asm volatile("vrndscalepd $0x12,%zmm5,%zmm6");
2066
2067 /* AVX-512: Op code 0f 3a 0a */
2068
2069 asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
2070 asm volatile("vrndscaless $0x12,%xmm4,%xmm5,%xmm6{%k7}");
2071
2072 /* AVX-512: Op code 0f 3a 0b */
2073
2074 asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
2075 asm volatile("vrndscalesd $0x12,%xmm4,%xmm5,%xmm6{%k7}");
2076
2077 /* AVX-512: Op code 0f 3a 18 */
2078
2079 asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
2080 asm volatile("vinsertf32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
2081 asm volatile("vinsertf64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
2082
2083 /* AVX-512: Op code 0f 3a 19 */
2084
2085 asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
2086 asm volatile("vextractf32x4 $0x12,%zmm5,%xmm6{%k7}");
2087 asm volatile("vextractf64x2 $0x12,%zmm5,%xmm6{%k7}");
2088
2089 /* AVX-512: Op code 0f 3a 1a */
2090
2091 asm volatile("vinsertf32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
2092 asm volatile("vinsertf64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
2093
2094 /* AVX-512: Op code 0f 3a 1b */
2095
2096 asm volatile("vextractf32x8 $0x12,%zmm6,%ymm7{%k7}");
2097 asm volatile("vextractf64x4 $0x12,%zmm6,%ymm7{%k7}");
2098
2099 /* AVX-512: Op code 0f 3a 1e */
2100
2101 asm volatile("vpcmpud $0x12,%zmm6,%zmm7,%k5");
2102 asm volatile("vpcmpuq $0x12,%zmm6,%zmm7,%k5");
2103
2104 /* AVX-512: Op code 0f 3a 1f */
2105
2106 asm volatile("vpcmpd $0x12,%zmm6,%zmm7,%k5");
2107 asm volatile("vpcmpq $0x12,%zmm6,%zmm7,%k5");
2108
2109 /* AVX-512: Op code 0f 3a 23 */
2110
2111 asm volatile("vshuff32x4 $0x12,%zmm5,%zmm6,%zmm7");
2112 asm volatile("vshuff64x2 $0x12,%zmm5,%zmm6,%zmm7");
2113
2114 /* AVX-512: Op code 0f 3a 25 */
2115
2116 asm volatile("vpternlogd $0x12,%zmm5,%zmm6,%zmm7");
2117 asm volatile("vpternlogq $0x12,%zmm5,%zmm6,%zmm7");
2118
2119 /* AVX-512: Op code 0f 3a 26 */
2120
2121 asm volatile("vgetmantps $0x12,%zmm6,%zmm7");
2122 asm volatile("vgetmantpd $0x12,%zmm6,%zmm7");
2123
2124 /* AVX-512: Op code 0f 3a 27 */
2125
2126 asm volatile("vgetmantss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
2127 asm volatile("vgetmantsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
2128
2129 /* AVX-512: Op code 0f 3a 38 */
2130
2131 asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
2132 asm volatile("vinserti32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
2133 asm volatile("vinserti64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
2134
2135 /* AVX-512: Op code 0f 3a 39 */
2136
2137 asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
2138 asm volatile("vextracti32x4 $0x12,%zmm5,%xmm6{%k7}");
2139 asm volatile("vextracti64x2 $0x12,%zmm5,%xmm6{%k7}");
2140
2141 /* AVX-512: Op code 0f 3a 3a */
2142
2143 asm volatile("vinserti32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
2144 asm volatile("vinserti64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
2145
2146 /* AVX-512: Op code 0f 3a 3b */
2147
2148 asm volatile("vextracti32x8 $0x12,%zmm6,%ymm7{%k7}");
2149 asm volatile("vextracti64x4 $0x12,%zmm6,%ymm7{%k7}");
2150
2151 /* AVX-512: Op code 0f 3a 3e */
2152
2153 asm volatile("vpcmpub $0x12,%zmm6,%zmm7,%k5");
2154 asm volatile("vpcmpuw $0x12,%zmm6,%zmm7,%k5");
2155
2156 /* AVX-512: Op code 0f 3a 3f */
2157
2158 asm volatile("vpcmpb $0x12,%zmm6,%zmm7,%k5");
2159 asm volatile("vpcmpw $0x12,%zmm6,%zmm7,%k5");
2160
2161 /* AVX-512: Op code 0f 3a 42 */
2162
2163 asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
2164 asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
2165
2166 /* AVX-512: Op code 0f 3a 43 */
2167
2168 asm volatile("vshufi32x4 $0x12,%zmm5,%zmm6,%zmm7");
2169 asm volatile("vshufi64x2 $0x12,%zmm5,%zmm6,%zmm7");
2170
2171 /* AVX-512: Op code 0f 3a 50 */
2172
2173 asm volatile("vrangeps $0x12,%zmm5,%zmm6,%zmm7");
2174 asm volatile("vrangepd $0x12,%zmm5,%zmm6,%zmm7");
2175
2176 /* AVX-512: Op code 0f 3a 51 */
2177
2178 asm volatile("vrangess $0x12,%xmm5,%xmm6,%xmm7");
2179 asm volatile("vrangesd $0x12,%xmm5,%xmm6,%xmm7");
2180
2181 /* AVX-512: Op code 0f 3a 54 */
2182
2183 asm volatile("vfixupimmps $0x12,%zmm5,%zmm6,%zmm7");
2184 asm volatile("vfixupimmpd $0x12,%zmm5,%zmm6,%zmm7");
2185
2186 /* AVX-512: Op code 0f 3a 55 */
2187
2188 asm volatile("vfixupimmss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
2189 asm volatile("vfixupimmsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
2190
2191 /* AVX-512: Op code 0f 3a 56 */
2192
2193 asm volatile("vreduceps $0x12,%zmm6,%zmm7");
2194 asm volatile("vreducepd $0x12,%zmm6,%zmm7");
2195
2196 /* AVX-512: Op code 0f 3a 57 */
2197
2198 asm volatile("vreducess $0x12,%xmm5,%xmm6,%xmm7");
2199 asm volatile("vreducesd $0x12,%xmm5,%xmm6,%xmm7");
2200
2201 /* AVX-512: Op code 0f 3a 66 */
2202
2203 asm volatile("vfpclassps $0x12,%zmm7,%k5");
2204 asm volatile("vfpclasspd $0x12,%zmm7,%k5");
2205
2206 /* AVX-512: Op code 0f 3a 67 */
2207
2208 asm volatile("vfpclassss $0x12,%xmm7,%k5");
2209 asm volatile("vfpclasssd $0x12,%xmm7,%k5");
2210
2211 /* AVX-512: Op code 0f 72 (Grp13) */
2212
2213 asm volatile("vprord $0x12,%zmm5,%zmm6");
2214 asm volatile("vprorq $0x12,%zmm5,%zmm6");
2215 asm volatile("vprold $0x12,%zmm5,%zmm6");
2216 asm volatile("vprolq $0x12,%zmm5,%zmm6");
2217 asm volatile("psrad $0x2,%mm6");
2218 asm volatile("vpsrad $0x5,%ymm6,%ymm2");
2219 asm volatile("vpsrad $0x5,%zmm6,%zmm2");
2220 asm volatile("vpsraq $0x5,%zmm6,%zmm2");
2221
2222 /* AVX-512: Op code 0f 38 c6 (Grp18) */
2223
2224 asm volatile("vgatherpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
2225 asm volatile("vgatherpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
2226 asm volatile("vgatherpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
2227 asm volatile("vgatherpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
2228 asm volatile("vscatterpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
2229 asm volatile("vscatterpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
2230 asm volatile("vscatterpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
2231 asm volatile("vscatterpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
2232
2233 /* AVX-512: Op code 0f 38 c7 (Grp19) */
2234
2235 asm volatile("vgatherpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
2236 asm volatile("vgatherpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
2237 asm volatile("vgatherpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
2238 asm volatile("vgatherpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
2239 asm volatile("vscatterpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
2240 asm volatile("vscatterpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
2241 asm volatile("vscatterpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
2242 asm volatile("vscatterpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
2243
2244 /* AVX-512: Examples */
2245
2246 asm volatile("vaddpd %zmm4,%zmm5,%zmm6");
2247 asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}");
2248 asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}{z}");
2249 asm volatile("vaddpd {rn-sae},%zmm4,%zmm5,%zmm6");
2250 asm volatile("vaddpd {ru-sae},%zmm4,%zmm5,%zmm6");
2251 asm volatile("vaddpd {rd-sae},%zmm4,%zmm5,%zmm6");
2252 asm volatile("vaddpd {rz-sae},%zmm4,%zmm5,%zmm6");
2253 asm volatile("vaddpd (%ecx),%zmm5,%zmm6");
2254 asm volatile("vaddpd 0x123(%eax,%ecx,8),%zmm5,%zmm6");
2255 asm volatile("vaddpd (%ecx){1to8},%zmm5,%zmm6");
2256 asm volatile("vaddpd 0x1fc0(%edx),%zmm5,%zmm6");
2257 asm volatile("vaddpd 0x3f8(%edx){1to8},%zmm5,%zmm6");
2258 asm volatile("vcmpeq_uqps 0x1fc(%edx){1to16},%zmm6,%k5");
2259 asm volatile("vcmpltsd 0x123(%eax,%ecx,8),%xmm3,%k5{%k7}");
2260 asm volatile("vcmplesd {sae},%xmm4,%xmm5,%k5{%k7}");
2261 asm volatile("vgetmantss $0x5b,0x123(%eax,%ecx,8),%xmm4,%xmm5{%k7}");
2262
474 /* bndmk m32, bnd */ 2263 /* bndmk m32, bnd */
475 2264
476 asm volatile("bndmk (%eax), %bnd0"); 2265 asm volatile("bndmk (%eax), %bnd0");
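That closes the decoder test vectors: each asm statement above is assembled, and the resulting bytes are fed back through the instruction decoder to check length and prefix handling. A minimal sketch of driving that decoder directly (assumptions: built against the tools/perf/util/intel-pt-decoder copies of insn.h/insn.c; the byte string is a hand-assembled EVEX encoding of vaddpd %zmm28,%zmm29,%zmm30, illustrative rather than taken from the patch):

	#include <stdio.h>
	#include "insn.h"

	int main(void)
	{
		/* hand-assembled: vaddpd %zmm28,%zmm29,%zmm30 (assumed correct) */
		unsigned char buf[] = { 0x62, 0x01, 0x95, 0x40, 0x58, 0xf4 };
		struct insn insn;

		insn_init(&insn, buf, sizeof(buf), 1 /* x86_64 */);
		insn_get_length(&insn);	/* full decode: prefixes through immediate */

		printf("len=%d evex=%d opc=0x%02x\n", insn.length,
		       insn_is_evex(&insn), insn.opcode.bytes[0]);
		return 0;	/* expect: len=6 evex=1 opc=0x58 */
	}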
diff --git a/tools/perf/tests/kmod-path.c b/tools/perf/tests/kmod-path.c
index d2af78193153..76f41f249944 100644
--- a/tools/perf/tests/kmod-path.c
+++ b/tools/perf/tests/kmod-path.c
@@ -1,4 +1,5 @@
1#include <stdbool.h> 1#include <stdbool.h>
2#include <stdlib.h>
2#include "tests.h" 3#include "tests.h"
3#include "dso.h" 4#include "dso.h"
4#include "debug.h" 5#include "debug.h"
diff --git a/tools/perf/util/event.h b/tools/perf/util/event.h
index b32464b353aa..8d363d5e65a2 100644
--- a/tools/perf/util/event.h
+++ b/tools/perf/util/event.h
@@ -8,7 +8,6 @@
8#include "map.h" 8#include "map.h"
9#include "build-id.h" 9#include "build-id.h"
10#include "perf_regs.h" 10#include "perf_regs.h"
11#include <asm/perf_regs.h>
12 11
13struct mmap_event { 12struct mmap_event {
14 struct perf_event_header header; 13 struct perf_event_header header;
diff --git a/tools/perf/util/intel-pt-decoder/gen-insn-attr-x86.awk b/tools/perf/util/intel-pt-decoder/gen-insn-attr-x86.awk
index 517567347aac..54e961659514 100644
--- a/tools/perf/util/intel-pt-decoder/gen-insn-attr-x86.awk
+++ b/tools/perf/util/intel-pt-decoder/gen-insn-attr-x86.awk
@@ -72,12 +72,14 @@ BEGIN {
72 lprefix_expr = "\\((66|F2|F3)\\)" 72 lprefix_expr = "\\((66|F2|F3)\\)"
73 max_lprefix = 4 73 max_lprefix = 4
74 74
75 # All opcodes starting with lower-case 'v' or with (v1) superscript 75 # All opcodes starting with lower-case 'v', 'k' or with (v1) superscript
76 # accept VEX prefix 76 # accept VEX prefix
77 vexok_opcode_expr = "^v.*" 77 vexok_opcode_expr = "^[vk].*"
78 vexok_expr = "\\(v1\\)" 78 vexok_expr = "\\(v1\\)"
79 # All opcodes with (v) superscript support *only* VEX prefix 79 # All opcodes with (v) superscript support *only* VEX prefix
80 vexonly_expr = "\\(v\\)" 80 vexonly_expr = "\\(v\\)"
81 # All opcodes with (ev) superscript support *only* EVEX prefix
82 evexonly_expr = "\\(ev\\)"
81 83
82 prefix_expr = "\\(Prefix\\)" 84 prefix_expr = "\\(Prefix\\)"
83 prefix_num["Operand-Size"] = "INAT_PFX_OPNDSZ" 85 prefix_num["Operand-Size"] = "INAT_PFX_OPNDSZ"
@@ -95,6 +97,7 @@ BEGIN {
95 prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ" 97 prefix_num["Address-Size"] = "INAT_PFX_ADDRSZ"
96 prefix_num["VEX+1byte"] = "INAT_PFX_VEX2" 98 prefix_num["VEX+1byte"] = "INAT_PFX_VEX2"
97 prefix_num["VEX+2byte"] = "INAT_PFX_VEX3" 99 prefix_num["VEX+2byte"] = "INAT_PFX_VEX3"
100 prefix_num["EVEX"] = "INAT_PFX_EVEX"
98 101
99 clear_vars() 102 clear_vars()
100} 103}
@@ -319,7 +322,9 @@ function convert_operands(count,opnd, i,j,imm,mod)
319 flags = add_flags(flags, "INAT_MODRM") 322 flags = add_flags(flags, "INAT_MODRM")
320 323
321 # check VEX codes 324 # check VEX codes
322 if (match(ext, vexonly_expr)) 325 if (match(ext, evexonly_expr))
326 flags = add_flags(flags, "INAT_VEXOK | INAT_EVEXONLY")
327 else if (match(ext, vexonly_expr))
323 flags = add_flags(flags, "INAT_VEXOK | INAT_VEXONLY") 328 flags = add_flags(flags, "INAT_VEXOK | INAT_VEXONLY")
324 else if (match(ext, vexok_expr) || match(opcode, vexok_opcode_expr)) 329 else if (match(ext, vexok_expr) || match(opcode, vexok_opcode_expr))
325 flags = add_flags(flags, "INAT_VEXOK") 330 flags = add_flags(flags, "INAT_VEXOK")
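To see what the new branch emits: a map row marked (ev) — for example the vpsrlvw form of opcode 0f 38 10 in the updated x86-opcode-map.txt — now picks up both flags. A reconstructed sketch (the map line and the generated entry below are illustrative, not copied from the files):

	# map row (reconstructed): (ev) marks EVEX-only forms
	10: pblendvb Vdq,Wdq (66) | vpsrlvw Vx,Hx,Wx (66),(ev) | vpmovuswb Wx,Vx (F3),(ev)

	/* hypothetical shape of the generated inat-tables.c attribute */
	[0x10] = INAT_MODRM | INAT_VEXOK | INAT_EVEXONLY,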
diff --git a/tools/perf/util/intel-pt-decoder/inat.h b/tools/perf/util/intel-pt-decoder/inat.h
index 611645e903a8..125ecd2a300d 100644
--- a/tools/perf/util/intel-pt-decoder/inat.h
+++ b/tools/perf/util/intel-pt-decoder/inat.h
@@ -48,6 +48,7 @@
48/* AVX VEX prefixes */ 48/* AVX VEX prefixes */
49#define INAT_PFX_VEX2 13 /* 2-bytes VEX prefix */ 49#define INAT_PFX_VEX2 13 /* 2-bytes VEX prefix */
50#define INAT_PFX_VEX3 14 /* 3-bytes VEX prefix */ 50#define INAT_PFX_VEX3 14 /* 3-bytes VEX prefix */
51#define INAT_PFX_EVEX 15 /* EVEX prefix */
51 52
52#define INAT_LSTPFX_MAX 3 53#define INAT_LSTPFX_MAX 3
53#define INAT_LGCPFX_MAX 11 54#define INAT_LGCPFX_MAX 11
@@ -89,6 +90,7 @@
89#define INAT_VARIANT (1 << (INAT_FLAG_OFFS + 4)) 90#define INAT_VARIANT (1 << (INAT_FLAG_OFFS + 4))
90#define INAT_VEXOK (1 << (INAT_FLAG_OFFS + 5)) 91#define INAT_VEXOK (1 << (INAT_FLAG_OFFS + 5))
91#define INAT_VEXONLY (1 << (INAT_FLAG_OFFS + 6)) 92#define INAT_VEXONLY (1 << (INAT_FLAG_OFFS + 6))
93#define INAT_EVEXONLY (1 << (INAT_FLAG_OFFS + 7))
92/* Attribute making macros for attribute tables */ 94/* Attribute making macros for attribute tables */
93#define INAT_MAKE_PREFIX(pfx) (pfx << INAT_PFX_OFFS) 95#define INAT_MAKE_PREFIX(pfx) (pfx << INAT_PFX_OFFS)
94#define INAT_MAKE_ESCAPE(esc) (esc << INAT_ESC_OFFS) 96#define INAT_MAKE_ESCAPE(esc) (esc << INAT_ESC_OFFS)
@@ -141,7 +143,13 @@ static inline int inat_last_prefix_id(insn_attr_t attr)
141static inline int inat_is_vex_prefix(insn_attr_t attr) 143static inline int inat_is_vex_prefix(insn_attr_t attr)
142{ 144{
143 attr &= INAT_PFX_MASK; 145 attr &= INAT_PFX_MASK;
144 return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3; 146 return attr == INAT_PFX_VEX2 || attr == INAT_PFX_VEX3 ||
147 attr == INAT_PFX_EVEX;
148}
149
150static inline int inat_is_evex_prefix(insn_attr_t attr)
151{
152 return (attr & INAT_PFX_MASK) == INAT_PFX_EVEX;
145} 153}
146 154
147static inline int inat_is_vex3_prefix(insn_attr_t attr) 155static inline int inat_is_vex3_prefix(insn_attr_t attr)
@@ -216,6 +224,11 @@ static inline int inat_accept_vex(insn_attr_t attr)
216 224
217static inline int inat_must_vex(insn_attr_t attr) 225static inline int inat_must_vex(insn_attr_t attr)
218{ 226{
219 return attr & INAT_VEXONLY; 227 return attr & (INAT_VEXONLY | INAT_EVEXONLY);
228}
229
230static inline int inat_must_evex(insn_attr_t attr)
231{
232 return attr & INAT_EVEXONLY;
220} 233}
221#endif 234#endif
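A small property of the helpers above worth making explicit: INAT_EVEXONLY is folded into the mask tested by inat_must_vex(), so every EVEX-only opcode is automatically VEX-mandatory as well. A minimal sketch, assuming the patched inat.h is on the include path:

	#include <assert.h>
	#include "inat.h"

	int main(void)
	{
		insn_attr_t attr = INAT_VEXOK | INAT_EVEXONLY;

		assert(inat_must_evex(attr));	/* EVEX-only bit is set */
		assert(inat_must_vex(attr));	/* implied by the widened mask */
		return 0;
	}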
diff --git a/tools/perf/util/intel-pt-decoder/insn.c b/tools/perf/util/intel-pt-decoder/insn.c
index 9f26eae6c9f0..ca983e2bea8b 100644
--- a/tools/perf/util/intel-pt-decoder/insn.c
+++ b/tools/perf/util/intel-pt-decoder/insn.c
@@ -155,14 +155,24 @@ found:
155 /* 155 /*
156 * In 32-bits mode, if the [7:6] bits (mod bits of 156 * In 32-bits mode, if the [7:6] bits (mod bits of
157 * ModRM) on the second byte are not 11b, it is 157 * ModRM) on the second byte are not 11b, it is
158 * LDS or LES. 158 * LDS or LES or BOUND.
159 */ 159 */
160 if (X86_MODRM_MOD(b2) != 3) 160 if (X86_MODRM_MOD(b2) != 3)
161 goto vex_end; 161 goto vex_end;
162 } 162 }
163 insn->vex_prefix.bytes[0] = b; 163 insn->vex_prefix.bytes[0] = b;
164 insn->vex_prefix.bytes[1] = b2; 164 insn->vex_prefix.bytes[1] = b2;
165 if (inat_is_vex3_prefix(attr)) { 165 if (inat_is_evex_prefix(attr)) {
166 b2 = peek_nbyte_next(insn_byte_t, insn, 2);
167 insn->vex_prefix.bytes[2] = b2;
168 b2 = peek_nbyte_next(insn_byte_t, insn, 3);
169 insn->vex_prefix.bytes[3] = b2;
170 insn->vex_prefix.nbytes = 4;
171 insn->next_byte += 4;
172 if (insn->x86_64 && X86_VEX_W(b2))
173 /* VEX.W overrides opnd_size */
174 insn->opnd_bytes = 8;
175 } else if (inat_is_vex3_prefix(attr)) {
166 b2 = peek_nbyte_next(insn_byte_t, insn, 2); 176 b2 = peek_nbyte_next(insn_byte_t, insn, 2);
167 insn->vex_prefix.bytes[2] = b2; 177 insn->vex_prefix.bytes[2] = b2;
168 insn->vex_prefix.nbytes = 3; 178 insn->vex_prefix.nbytes = 3;
@@ -221,7 +231,9 @@ void insn_get_opcode(struct insn *insn)
221 m = insn_vex_m_bits(insn); 231 m = insn_vex_m_bits(insn);
222 p = insn_vex_p_bits(insn); 232 p = insn_vex_p_bits(insn);
223 insn->attr = inat_get_avx_attribute(op, m, p); 233 insn->attr = inat_get_avx_attribute(op, m, p);
224 if (!inat_accept_vex(insn->attr) && !inat_is_group(insn->attr)) 234 if ((inat_must_evex(insn->attr) && !insn_is_evex(insn)) ||
235 (!inat_accept_vex(insn->attr) &&
236 !inat_is_group(insn->attr)))
225 insn->attr = 0; /* This instruction is bad */ 237 insn->attr = 0; /* This instruction is bad */
226 goto end; /* VEX has only 1 byte for opcode */ 238 goto end; /* VEX has only 1 byte for opcode */
227 } 239 }
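Worth spelling out the 0x62 ambiguity the hunk above resolves: a real EVEX prefix always has the two inverted R/X bits at the top of its second byte set, so that byte reads as ModRM with mod == 11b, while BOUND only ever takes a memory operand (mod != 11b). A hand-worked sketch in 32-bit mode (byte strings assembled by hand, assumed correct):

	#include <stdio.h>
	#include "insn.h"

	int main(void)
	{
		/* "bound %edx,(%ecx,%eax,1)": 0x62 then ModRM 0x14, mod == 00b */
		unsigned char bound_b[] = { 0x62, 0x14, 0x01 };
		/* "vaddpd %zmm4,%zmm5,%zmm6": 0x62 then 0xf1, top bits 11b */
		unsigned char evex_b[] = { 0x62, 0xf1, 0xd5, 0x48, 0x58, 0xf4 };
		struct insn a, b;

		insn_init(&a, bound_b, sizeof(bound_b), 0 /* 32-bit */);
		insn_get_length(&a);
		insn_init(&b, evex_b, sizeof(evex_b), 0 /* 32-bit */);
		insn_get_length(&b);

		/* expect 0 vex bytes for bound, 4 for the EVEX instruction */
		printf("bound: %d, evex: %d\n",
		       a.vex_prefix.nbytes, b.vex_prefix.nbytes);
		return 0;
	}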
diff --git a/tools/perf/util/intel-pt-decoder/insn.h b/tools/perf/util/intel-pt-decoder/insn.h
index dd12da0f4593..e23578c7b1be 100644
--- a/tools/perf/util/intel-pt-decoder/insn.h
+++ b/tools/perf/util/intel-pt-decoder/insn.h
@@ -91,6 +91,7 @@ struct insn {
91#define X86_VEX_B(vex) ((vex) & 0x20) /* VEX3 Byte1 */ 91#define X86_VEX_B(vex) ((vex) & 0x20) /* VEX3 Byte1 */
92#define X86_VEX_L(vex) ((vex) & 0x04) /* VEX3 Byte2, VEX2 Byte1 */ 92#define X86_VEX_L(vex) ((vex) & 0x04) /* VEX3 Byte2, VEX2 Byte1 */
93/* VEX bit fields */ 93/* VEX bit fields */
94#define X86_EVEX_M(vex) ((vex) & 0x03) /* EVEX Byte1 */
94#define X86_VEX3_M(vex) ((vex) & 0x1f) /* VEX3 Byte1 */ 95#define X86_VEX3_M(vex) ((vex) & 0x1f) /* VEX3 Byte1 */
95#define X86_VEX2_M 1 /* VEX2.M always 1 */ 96#define X86_VEX2_M 1 /* VEX2.M always 1 */
96#define X86_VEX_V(vex) (((vex) & 0x78) >> 3) /* VEX3 Byte2, VEX2 Byte1 */ 97#define X86_VEX_V(vex) (((vex) & 0x78) >> 3) /* VEX3 Byte2, VEX2 Byte1 */
@@ -133,6 +134,13 @@ static inline int insn_is_avx(struct insn *insn)
133 return (insn->vex_prefix.value != 0); 134 return (insn->vex_prefix.value != 0);
134} 135}
135 136
137static inline int insn_is_evex(struct insn *insn)
138{
139 if (!insn->prefixes.got)
140 insn_get_prefixes(insn);
141 return (insn->vex_prefix.nbytes == 4);
142}
143
136/* Ensure this instruction is decoded completely */ 144/* Ensure this instruction is decoded completely */
137static inline int insn_complete(struct insn *insn) 145static inline int insn_complete(struct insn *insn)
138{ 146{
@@ -144,8 +152,10 @@ static inline insn_byte_t insn_vex_m_bits(struct insn *insn)
144{ 152{
145 if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */ 153 if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */
146 return X86_VEX2_M; 154 return X86_VEX2_M;
147 else 155 else if (insn->vex_prefix.nbytes == 3) /* 3 bytes VEX */
148 return X86_VEX3_M(insn->vex_prefix.bytes[1]); 156 return X86_VEX3_M(insn->vex_prefix.bytes[1]);
157 else /* EVEX */
158 return X86_EVEX_M(insn->vex_prefix.bytes[1]);
149} 159}
150 160
151static inline insn_byte_t insn_vex_p_bits(struct insn *insn) 161static inline insn_byte_t insn_vex_p_bits(struct insn *insn)
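One consequence of the insn_vex_m_bits() change above, summarized for reference (a summary, not taken from the patch):

	/*
	 * prefix form   m bits extracted            escape selected
	 * 2-byte VEX    implicit, always 1          0f
	 * 3-byte VEX    byte1 & 0x1f (VEX.mmmmm)    1 = 0f, 2 = 0f 38, 3 = 0f 3a
	 * EVEX          byte1 & 0x03 (EVEX.mm)      same 1/2/3 encoding, 2 bits
	 */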
diff --git a/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt b/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
index d388de72eaca..ec378cd7b71e 100644
--- a/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
+++ b/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
@@ -13,12 +13,17 @@
13# opcode: escape # escaped-name 13# opcode: escape # escaped-name
14# EndTable 14# EndTable
15# 15#
16# mnemonics that begin with lowercase 'v' accept a VEX or EVEX prefix
17# mnemonics that begin with lowercase 'k' accept a VEX prefix
18#
16#<group maps> 19#<group maps>
17# GrpTable: GrpXXX 20# GrpTable: GrpXXX
18# reg: mnemonic [operand1[,operand2...]] [(extra1)[,(extra2)...] [| 2nd-mnemonic ...] 21# reg: mnemonic [operand1[,operand2...]] [(extra1)[,(extra2)...] [| 2nd-mnemonic ...]
19# EndTable 22# EndTable
20# 23#
21# AVX Superscripts 24# AVX Superscripts
25# (ev): this opcode requires EVEX prefix.
26# (evo): this opcode is changed by EVEX prefix (EVEX opcode)
22# (v): this opcode requires VEX prefix. 27# (v): this opcode requires VEX prefix.
23# (v1): this opcode only supports 128bit VEX. 28# (v1): this opcode only supports 128bit VEX.
24# 29#
@@ -137,7 +142,7 @@ AVXcode:
137# 0x60 - 0x6f 142# 0x60 - 0x6f
13860: PUSHA/PUSHAD (i64) 14360: PUSHA/PUSHAD (i64)
13961: POPA/POPAD (i64) 14461: POPA/POPAD (i64)
14062: BOUND Gv,Ma (i64) 14562: BOUND Gv,Ma (i64) | EVEX (Prefix)
14163: ARPL Ew,Gw (i64) | MOVSXD Gv,Ev (o64) 14663: ARPL Ew,Gw (i64) | MOVSXD Gv,Ev (o64)
14264: SEG=FS (Prefix) 14764: SEG=FS (Prefix)
14365: SEG=GS (Prefix) 14865: SEG=GS (Prefix)
@@ -399,17 +404,17 @@ AVXcode: 1
3993f: 4043f:
400# 0x0f 0x40-0x4f 405# 0x0f 0x40-0x4f
40140: CMOVO Gv,Ev 40640: CMOVO Gv,Ev
40241: CMOVNO Gv,Ev 40741: CMOVNO Gv,Ev | kandw/q Vk,Hk,Uk | kandb/d Vk,Hk,Uk (66)
40342: CMOVB/C/NAE Gv,Ev 40842: CMOVB/C/NAE Gv,Ev | kandnw/q Vk,Hk,Uk | kandnb/d Vk,Hk,Uk (66)
40443: CMOVAE/NB/NC Gv,Ev 40943: CMOVAE/NB/NC Gv,Ev
40544: CMOVE/Z Gv,Ev 41044: CMOVE/Z Gv,Ev | knotw/q Vk,Uk | knotb/d Vk,Uk (66)
40645: CMOVNE/NZ Gv,Ev 41145: CMOVNE/NZ Gv,Ev | korw/q Vk,Hk,Uk | korb/d Vk,Hk,Uk (66)
40746: CMOVBE/NA Gv,Ev 41246: CMOVBE/NA Gv,Ev | kxnorw/q Vk,Hk,Uk | kxnorb/d Vk,Hk,Uk (66)
40847: CMOVA/NBE Gv,Ev 41347: CMOVA/NBE Gv,Ev | kxorw/q Vk,Hk,Uk | kxorb/d Vk,Hk,Uk (66)
40948: CMOVS Gv,Ev 41448: CMOVS Gv,Ev
41049: CMOVNS Gv,Ev 41549: CMOVNS Gv,Ev
4114a: CMOVP/PE Gv,Ev 4164a: CMOVP/PE Gv,Ev | kaddw/q Vk,Hk,Uk | kaddb/d Vk,Hk,Uk (66)
4124b: CMOVNP/PO Gv,Ev 4174b: CMOVNP/PO Gv,Ev | kunpckbw Vk,Hk,Uk (66) | kunpckwd/dq Vk,Hk,Uk
4134c: CMOVL/NGE Gv,Ev 4184c: CMOVL/NGE Gv,Ev
4144d: CMOVNL/GE Gv,Ev 4194d: CMOVNL/GE Gv,Ev
4154e: CMOVLE/NG Gv,Ev 4204e: CMOVLE/NG Gv,Ev
@@ -426,7 +431,7 @@ AVXcode: 1
42658: vaddps Vps,Hps,Wps | vaddpd Vpd,Hpd,Wpd (66) | vaddss Vss,Hss,Wss (F3),(v1) | vaddsd Vsd,Hsd,Wsd (F2),(v1) 43158: vaddps Vps,Hps,Wps | vaddpd Vpd,Hpd,Wpd (66) | vaddss Vss,Hss,Wss (F3),(v1) | vaddsd Vsd,Hsd,Wsd (F2),(v1)
42759: vmulps Vps,Hps,Wps | vmulpd Vpd,Hpd,Wpd (66) | vmulss Vss,Hss,Wss (F3),(v1) | vmulsd Vsd,Hsd,Wsd (F2),(v1) 43259: vmulps Vps,Hps,Wps | vmulpd Vpd,Hpd,Wpd (66) | vmulss Vss,Hss,Wss (F3),(v1) | vmulsd Vsd,Hsd,Wsd (F2),(v1)
4285a: vcvtps2pd Vpd,Wps | vcvtpd2ps Vps,Wpd (66) | vcvtss2sd Vsd,Hx,Wss (F3),(v1) | vcvtsd2ss Vss,Hx,Wsd (F2),(v1) 4335a: vcvtps2pd Vpd,Wps | vcvtpd2ps Vps,Wpd (66) | vcvtss2sd Vsd,Hx,Wss (F3),(v1) | vcvtsd2ss Vss,Hx,Wsd (F2),(v1)
4295b: vcvtdq2ps Vps,Wdq | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3) 4345b: vcvtdq2ps Vps,Wdq | vcvtqq2ps Vps,Wqq (evo) | vcvtps2dq Vdq,Wps (66) | vcvttps2dq Vdq,Wps (F3)
4305c: vsubps Vps,Hps,Wps | vsubpd Vpd,Hpd,Wpd (66) | vsubss Vss,Hss,Wss (F3),(v1) | vsubsd Vsd,Hsd,Wsd (F2),(v1) 4355c: vsubps Vps,Hps,Wps | vsubpd Vpd,Hpd,Wpd (66) | vsubss Vss,Hss,Wss (F3),(v1) | vsubsd Vsd,Hsd,Wsd (F2),(v1)
4315d: vminps Vps,Hps,Wps | vminpd Vpd,Hpd,Wpd (66) | vminss Vss,Hss,Wss (F3),(v1) | vminsd Vsd,Hsd,Wsd (F2),(v1) 4365d: vminps Vps,Hps,Wps | vminpd Vpd,Hpd,Wpd (66) | vminss Vss,Hss,Wss (F3),(v1) | vminsd Vsd,Hsd,Wsd (F2),(v1)
4325e: vdivps Vps,Hps,Wps | vdivpd Vpd,Hpd,Wpd (66) | vdivss Vss,Hss,Wss (F3),(v1) | vdivsd Vsd,Hsd,Wsd (F2),(v1) 4375e: vdivps Vps,Hps,Wps | vdivpd Vpd,Hpd,Wpd (66) | vdivss Vss,Hss,Wss (F3),(v1) | vdivsd Vsd,Hsd,Wsd (F2),(v1)
@@ -447,7 +452,7 @@ AVXcode: 1
4476c: vpunpcklqdq Vx,Hx,Wx (66),(v1) 4526c: vpunpcklqdq Vx,Hx,Wx (66),(v1)
4486d: vpunpckhqdq Vx,Hx,Wx (66),(v1) 4536d: vpunpckhqdq Vx,Hx,Wx (66),(v1)
4496e: movd/q Pd,Ey | vmovd/q Vy,Ey (66),(v1) 4546e: movd/q Pd,Ey | vmovd/q Vy,Ey (66),(v1)
4506f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqu Vx,Wx (F3) 4556f: movq Pq,Qq | vmovdqa Vx,Wx (66) | vmovdqa32/64 Vx,Wx (66),(evo) | vmovdqu Vx,Wx (F3) | vmovdqu32/64 Vx,Wx (F3),(evo) | vmovdqu8/16 Vx,Wx (F2),(ev)
451# 0x0f 0x70-0x7f 456# 0x0f 0x70-0x7f
45270: pshufw Pq,Qq,Ib | vpshufd Vx,Wx,Ib (66),(v1) | vpshufhw Vx,Wx,Ib (F3),(v1) | vpshuflw Vx,Wx,Ib (F2),(v1) 45770: pshufw Pq,Qq,Ib | vpshufd Vx,Wx,Ib (66),(v1) | vpshufhw Vx,Wx,Ib (F3),(v1) | vpshuflw Vx,Wx,Ib (F2),(v1)
45371: Grp12 (1A) 45871: Grp12 (1A)
@@ -458,14 +463,14 @@ AVXcode: 1
45876: pcmpeqd Pq,Qq | vpcmpeqd Vx,Hx,Wx (66),(v1) 46376: pcmpeqd Pq,Qq | vpcmpeqd Vx,Hx,Wx (66),(v1)
459# Note: Remove (v), because vzeroall and vzeroupper become emms without VEX. 464# Note: Remove (v), because vzeroall and vzeroupper become emms without VEX.
46077: emms | vzeroupper | vzeroall 46577: emms | vzeroupper | vzeroall
46178: VMREAD Ey,Gy 46678: VMREAD Ey,Gy | vcvttps2udq/pd2udq Vx,Wpd (evo) | vcvttsd2usi Gv,Wx (F2),(ev) | vcvttss2usi Gv,Wx (F3),(ev) | vcvttps2uqq/pd2uqq Vx,Wx (66),(ev)
46279: VMWRITE Gy,Ey 46779: VMWRITE Gy,Ey | vcvtps2udq/pd2udq Vx,Wpd (evo) | vcvtsd2usi Gv,Wx (F2),(ev) | vcvtss2usi Gv,Wx (F3),(ev) | vcvtps2uqq/pd2uqq Vx,Wx (66),(ev)
4637a: 4687a: vcvtudq2pd/uqq2pd Vpd,Wx (F3),(ev) | vcvtudq2ps/uqq2ps Vpd,Wx (F2),(ev) | vcvttps2qq/pd2qq Vx,Wx (66),(ev)
4647b: 4697b: vcvtusi2sd Vpd,Hpd,Ev (F2),(ev) | vcvtusi2ss Vps,Hps,Ev (F3),(ev) | vcvtps2qq/pd2qq Vx,Wx (66),(ev)
4657c: vhaddpd Vpd,Hpd,Wpd (66) | vhaddps Vps,Hps,Wps (F2) 4707c: vhaddpd Vpd,Hpd,Wpd (66) | vhaddps Vps,Hps,Wps (F2)
4667d: vhsubpd Vpd,Hpd,Wpd (66) | vhsubps Vps,Hps,Wps (F2) 4717d: vhsubpd Vpd,Hpd,Wpd (66) | vhsubps Vps,Hps,Wps (F2)
4677e: movd/q Ey,Pd | vmovd/q Ey,Vy (66),(v1) | vmovq Vq,Wq (F3),(v1) 4727e: movd/q Ey,Pd | vmovd/q Ey,Vy (66),(v1) | vmovq Vq,Wq (F3),(v1)
4687f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqu Wx,Vx (F3) 4737f: movq Qq,Pq | vmovdqa Wx,Vx (66) | vmovdqa32/64 Wx,Vx (66),(evo) | vmovdqu Wx,Vx (F3) | vmovdqu32/64 Wx,Vx (F3),(evo) | vmovdqu8/16 Wx,Vx (F2),(ev)
469# 0x0f 0x80-0x8f 474# 0x0f 0x80-0x8f
470# Note: "forced64" is Intel CPU behavior (see comment about CALL insn). 475# Note: "forced64" is Intel CPU behavior (see comment about CALL insn).
47180: JO Jz (f64) 47680: JO Jz (f64)
@@ -485,16 +490,16 @@ AVXcode: 1
4858e: JLE/JNG Jz (f64) 4908e: JLE/JNG Jz (f64)
4868f: JNLE/JG Jz (f64) 4918f: JNLE/JG Jz (f64)
487# 0x0f 0x90-0x9f 492# 0x0f 0x90-0x9f
48890: SETO Eb 49390: SETO Eb | kmovw/q Vk,Wk | kmovb/d Vk,Wk (66)
48991: SETNO Eb 49491: SETNO Eb | kmovw/q Mv,Vk | kmovb/d Mv,Vk (66)
49092: SETB/C/NAE Eb 49592: SETB/C/NAE Eb | kmovw Vk,Rv | kmovb Vk,Rv (66) | kmovq/d Vk,Rv (F2)
49193: SETAE/NB/NC Eb 49693: SETAE/NB/NC Eb | kmovw Gv,Uk | kmovb Gv,Uk (66) | kmovq/d Gv,Uk (F2)
49294: SETE/Z Eb 49794: SETE/Z Eb
49395: SETNE/NZ Eb 49895: SETNE/NZ Eb
49496: SETBE/NA Eb 49996: SETBE/NA Eb
49597: SETA/NBE Eb 50097: SETA/NBE Eb
49698: SETS Eb 50198: SETS Eb | kortestw/q Vk,Uk | kortestb/d Vk,Uk (66)
49799: SETNS Eb 50299: SETNS Eb | ktestw/q Vk,Uk | ktestb/d Vk,Uk (66)
4989a: SETP/PE Eb 5039a: SETP/PE Eb
4999b: SETNP/PO Eb 5049b: SETNP/PO Eb
5009c: SETL/NGE Eb 5059c: SETL/NGE Eb
@@ -564,11 +569,11 @@ d7: pmovmskb Gd,Nq | vpmovmskb Gd,Ux (66),(v1)
564d8: psubusb Pq,Qq | vpsubusb Vx,Hx,Wx (66),(v1) 569d8: psubusb Pq,Qq | vpsubusb Vx,Hx,Wx (66),(v1)
565d9: psubusw Pq,Qq | vpsubusw Vx,Hx,Wx (66),(v1) 570d9: psubusw Pq,Qq | vpsubusw Vx,Hx,Wx (66),(v1)
566da: pminub Pq,Qq | vpminub Vx,Hx,Wx (66),(v1) 571da: pminub Pq,Qq | vpminub Vx,Hx,Wx (66),(v1)
567db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1) 572db: pand Pq,Qq | vpand Vx,Hx,Wx (66),(v1) | vpandd/q Vx,Hx,Wx (66),(evo)
568dc: paddusb Pq,Qq | vpaddusb Vx,Hx,Wx (66),(v1) 573dc: paddusb Pq,Qq | vpaddusb Vx,Hx,Wx (66),(v1)
569dd: paddusw Pq,Qq | vpaddusw Vx,Hx,Wx (66),(v1) 574dd: paddusw Pq,Qq | vpaddusw Vx,Hx,Wx (66),(v1)
570de: pmaxub Pq,Qq | vpmaxub Vx,Hx,Wx (66),(v1) 575de: pmaxub Pq,Qq | vpmaxub Vx,Hx,Wx (66),(v1)
571df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1) 576df: pandn Pq,Qq | vpandn Vx,Hx,Wx (66),(v1) | vpandnd/q Vx,Hx,Wx (66),(evo)
572# 0x0f 0xe0-0xef 577# 0x0f 0xe0-0xef
573e0: pavgb Pq,Qq | vpavgb Vx,Hx,Wx (66),(v1) 578e0: pavgb Pq,Qq | vpavgb Vx,Hx,Wx (66),(v1)
574e1: psraw Pq,Qq | vpsraw Vx,Hx,Wx (66),(v1) 579e1: psraw Pq,Qq | vpsraw Vx,Hx,Wx (66),(v1)
@@ -576,16 +581,16 @@ e2: psrad Pq,Qq | vpsrad Vx,Hx,Wx (66),(v1)
576e3: pavgw Pq,Qq | vpavgw Vx,Hx,Wx (66),(v1) 581e3: pavgw Pq,Qq | vpavgw Vx,Hx,Wx (66),(v1)
577e4: pmulhuw Pq,Qq | vpmulhuw Vx,Hx,Wx (66),(v1) 582e4: pmulhuw Pq,Qq | vpmulhuw Vx,Hx,Wx (66),(v1)
578e5: pmulhw Pq,Qq | vpmulhw Vx,Hx,Wx (66),(v1) 583e5: pmulhw Pq,Qq | vpmulhw Vx,Hx,Wx (66),(v1)
579e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtpd2dq Vx,Wpd (F2) 584e6: vcvttpd2dq Vx,Wpd (66) | vcvtdq2pd Vx,Wdq (F3) | vcvtdq2pd/qq2pd Vx,Wdq (F3),(evo) | vcvtpd2dq Vx,Wpd (F2)
580e7: movntq Mq,Pq | vmovntdq Mx,Vx (66) 585e7: movntq Mq,Pq | vmovntdq Mx,Vx (66)
581e8: psubsb Pq,Qq | vpsubsb Vx,Hx,Wx (66),(v1) 586e8: psubsb Pq,Qq | vpsubsb Vx,Hx,Wx (66),(v1)
582e9: psubsw Pq,Qq | vpsubsw Vx,Hx,Wx (66),(v1) 587e9: psubsw Pq,Qq | vpsubsw Vx,Hx,Wx (66),(v1)
583ea: pminsw Pq,Qq | vpminsw Vx,Hx,Wx (66),(v1) 588ea: pminsw Pq,Qq | vpminsw Vx,Hx,Wx (66),(v1)
584eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1) 589eb: por Pq,Qq | vpor Vx,Hx,Wx (66),(v1) | vpord/q Vx,Hx,Wx (66),(evo)
585ec: paddsb Pq,Qq | vpaddsb Vx,Hx,Wx (66),(v1) 590ec: paddsb Pq,Qq | vpaddsb Vx,Hx,Wx (66),(v1)
586ed: paddsw Pq,Qq | vpaddsw Vx,Hx,Wx (66),(v1) 591ed: paddsw Pq,Qq | vpaddsw Vx,Hx,Wx (66),(v1)
587ee: pmaxsw Pq,Qq | vpmaxsw Vx,Hx,Wx (66),(v1) 592ee: pmaxsw Pq,Qq | vpmaxsw Vx,Hx,Wx (66),(v1)
588ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1) 593ef: pxor Pq,Qq | vpxor Vx,Hx,Wx (66),(v1) | vpxord/q Vx,Hx,Wx (66),(evo)
589# 0x0f 0xf0-0xff 594# 0x0f 0xf0-0xff
590f0: vlddqu Vx,Mx (F2) 595f0: vlddqu Vx,Mx (F2)
591f1: psllw Pq,Qq | vpsllw Vx,Hx,Wx (66),(v1) 596f1: psllw Pq,Qq | vpsllw Vx,Hx,Wx (66),(v1)
@@ -626,81 +631,105 @@ AVXcode: 2
6260e: vtestps Vx,Wx (66),(v) 6310e: vtestps Vx,Wx (66),(v)
6270f: vtestpd Vx,Wx (66),(v) 6320f: vtestpd Vx,Wx (66),(v)
628# 0x0f 0x38 0x10-0x1f 633# 0x0f 0x38 0x10-0x1f
62910: pblendvb Vdq,Wdq (66) 63410: pblendvb Vdq,Wdq (66) | vpsrlvw Vx,Hx,Wx (66),(evo) | vpmovuswb Wx,Vx (F3),(ev)
63011: 63511: vpmovusdb Wx,Vd (F3),(ev) | vpsravw Vx,Hx,Wx (66),(ev)
63112: 63612: vpmovusqb Wx,Vq (F3),(ev) | vpsllvw Vx,Hx,Wx (66),(ev)
63213: vcvtph2ps Vx,Wx,Ib (66),(v) 63713: vcvtph2ps Vx,Wx (66),(v) | vpmovusdw Wx,Vd (F3),(ev)
63314: blendvps Vdq,Wdq (66) 63814: blendvps Vdq,Wdq (66) | vpmovusqw Wx,Vq (F3),(ev) | vprorvd/q Vx,Hx,Wx (66),(evo)
63415: blendvpd Vdq,Wdq (66) 63915: blendvpd Vdq,Wdq (66) | vpmovusqd Wx,Vq (F3),(ev) | vprolvd/q Vx,Hx,Wx (66),(evo)
63516: vpermps Vqq,Hqq,Wqq (66),(v) 64016: vpermps Vqq,Hqq,Wqq (66),(v) | vpermps/d Vqq,Hqq,Wqq (66),(evo)
63617: vptest Vx,Wx (66) 64117: vptest Vx,Wx (66)
63718: vbroadcastss Vx,Wd (66),(v) 64218: vbroadcastss Vx,Wd (66),(v)
63819: vbroadcastsd Vqq,Wq (66),(v) 64319: vbroadcastsd Vqq,Wq (66),(v) | vbroadcastf32x2 Vqq,Wq (66),(evo)
6391a: vbroadcastf128 Vqq,Mdq (66),(v) 6441a: vbroadcastf128 Vqq,Mdq (66),(v) | vbroadcastf32x4/64x2 Vqq,Wq (66),(evo)
6401b: 6451b: vbroadcastf32x8/64x4 Vqq,Mdq (66),(ev)
6411c: pabsb Pq,Qq | vpabsb Vx,Wx (66),(v1) 6461c: pabsb Pq,Qq | vpabsb Vx,Wx (66),(v1)
6421d: pabsw Pq,Qq | vpabsw Vx,Wx (66),(v1) 6471d: pabsw Pq,Qq | vpabsw Vx,Wx (66),(v1)
6431e: pabsd Pq,Qq | vpabsd Vx,Wx (66),(v1) 6481e: pabsd Pq,Qq | vpabsd Vx,Wx (66),(v1)
6441f: 6491f: vpabsq Vx,Wx (66),(ev)
645# 0x0f 0x38 0x20-0x2f 650# 0x0f 0x38 0x20-0x2f
64620: vpmovsxbw Vx,Ux/Mq (66),(v1) 65120: vpmovsxbw Vx,Ux/Mq (66),(v1) | vpmovswb Wx,Vx (F3),(ev)
64721: vpmovsxbd Vx,Ux/Md (66),(v1) 65221: vpmovsxbd Vx,Ux/Md (66),(v1) | vpmovsdb Wx,Vd (F3),(ev)
64822: vpmovsxbq Vx,Ux/Mw (66),(v1) 65322: vpmovsxbq Vx,Ux/Mw (66),(v1) | vpmovsqb Wx,Vq (F3),(ev)
64923: vpmovsxwd Vx,Ux/Mq (66),(v1) 65423: vpmovsxwd Vx,Ux/Mq (66),(v1) | vpmovsdw Wx,Vd (F3),(ev)
65024: vpmovsxwq Vx,Ux/Md (66),(v1) 65524: vpmovsxwq Vx,Ux/Md (66),(v1) | vpmovsqw Wx,Vq (F3),(ev)
65125: vpmovsxdq Vx,Ux/Mq (66),(v1) 65625: vpmovsxdq Vx,Ux/Mq (66),(v1) | vpmovsqd Wx,Vq (F3),(ev)
65226: 65726: vptestmb/w Vk,Hx,Wx (66),(ev) | vptestnmb/w Vk,Hx,Wx (F3),(ev)
65327: 65827: vptestmd/q Vk,Hx,Wx (66),(ev) | vptestnmd/q Vk,Hx,Wx (F3),(ev)
65428: vpmuldq Vx,Hx,Wx (66),(v1) 65928: vpmuldq Vx,Hx,Wx (66),(v1) | vpmovm2b/w Vx,Uk (F3),(ev)
65529: vpcmpeqq Vx,Hx,Wx (66),(v1) 66029: vpcmpeqq Vx,Hx,Wx (66),(v1) | vpmovb2m/w2m Vk,Ux (F3),(ev)
6562a: vmovntdqa Vx,Mx (66),(v1) 6612a: vmovntdqa Vx,Mx (66),(v1) | vpbroadcastmb2q Vx,Uk (F3),(ev)
6572b: vpackusdw Vx,Hx,Wx (66),(v1) 6622b: vpackusdw Vx,Hx,Wx (66),(v1)
6582c: vmaskmovps Vx,Hx,Mx (66),(v) 6632c: vmaskmovps Vx,Hx,Mx (66),(v) | vscalefps/d Vx,Hx,Wx (66),(evo)
6592d: vmaskmovpd Vx,Hx,Mx (66),(v) 6642d: vmaskmovpd Vx,Hx,Mx (66),(v) | vscalefss/d Vx,Hx,Wx (66),(evo)
6602e: vmaskmovps Mx,Hx,Vx (66),(v) 6652e: vmaskmovps Mx,Hx,Vx (66),(v)
6612f: vmaskmovpd Mx,Hx,Vx (66),(v) 6662f: vmaskmovpd Mx,Hx,Vx (66),(v)
662# 0x0f 0x38 0x30-0x3f 667# 0x0f 0x38 0x30-0x3f
66330: vpmovzxbw Vx,Ux/Mq (66),(v1) 66830: vpmovzxbw Vx,Ux/Mq (66),(v1) | vpmovwb Wx,Vx (F3),(ev)
66431: vpmovzxbd Vx,Ux/Md (66),(v1) 66931: vpmovzxbd Vx,Ux/Md (66),(v1) | vpmovdb Wx,Vd (F3),(ev)
66532: vpmovzxbq Vx,Ux/Mw (66),(v1) 67032: vpmovzxbq Vx,Ux/Mw (66),(v1) | vpmovqb Wx,Vq (F3),(ev)
66633: vpmovzxwd Vx,Ux/Mq (66),(v1) 67133: vpmovzxwd Vx,Ux/Mq (66),(v1) | vpmovdw Wx,Vd (F3),(ev)
66734: vpmovzxwq Vx,Ux/Md (66),(v1) 67234: vpmovzxwq Vx,Ux/Md (66),(v1) | vpmovqw Wx,Vq (F3),(ev)
66835: vpmovzxdq Vx,Ux/Mq (66),(v1) 67335: vpmovzxdq Vx,Ux/Mq (66),(v1) | vpmovqd Wx,Vq (F3),(ev)
66936: vpermd Vqq,Hqq,Wqq (66),(v) 67436: vpermd Vqq,Hqq,Wqq (66),(v) | vpermd/q Vqq,Hqq,Wqq (66),(evo)
67037: vpcmpgtq Vx,Hx,Wx (66),(v1) 67537: vpcmpgtq Vx,Hx,Wx (66),(v1)
67138: vpminsb Vx,Hx,Wx (66),(v1) 67638: vpminsb Vx,Hx,Wx (66),(v1) | vpmovm2d/q Vx,Uk (F3),(ev)
67239: vpminsd Vx,Hx,Wx (66),(v1) 67739: vpminsd Vx,Hx,Wx (66),(v1) | vpminsd/q Vx,Hx,Wx (66),(evo) | vpmovd2m/q2m Vk,Ux (F3),(ev)
6733a: vpminuw Vx,Hx,Wx (66),(v1) 6783a: vpminuw Vx,Hx,Wx (66),(v1) | vpbroadcastmw2d Vx,Uk (F3),(ev)
6743b: vpminud Vx,Hx,Wx (66),(v1) 6793b: vpminud Vx,Hx,Wx (66),(v1) | vpminud/q Vx,Hx,Wx (66),(evo)
6753c: vpmaxsb Vx,Hx,Wx (66),(v1) 6803c: vpmaxsb Vx,Hx,Wx (66),(v1)
6763d: vpmaxsd Vx,Hx,Wx (66),(v1) 6813d: vpmaxsd Vx,Hx,Wx (66),(v1) | vpmaxsd/q Vx,Hx,Wx (66),(evo)
6773e: vpmaxuw Vx,Hx,Wx (66),(v1) 6823e: vpmaxuw Vx,Hx,Wx (66),(v1)
6783f: vpmaxud Vx,Hx,Wx (66),(v1) 6833f: vpmaxud Vx,Hx,Wx (66),(v1) | vpmaxud/q Vx,Hx,Wx (66),(evo)
679# 0x0f 0x38 0x40-0x8f 684# 0x0f 0x38 0x40-0x8f
68040: vpmulld Vx,Hx,Wx (66),(v1) 68540: vpmulld Vx,Hx,Wx (66),(v1) | vpmulld/q Vx,Hx,Wx (66),(evo)
68141: vphminposuw Vdq,Wdq (66),(v1) 68641: vphminposuw Vdq,Wdq (66),(v1)
68242: 68742: vgetexpps/d Vx,Wx (66),(ev)
68343: 68843: vgetexpss/d Vx,Hx,Wx (66),(ev)
68444: 68944: vplzcntd/q Vx,Wx (66),(ev)
68545: vpsrlvd/q Vx,Hx,Wx (66),(v) 69045: vpsrlvd/q Vx,Hx,Wx (66),(v)
68646: vpsravd Vx,Hx,Wx (66),(v) 69146: vpsravd Vx,Hx,Wx (66),(v) | vpsravd/q Vx,Hx,Wx (66),(evo)
68747: vpsllvd/q Vx,Hx,Wx (66),(v) 69247: vpsllvd/q Vx,Hx,Wx (66),(v)
688# Skip 0x48-0x57 693# Skip 0x48-0x4b
6944c: vrcp14ps/d Vpd,Wpd (66),(ev)
6954d: vrcp14ss/d Vsd,Hpd,Wsd (66),(ev)
6964e: vrsqrt14ps/d Vpd,Wpd (66),(ev)
6974f: vrsqrt14ss/d Vsd,Hsd,Wsd (66),(ev)
698# Skip 0x50-0x57
68958: vpbroadcastd Vx,Wx (66),(v) 69958: vpbroadcastd Vx,Wx (66),(v)
69059: vpbroadcastq Vx,Wx (66),(v) 70059: vpbroadcastq Vx,Wx (66),(v) | vbroadcasti32x2 Vx,Wx (66),(evo)
6915a: vbroadcasti128 Vqq,Mdq (66),(v) 7015a: vbroadcasti128 Vqq,Mdq (66),(v) | vbroadcasti32x4/64x2 Vx,Wx (66),(evo)
692# Skip 0x5b-0x77 7025b: vbroadcasti32x8/64x4 Vqq,Mdq (66),(ev)
703# Skip 0x5c-0x63
70464: vpblendmd/q Vx,Hx,Wx (66),(ev)
70565: vblendmps/d Vx,Hx,Wx (66),(ev)
70666: vpblendmb/w Vx,Hx,Wx (66),(ev)
707# Skip 0x67-0x74
70875: vpermi2b/w Vx,Hx,Wx (66),(ev)
70976: vpermi2d/q Vx,Hx,Wx (66),(ev)
71077: vpermi2ps/d Vx,Hx,Wx (66),(ev)
69378: vpbroadcastb Vx,Wx (66),(v) 71178: vpbroadcastb Vx,Wx (66),(v)
69479: vpbroadcastw Vx,Wx (66),(v) 71279: vpbroadcastw Vx,Wx (66),(v)
695# Skip 0x7a-0x7f 7137a: vpbroadcastb Vx,Rv (66),(ev)
7147b: vpbroadcastw Vx,Rv (66),(ev)
7157c: vpbroadcastd/q Vx,Rv (66),(ev)
7167d: vpermt2b/w Vx,Hx,Wx (66),(ev)
7177e: vpermt2d/q Vx,Hx,Wx (66),(ev)
7187f: vpermt2ps/d Vx,Hx,Wx (66),(ev)
69680: INVEPT Gy,Mdq (66) 71980: INVEPT Gy,Mdq (66)
69781: INVVPID Gy,Mdq (66) 72081: INVVPID Gy,Mdq (66)
69882: INVPCID Gy,Mdq (66) 72182: INVPCID Gy,Mdq (66)
72283: vpmultishiftqb Vx,Hx,Wx (66),(ev)
72388: vexpandps/d Vpd,Wpd (66),(ev)
72489: vpexpandd/q Vx,Wx (66),(ev)
7258a: vcompressps/d Wx,Vx (66),(ev)
7268b: vpcompressd/q Wx,Vx (66),(ev)
6998c: vpmaskmovd/q Vx,Hx,Mx (66),(v) 7278c: vpmaskmovd/q Vx,Hx,Mx (66),(v)
7288d: vpermb/w Vx,Hx,Wx (66),(ev)
7008e: vpmaskmovd/q Mx,Vx,Hx (66),(v) 7298e: vpmaskmovd/q Mx,Vx,Hx (66),(v)
701# 0x0f 0x38 0x90-0xbf (FMA) 730# 0x0f 0x38 0x90-0xbf (FMA)
70290: vgatherdd/q Vx,Hx,Wx (66),(v) 73190: vgatherdd/q Vx,Hx,Wx (66),(v) | vpgatherdd/q Vx,Wx (66),(evo)
70391: vgatherqd/q Vx,Hx,Wx (66),(v) 73291: vgatherqd/q Vx,Hx,Wx (66),(v) | vpgatherqd/q Vx,Wx (66),(evo)
70492: vgatherdps/d Vx,Hx,Wx (66),(v) 73392: vgatherdps/d Vx,Hx,Wx (66),(v)
70593: vgatherqps/d Vx,Hx,Wx (66),(v) 73493: vgatherqps/d Vx,Hx,Wx (66),(v)
70694: 73594:
@@ -715,6 +744,10 @@ AVXcode: 2
7159d: vfnmadd132ss/d Vx,Hx,Wx (66),(v),(v1) 7449d: vfnmadd132ss/d Vx,Hx,Wx (66),(v),(v1)
7169e: vfnmsub132ps/d Vx,Hx,Wx (66),(v) 7459e: vfnmsub132ps/d Vx,Hx,Wx (66),(v)
7179f: vfnmsub132ss/d Vx,Hx,Wx (66),(v),(v1) 7469f: vfnmsub132ss/d Vx,Hx,Wx (66),(v),(v1)
747a0: vpscatterdd/q Wx,Vx (66),(ev)
748a1: vpscatterqd/q Wx,Vx (66),(ev)
749a2: vscatterdps/d Wx,Vx (66),(ev)
750a3: vscatterqps/d Wx,Vx (66),(ev)
718a6: vfmaddsub213ps/d Vx,Hx,Wx (66),(v) 751a6: vfmaddsub213ps/d Vx,Hx,Wx (66),(v)
719a7: vfmsubadd213ps/d Vx,Hx,Wx (66),(v) 752a7: vfmsubadd213ps/d Vx,Hx,Wx (66),(v)
720a8: vfmadd213ps/d Vx,Hx,Wx (66),(v) 753a8: vfmadd213ps/d Vx,Hx,Wx (66),(v)
@@ -725,6 +758,8 @@ ac: vfnmadd213ps/d Vx,Hx,Wx (66),(v)
725ad: vfnmadd213ss/d Vx,Hx,Wx (66),(v),(v1) 758ad: vfnmadd213ss/d Vx,Hx,Wx (66),(v),(v1)
726ae: vfnmsub213ps/d Vx,Hx,Wx (66),(v) 759ae: vfnmsub213ps/d Vx,Hx,Wx (66),(v)
727af: vfnmsub213ss/d Vx,Hx,Wx (66),(v),(v1) 760af: vfnmsub213ss/d Vx,Hx,Wx (66),(v),(v1)
761b4: vpmadd52luq Vx,Hx,Wx (66),(ev)
762b5: vpmadd52huq Vx,Hx,Wx (66),(ev)
728b6: vfmaddsub231ps/d Vx,Hx,Wx (66),(v) 763b6: vfmaddsub231ps/d Vx,Hx,Wx (66),(v)
729b7: vfmsubadd231ps/d Vx,Hx,Wx (66),(v) 764b7: vfmsubadd231ps/d Vx,Hx,Wx (66),(v)
730b8: vfmadd231ps/d Vx,Hx,Wx (66),(v) 765b8: vfmadd231ps/d Vx,Hx,Wx (66),(v)
@@ -736,12 +771,15 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
736be: vfnmsub231ps/d Vx,Hx,Wx (66),(v) 771be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
737bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1) 772bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
738# 0x0f 0x38 0xc0-0xff 773# 0x0f 0x38 0xc0-0xff
739c8: sha1nexte Vdq,Wdq 774c4: vpconflictd/q Vx,Wx (66),(ev)
775c6: Grp18 (1A)
776c7: Grp19 (1A)
777c8: sha1nexte Vdq,Wdq | vexp2ps/d Vx,Wx (66),(ev)
740c9: sha1msg1 Vdq,Wdq 778c9: sha1msg1 Vdq,Wdq
741ca: sha1msg2 Vdq,Wdq 779ca: sha1msg2 Vdq,Wdq | vrcp28ps/d Vx,Wx (66),(ev)
742cb: sha256rnds2 Vdq,Wdq 780cb: sha256rnds2 Vdq,Wdq | vrcp28ss/d Vx,Hx,Wx (66),(ev)
743cc: sha256msg1 Vdq,Wdq 781cc: sha256msg1 Vdq,Wdq | vrsqrt28ps/d Vx,Wx (66),(ev)
744cd: sha256msg2 Vdq,Wdq 782cd: sha256msg2 Vdq,Wdq | vrsqrt28ss/d Vx,Hx,Wx (66),(ev)
745db: VAESIMC Vdq,Wdq (66),(v1) 783db: VAESIMC Vdq,Wdq (66),(v1)
746dc: VAESENC Vdq,Hdq,Wdq (66),(v1) 784dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
747dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1) 785dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
@@ -763,15 +801,15 @@ AVXcode: 3
76300: vpermq Vqq,Wqq,Ib (66),(v) 80100: vpermq Vqq,Wqq,Ib (66),(v)
76401: vpermpd Vqq,Wqq,Ib (66),(v) 80201: vpermpd Vqq,Wqq,Ib (66),(v)
76502: vpblendd Vx,Hx,Wx,Ib (66),(v) 80302: vpblendd Vx,Hx,Wx,Ib (66),(v)
76603: 80403: valignd/q Vx,Hx,Wx,Ib (66),(ev)
76704: vpermilps Vx,Wx,Ib (66),(v) 80504: vpermilps Vx,Wx,Ib (66),(v)
76805: vpermilpd Vx,Wx,Ib (66),(v) 80605: vpermilpd Vx,Wx,Ib (66),(v)
76906: vperm2f128 Vqq,Hqq,Wqq,Ib (66),(v) 80706: vperm2f128 Vqq,Hqq,Wqq,Ib (66),(v)
77007: 80807:
77108: vroundps Vx,Wx,Ib (66) 80908: vroundps Vx,Wx,Ib (66) | vrndscaleps Vx,Wx,Ib (66),(evo)
77209: vroundpd Vx,Wx,Ib (66) 81009: vroundpd Vx,Wx,Ib (66) | vrndscalepd Vx,Wx,Ib (66),(evo)
7730a: vroundss Vss,Wss,Ib (66),(v1) 8110a: vroundss Vss,Wss,Ib (66),(v1) | vrndscaless Vx,Hx,Wx,Ib (66),(evo)
7740b: vroundsd Vsd,Wsd,Ib (66),(v1) 8120b: vroundsd Vsd,Wsd,Ib (66),(v1) | vrndscalesd Vx,Hx,Wx,Ib (66),(evo)
7750c: vblendps Vx,Hx,Wx,Ib (66) 8130c: vblendps Vx,Hx,Wx,Ib (66)
7760d: vblendpd Vx,Hx,Wx,Ib (66) 8140d: vblendpd Vx,Hx,Wx,Ib (66)
7770e: vpblendw Vx,Hx,Wx,Ib (66),(v1) 8150e: vpblendw Vx,Hx,Wx,Ib (66),(v1)
@@ -780,26 +818,51 @@ AVXcode: 3
78015: vpextrw Rd/Mw,Vdq,Ib (66),(v1) 81815: vpextrw Rd/Mw,Vdq,Ib (66),(v1)
78116: vpextrd/q Ey,Vdq,Ib (66),(v1) 81916: vpextrd/q Ey,Vdq,Ib (66),(v1)
78217: vextractps Ed,Vdq,Ib (66),(v1) 82017: vextractps Ed,Vdq,Ib (66),(v1)
78318: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v) 82118: vinsertf128 Vqq,Hqq,Wqq,Ib (66),(v) | vinsertf32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
78419: vextractf128 Wdq,Vqq,Ib (66),(v) 82219: vextractf128 Wdq,Vqq,Ib (66),(v) | vextractf32x4/64x2 Wdq,Vqq,Ib (66),(evo)
8231a: vinsertf32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
8241b: vextractf32x8/64x4 Wdq,Vqq,Ib (66),(ev)
7851d: vcvtps2ph Wx,Vx,Ib (66),(v) 8251d: vcvtps2ph Wx,Vx,Ib (66),(v)
8261e: vpcmpud/q Vk,Hd,Wd,Ib (66),(ev)
8271f: vpcmpd/q Vk,Hd,Wd,Ib (66),(ev)
78620: vpinsrb Vdq,Hdq,Ry/Mb,Ib (66),(v1) 82820: vpinsrb Vdq,Hdq,Ry/Mb,Ib (66),(v1)
78721: vinsertps Vdq,Hdq,Udq/Md,Ib (66),(v1) 82921: vinsertps Vdq,Hdq,Udq/Md,Ib (66),(v1)
78822: vpinsrd/q Vdq,Hdq,Ey,Ib (66),(v1) 83022: vpinsrd/q Vdq,Hdq,Ey,Ib (66),(v1)
78938: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v) 83123: vshuff32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
79039: vextracti128 Wdq,Vqq,Ib (66),(v) 83225: vpternlogd/q Vx,Hx,Wx,Ib (66),(ev)
83326: vgetmantps/d Vx,Wx,Ib (66),(ev)
83427: vgetmantss/d Vx,Hx,Wx,Ib (66),(ev)
83530: kshiftrb/w Vk,Uk,Ib (66),(v)
83631: kshiftrd/q Vk,Uk,Ib (66),(v)
83732: kshiftlb/w Vk,Uk,Ib (66),(v)
83833: kshiftld/q Vk,Uk,Ib (66),(v)
83938: vinserti128 Vqq,Hqq,Wqq,Ib (66),(v) | vinserti32x4/64x2 Vqq,Hqq,Wqq,Ib (66),(evo)
84039: vextracti128 Wdq,Vqq,Ib (66),(v) | vextracti32x4/64x2 Wdq,Vqq,Ib (66),(evo)
8413a: vinserti32x8/64x4 Vqq,Hqq,Wqq,Ib (66),(ev)
8423b: vextracti32x8/64x4 Wdq,Vqq,Ib (66),(ev)
8433e: vpcmpub/w Vk,Hk,Wx,Ib (66),(ev)
8443f: vpcmpb/w Vk,Hk,Wx,Ib (66),(ev)
79140: vdpps Vx,Hx,Wx,Ib (66) 84540: vdpps Vx,Hx,Wx,Ib (66)
79241: vdppd Vdq,Hdq,Wdq,Ib (66),(v1) 84641: vdppd Vdq,Hdq,Wdq,Ib (66),(v1)
79342: vmpsadbw Vx,Hx,Wx,Ib (66),(v1) 84742: vmpsadbw Vx,Hx,Wx,Ib (66),(v1) | vdbpsadbw Vx,Hx,Wx,Ib (66),(evo)
84843: vshufi32x4/64x2 Vx,Hx,Wx,Ib (66),(ev)
79444: vpclmulqdq Vdq,Hdq,Wdq,Ib (66),(v1) 84944: vpclmulqdq Vdq,Hdq,Wdq,Ib (66),(v1)
79546: vperm2i128 Vqq,Hqq,Wqq,Ib (66),(v) 85046: vperm2i128 Vqq,Hqq,Wqq,Ib (66),(v)
7964a: vblendvps Vx,Hx,Wx,Lx (66),(v) 8514a: vblendvps Vx,Hx,Wx,Lx (66),(v)
7974b: vblendvpd Vx,Hx,Wx,Lx (66),(v) 8524b: vblendvpd Vx,Hx,Wx,Lx (66),(v)
7984c: vpblendvb Vx,Hx,Wx,Lx (66),(v1) 8534c: vpblendvb Vx,Hx,Wx,Lx (66),(v1)
85450: vrangeps/d Vx,Hx,Wx,Ib (66),(ev)
85551: vrangess/d Vx,Hx,Wx,Ib (66),(ev)
85654: vfixupimmps/d Vx,Hx,Wx,Ib (66),(ev)
85755: vfixupimmss/d Vx,Hx,Wx,Ib (66),(ev)
85856: vreduceps/d Vx,Wx,Ib (66),(ev)
85957: vreducess/d Vx,Hx,Wx,Ib (66),(ev)
79960: vpcmpestrm Vdq,Wdq,Ib (66),(v1) 86060: vpcmpestrm Vdq,Wdq,Ib (66),(v1)
80061: vpcmpestri Vdq,Wdq,Ib (66),(v1) 86161: vpcmpestri Vdq,Wdq,Ib (66),(v1)
80162: vpcmpistrm Vdq,Wdq,Ib (66),(v1) 86262: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
80263: vpcmpistri Vdq,Wdq,Ib (66),(v1) 86363: vpcmpistri Vdq,Wdq,Ib (66),(v1)
86466: vfpclassps/d Vk,Wx,Ib (66),(ev)
86567: vfpclassss/d Vk,Wx,Ib (66),(ev)
803cc: sha1rnds4 Vdq,Wdq,Ib 866cc: sha1rnds4 Vdq,Wdq,Ib
804df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1) 867df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
805f0: RORX Gy,Ey,Ib (F2),(v) 868f0: RORX Gy,Ey,Ib (F2),(v)
@@ -927,8 +990,10 @@ GrpTable: Grp12
927EndTable 990EndTable
928 991
929GrpTable: Grp13 992GrpTable: Grp13
9930: vprord/q Hx,Wx,Ib (66),(ev)
9941: vprold/q Hx,Wx,Ib (66),(ev)
9302: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1) 9952: psrld Nq,Ib (11B) | vpsrld Hx,Ux,Ib (66),(11B),(v1)
9314: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) 9964: psrad Nq,Ib (11B) | vpsrad Hx,Ux,Ib (66),(11B),(v1) | vpsrad/q Hx,Ux,Ib (66),(evo)
9326: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1) 9976: pslld Nq,Ib (11B) | vpslld Hx,Ux,Ib (66),(11B),(v1)
933EndTable 998EndTable
934 999
@@ -963,6 +1028,20 @@ GrpTable: Grp17
9633: BLSI By,Ey (v) 10283: BLSI By,Ey (v)
964EndTable 1029EndTable
965 1030
1031GrpTable: Grp18
10321: vgatherpf0dps/d Wx (66),(ev)
10332: vgatherpf1dps/d Wx (66),(ev)
10345: vscatterpf0dps/d Wx (66),(ev)
10356: vscatterpf1dps/d Wx (66),(ev)
1036EndTable
1037
1038GrpTable: Grp19
10391: vgatherpf0qps/d Wx (66),(ev)
10402: vgatherpf1qps/d Wx (66),(ev)
10415: vscatterpf0qps/d Wx (66),(ev)
10426: vscatterpf1qps/d Wx (66),(ev)
1043EndTable
1044
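Grp18 and Grp19 follow the usual group convention: the opcode byte (0x0f 0x38 0xc6/0xc7 via the new Grp18/Grp19 entries above) selects the table and ModRM.reg, bits 5:3, selects the row. A small sketch of the row selection, assuming the X86_MODRM_REG() helper from insn.h:

	#include <stdio.h>
	#include "insn.h"	/* for X86_MODRM_REG() */

	int main(void)
	{
		/* 0f 38 c6 0c ...: ModRM 0x0c -> mod 0, reg 1, rm 4 (SIB follows) */
		unsigned char modrm = 0x0c;

		/* reg selects the Grp18 row: 1 -> vgatherpf0dps/d */
		printf("Grp18 row %d\n", X86_MODRM_REG(modrm));
		return 0;
	}

Rows 0, 3, 4 and 7 are absent from both groups, so those reg values decode to nothing there.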
966# AMD's Prefetch Group 1045# AMD's Prefetch Group
967GrpTable: GrpP 1046GrpTable: GrpP
9680: PREFETCH 10470: PREFETCH
diff --git a/tools/scripts/Makefile.arch b/tools/scripts/Makefile.arch
index e11fbd6fae78..ad85b921a607 100644
--- a/tools/scripts/Makefile.arch
+++ b/tools/scripts/Makefile.arch
@@ -1,8 +1,4 @@
1ifndef ARCH 1HOSTARCH := $(shell uname -m | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
2ARCH := $(shell uname -m 2>/dev/null || echo not)
3endif
4
5ARCH := $(shell echo $(ARCH) | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
6 -e s/sun4u/sparc/ -e s/sparc64/sparc/ \ 2 -e s/sun4u/sparc/ -e s/sparc64/sparc/ \
7 -e /arm64/!s/arm.*/arm/ -e s/sa110/arm/ \ 3 -e /arm64/!s/arm.*/arm/ -e s/sa110/arm/ \
8 -e s/s390x/s390/ -e s/parisc64/parisc/ \ 4 -e s/s390x/s390/ -e s/parisc64/parisc/ \
@@ -10,6 +6,41 @@ ARCH := $(shell echo $(ARCH) | sed -e s/i.86/x86/ -e s/x86_64/x86/ \
10 -e s/sh[234].*/sh/ -e s/aarch64.*/arm64/ \ 6 -e s/sh[234].*/sh/ -e s/aarch64.*/arm64/ \
11 -e s/tile.*/tile/ ) 7 -e s/tile.*/tile/ )
12 8
9ifndef ARCH
10ARCH := $(HOSTARCH)
11endif
12
13SRCARCH := $(ARCH)
14
15# Additional ARCH settings for x86
16ifeq ($(ARCH),i386)
17 SRCARCH := x86
18endif
19ifeq ($(ARCH),x86_64)
20 SRCARCH := x86
21endif
22
23# Additional ARCH settings for sparc
24ifeq ($(ARCH),sparc32)
25 SRCARCH := sparc
26endif
27ifeq ($(ARCH),sparc64)
28 SRCARCH := sparc
29endif
30
31# Additional ARCH settings for sh
32ifeq ($(ARCH),sh64)
33 SRCARCH := sh
34endif
35
36# Additional ARCH settings for tile
37ifeq ($(ARCH),tilepro)
38 SRCARCH := tile
39endif
40ifeq ($(ARCH),tilegx)
41 SRCARCH := tile
42endif
43
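A sketch of the resulting mapping for a few hypothetical invocations (illustrative values, not captured output):

	make                ->  ARCH = $(HOSTARCH) (e.g. x86 on an x86_64 box), SRCARCH = x86
	make ARCH=i386      ->  ARCH = i386,    SRCARCH = x86
	make ARCH=sparc64   ->  ARCH = sparc64, SRCARCH = sparc

Because the sed canonicalisation now applies only to the uname -m output, a user-supplied ARCH is preserved verbatim and only SRCARCH is folded, mirroring the top-level kbuild ARCH/SRCARCH split.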
13LP64 := $(shell echo __LP64__ | ${CC} ${CFLAGS} -E -x c - | tail -n 1) 44LP64 := $(shell echo __LP64__ | ${CC} ${CFLAGS} -E -x c - | tail -n 1)
14ifeq ($(LP64), 1) 45ifeq ($(LP64), 1)
15 IS_64_BIT := 1 46 IS_64_BIT := 1