@@ -314,9 +314,12 @@ static const struct test avx512bw_all[]
INSN(pminsw, 66, 0f, ea, vl, w, vl),
INSN(pminub, 66, 0f, da, vl, b, vl),
INSN(pminuw, 66, 0f38, 3a, vl, w, vl),
+// pmovb2m, f3, 0f38, 29, b
+// pmovm2, f3, 0f38, 28, bw
INSN(pmovswb, f3, 0f38, 20, vl_2, b, vl),
INSN(pmovsxbw, 66, 0f38, 20, vl_2, b, vl),
INSN(pmovuswb, f3, 0f38, 10, vl_2, b, vl),
+// pmovw2m, f3, 0f38, 29, w
INSN(pmovwb, f3, 0f38, 30, vl_2, b, vl),
INSN(pmovzxbw, 66, 0f38, 30, vl_2, b, vl),
INSN(pmulhuw, 66, 0f, e4, vl, w, vl),
@@ -364,6 +367,9 @@ static const struct test avx512dq_all[]
INSN_PFP(andn, 0f, 55),
INSN(broadcasti32x2, 66, 0f38, 59, el_2, d, vl),
INSN_PFP(or, 0f, 56),
+// pmovd2m, f3, 0f38, 39, d
+// pmovm2, f3, 0f38, 38, dq
+// pmovq2m, f3, 0f38, 39, q
INSN(pmullq, 66, 0f38, 40, vl, q, vl),
INSN_PFP(xor, 0f, 57),
};
@@ -12,17 +12,23 @@
#if SIZE == 1
# define _(x) x##b
+# define _v(x, t) _v_(x##q, t)
#elif SIZE == 2
# define _(x) x##w
+# define _v(x, t) _v_(x##d, t)
# define WIDEN(x) x##bw
#elif SIZE == 4
# define _(x) x##d
+# define _v(x, t) _v_(x##w, t)
# define WIDEN(x) x##wd
#elif SIZE == 8
# define _(x) x##q
+# define _v(x, t) _v_(x##b, t)
# define WIDEN(x) x##dq
#endif
+#define _v_(x, t) v##x##t
+
.macro check res1:req, res2:req, line:req
_(kmov) %\res1, DATA(out)
#if SIZE < 8 || !defined(__i386__)
@@ -131,6 +137,15 @@ _start:
#endif
+#if SIZE > 2 ? defined(__AVX512BW__) : defined(__AVX512DQ__)
+
+ _(kmov) DATA(in1), %k0
+ _v(pmovm2,) %k0, %zmm7
+ _v(pmov,2m) %zmm7, %k3
+ check k0, k3, __LINE__
+
+#endif
+
xor %eax, %eax
ret
@@ -8465,6 +8465,21 @@ x86_emulate(
elem_bytes = (b & 7) < 3 ? 1 : (b & 7) != 5 ? 2 : 4;
goto avx512f_no_sae;
+ case X86EMUL_OPC_EVEX_F3(0x0f38, 0x29): /* vpmov{b,w}2m [xyz]mm,k */
+ case X86EMUL_OPC_EVEX_F3(0x0f38, 0x39): /* vpmov{d,q}2m [xyz]mm,k */
+ generate_exception_if(!evex.r || !evex.R, EXC_UD);
+ /* fall through */
+ case X86EMUL_OPC_EVEX_F3(0x0f38, 0x28): /* vpmovm2{b,w} k,[xyz]mm */
+ case X86EMUL_OPC_EVEX_F3(0x0f38, 0x38): /* vpmovm2{d,q} k,[xyz]mm */
+ if ( b & 0x10 )
+ host_and_vcpu_must_have(avx512dq);
+ else
+ host_and_vcpu_must_have(avx512bw);
+ generate_exception_if(evex.opmsk || ea.type != OP_REG, EXC_UD);
+ d |= TwoOp;
+ op_bytes = 16 << evex.lr;
+ goto avx512f_no_sae;
+
case X86EMUL_OPC_66(0x0f38, 0x2a): /* movntdqa m128,xmm */
case X86EMUL_OPC_VEX_66(0x0f38, 0x2a): /* vmovntdqa mem,{x,y}mm */
generate_exception_if(ea.type != OP_MEM, EXC_UD);
Entries to the tables in evex-disp8.c are added despite these insns not
allowing for memory operands, so that the tables eventually give a complete
picture of the supported EVEX-encoded insns.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
---
v3: New.
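
As an illustration of the decode logic in the x86_emulate() hunk above: opcodes
0x28/0x29 in the F3 0f38 space are the byte/word forms (requiring AVX512BW) and
0x38/0x39 the dword/qword forms (requiring AVX512DQ), which is why the single
`b & 0x10` test selects the feature check, while EVEX.W picks the element width
within each pair. A minimal standalone sketch of that mapping follows; the
helper and its names are illustrative only and not part of the patch:

#include <stdbool.h>
#include <stdio.h>

enum vpmov_feature { vpmov_avx512bw, vpmov_avx512dq };

/*
 * Map a vpmovm2{b,w,d,q} / vpmov{b,w,d,q}2m opcode byte (0x28, 0x29, 0x38,
 * or 0x39 in the F3 0f38 space) plus EVEX.W to the feature gating the insn
 * and to its element width in bits.
 */
static enum vpmov_feature vpmov_required_feature(unsigned char opc,
                                                 bool evex_w,
                                                 unsigned int *elem_bits)
{
    if ( opc & 0x10 )
    {
        /* 0x38 / 0x39: dword (W0) or qword (W1) elements. */
        *elem_bits = evex_w ? 64 : 32;
        return vpmov_avx512dq;
    }

    /* 0x28 / 0x29: byte (W0) or word (W1) elements. */
    *elem_bits = evex_w ? 16 : 8;
    return vpmov_avx512bw;
}

int main(void)
{
    unsigned int bits;
    /* vpmovw2m: opcode 0x29 with EVEX.W = 1. */
    enum vpmov_feature f = vpmov_required_feature(0x29, true, &bits);

    printf("%s, %u-bit elements\n",
           f == vpmov_avx512dq ? "AVX512DQ" : "AVX512BW", bits);

    return 0;
}

Correspondingly, the new opmask.S test block round-trips a mask value through a
vector register: for SIZE == 2, for example, it expands to kmovw / vpmovm2d /
vpmovd2m and is guarded by __AVX512DQ__, matching the inverse relation between
mask width and element width encoded in the _v() macro.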