diff --git a/decode-test.c b/decode-test.c
index 3064d66..2224e58 100644
--- a/decode-test.c
+++ b/decode-test.c
@@ -2739,6 +2739,26 @@ main(int argc, char** argv)
     TEST32("\x62\xf2\xfd\x49\x93\x44\xe7\x01", "vgatherqpd zmm0{k1}, qword ptr [edi+8*zmm4+0x8]");
     TEST64("\x62\xf2\xfd\x49\x93\x44\xe7\x01", "vgatherqpd zmm0{k1}, qword ptr [rdi+8*zmm4+0x8]");
+    // AVX512-FP16
+    TEST("\x62\xf5\x74\x08\x5c\xc2", "vsubph xmm0, xmm1, xmm2");
+    TEST("\x62\xf5\x74\x28\x5c\xc2", "vsubph ymm0, ymm1, ymm2");
+    TEST("\x62\xf5\x74\x48\x5c\xc2", "vsubph zmm0, zmm1, zmm2");
+    TEST32("\x62\xf5\x74\x08\x5c\x42\x01", "vsubph xmm0, xmm1, xmmword ptr [edx+0x10]");
+    TEST64("\x62\xf5\x74\x08\x5c\x42\x01", "vsubph xmm0, xmm1, xmmword ptr [rdx+0x10]");
+    TEST32("\x62\xf5\x74\x28\x5c\x42\x01", "vsubph ymm0, ymm1, ymmword ptr [edx+0x20]");
+    TEST64("\x62\xf5\x74\x28\x5c\x42\x01", "vsubph ymm0, ymm1, ymmword ptr [rdx+0x20]");
+    TEST32("\x62\xf5\x74\x48\x5c\x42\x01", "vsubph zmm0, zmm1, zmmword ptr [edx+0x40]");
+    TEST64("\x62\xf5\x74\x48\x5c\x42\x01", "vsubph zmm0, zmm1, zmmword ptr [rdx+0x40]");
+    TEST32("\x62\xf5\x74\x18\x5c\x42\x01", "vsubph xmm0, xmm1, word ptr [edx+0x2]{1to8}");
+    TEST64("\x62\xf5\x74\x18\x5c\x42\x01", "vsubph xmm0, xmm1, word ptr [rdx+0x2]{1to8}");
+    TEST32("\x62\xf5\x74\x38\x5c\x42\x01", "vsubph ymm0, ymm1, word ptr [edx+0x2]{1to16}");
+    TEST64("\x62\xf5\x74\x38\x5c\x42\x01", "vsubph ymm0, ymm1, word ptr [rdx+0x2]{1to16}");
+    TEST32("\x62\xf5\x74\x58\x5c\x42\x01", "vsubph zmm0, zmm1, word ptr [edx+0x2]{1to32}");
+    TEST64("\x62\xf5\x74\x58\x5c\x42\x01", "vsubph zmm0, zmm1, word ptr [rdx+0x2]{1to32}");
+    TEST64("\x62\x93\x36\x34\xc2\xeb\x89", "vcmpsh k5{k4}, xmm25, xmm27, 0x89, {sae}");
+    TEST("\x62\xf5\x66\x4c\x11\xd5", "vmovsh xmm5{k4}, xmm3, xmm2");
+    TEST64("\x62\x25\x66\x4c\x11\xd5", "vmovsh xmm21{k4}, xmm3, xmm26");
+
     puts(failed ? "Some tests FAILED" : "All tests PASSED");
     return failed ? EXIT_FAILURE : EXIT_SUCCESS;
 }
diff --git a/decode.c b/decode.c
index 27420c8..4e3ec90 100644
--- a/decode.c
+++ b/decode.c
@@ -232,9 +232,9 @@ prefix_end:
         prefix_rex |= mode != DECODE_64 || (byte & 0x20) ? 0 : PREFIX_REXB;
         if (vex_prefix == 0x62) // EVEX
         {
-            if (byte & 0x0c) // Bits 3:2 of opcode_escape must be clear.
+            if (byte & 0x08) // Bit 3 of opcode_escape must be clear.
                 return FD_ERR_UD;
-            opcode_escape = (byte & 0x03) | 8; // 8 is table index with EVEX
+            opcode_escape = (byte & 0x07) | 8; // 8 is table index with EVEX
             prefix_rex |= mode != DECODE_64 || (byte & 0x10) ? 0 : PREFIX_REXRR;
         }
         else // 3-byte VEX
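
Background on the decode.c hunk above: the second byte of the four-byte EVEX prefix carries the opcode-map selector in its low bits. The original AVX-512 specification defines only maps 1 through 3 (0f, 0f38, 0f3a), so the decoder could reject any encoding with bits 3:2 set; AVX512-FP16 adds maps 5 and 6, leaving only bit 3 reserved. A minimal standalone sketch of the relaxed check (hypothetical helper name, not code from the patch):

    #include <stdint.h>

    // byte1 is the second byte of the EVEX prefix, following 0x62.
    static int evex_opcode_map(uint8_t byte1)
    {
        if (byte1 & 0x08)    // bit 3 remains reserved and must be clear
            return -1;       // corresponds to FD_ERR_UD in decode.c
        return byte1 & 0x07; // 1=0f, 2=0f38, 3=0f3a, 5/6=AVX512-FP16 maps;
                             // decode.c ORs in 8 to index its EVEX tables
    }
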
diff --git a/instrs.txt b/instrs.txt
index 2830a3f..68a9417 100644
--- a/instrs.txt
+++ b/instrs.txt
@@ -2335,3 +2335,120 @@ VEX.66.W0.L0.0f3a32/r RMI Kb Kb Ib - KSHIFTLB F=AVX512DQ
 VEX.66.W1.L0.0f3a32/r RMI Kw Kw Ib - KSHIFTLW F=AVX512F
 VEX.66.W0.L0.0f3a33/r RMI Kd Kd Ib - KSHIFTLD F=AVX512BW
 VEX.66.W1.L0.0f3a33/r RMI Kq Kq Ib - KSHIFTLQ F=AVX512BW
+
+# AVX512-FP16
+EVEX.NP.W0.0f3a08 RMI Vx Wx Ib - EVX_RNDSCALEPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.NP.W0.LIG.0f3a0a RVMI Vdq Hdq Ww Ib EVX_RNDSCALESH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.0f3a26 RMI Vx Wx Ib - EVX_GETMANTPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.NP.W0.LIG.0f3a27 RVMI Vdq Hdq Ww Ib EVX_GETMANTSH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.0f3a56 RMI Vx Wx Ib - EVX_REDUCEPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.NP.W0.LIG.0f3a57 RVMI Vdq Hdq Ww Ib EVX_REDUCESH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.0f3a66 RMI K Wx Ib - EVX_FPCLASSPH+kb F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.NP.W0.LIG.0f3a67 RMI Kb Ww Ib - EVX_FPCLASSSH+k F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.0f3ac2 RVMI K Hx Wx Ib EVX_CMPPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.0f3ac2 RVMI Kb Hw Ww Ib EVX_CMPSH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+
+EVEX.F3.W0.LIG.M5.10/m RM Vdq Mw - - EVX_MOVSH+k F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.F3.W0.LIG.M5.10/r RVM Vdq Hdq Uw - EVX_MOVSH+k F=AVX512-FP16
+EVEX.F3.W0.LIG.M5.11/m MR Mw Vw - - EVX_MOVSH+k F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.F3.W0.LIG.M5.11/r MVR Udq Hdq Vw - EVX_MOVSH+k F=AVX512-FP16
+EVEX.NP.W0.LIG.M5.1d RVM Vdq Hdq Wd - EVX_CVTSS2SH+kr F=AVX512-FP16 TUPLE1_SCALAR_32
+EVEX.66.W0.M5.1d RM Vh Wx - - EVX_CVTPS2PHX+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F3.LIG.M5.2a RVM Vdq Hdq Ey - EVX_CVTSI2SH+r F=AVX512-FP16 TUPLE1_SCALAR_OPSZ
+EVEX.F3.LIG.M5.2c RM Gy Ww - - EVX_CVTTSH2SI+e F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.F3.LIG.M5.2d RM Gy Ww - - EVX_CVTSH2SI+r F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.LIG.M5.2e RM Vw Ww - - EVX_UCOMISH+e F=AVX512-FP16 TUPLE1_SCALAR_16 EFL=0--0m0mm
+EVEX.NP.W0.LIG.M5.2f RM Vw Ww - - EVX_COMISH+e F=AVX512-FP16 TUPLE1_SCALAR_16 EFL=0--0m0mm
+EVEX.NP.W0.M5.51 RM Vx Wx - - EVX_SQRTPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.51 RVM Vdq Hdq Ww - EVX_SQRTSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.58 RVM Vx Hx Wx - EVX_ADDPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.58 RVM Vdq Hdq Ww - EVX_ADDSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.59 RVM Vx Hx Wx - EVX_MULPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.59 RVM Vdq Hdq Ww - EVX_MULSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.5a RM Vx Wf - - EVX_CVTPH2PD+kbe F=AVX512-FP16 TUPLE_QUARTER_16 BCST16
+EVEX.66.W1.M5.5a RM Vf Wx - - EVX_CVTPD2PH+kbr F=AVX512-FP16 TUPLE_FULL_64
+EVEX.F3.W0.LIG.M5.5a RVM Vdq Hdq Ww - EVX_CVTSH2SD+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.F2.W1.LIG.M5.5a RVM Vdq Hdq Wq - EVX_CVTSD2SH+kr F=AVX512-FP16 TUPLE1_SCALAR_64
+EVEX.NP.W0.M5.5b RM Vh Wx - - EVX_CVTDQ2PH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.NP.W1.M5.5b RM Vf Wx - - EVX_CVTQQ2PH+kbr F=AVX512-FP16 TUPLE_FULL_64
+EVEX.66.W0.M5.5b RM Vx Wh - - EVX_CVTPH2DQ+kbr F=AVX512-FP16 TUPLE_HALF_16 BCST16
+EVEX.F3.W0.M5.5b RM Vx Wh - - EVX_CVTTPH2DQ+kbe F=AVX512-FP16 TUPLE_HALF_16 BCST16
+EVEX.NP.W0.M5.5c RVM Vx Hx Wx - EVX_SUBPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.5c RVM Vdq Hdq Ww - EVX_SUBSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.5d RVM Vx Hx Wx - EVX_MINPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.5d RVM Vdq Hdq Ww - EVX_MINSH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.5e RVM Vx Hx Wx - EVX_DIVPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.5e RVM Vdq Hdq Ww - EVX_DIVSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.5f RVM Vx Hx Wx - EVX_MAXPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.LIG.M5.5f RVM Vdq Hdq Ww - EVX_MAXSH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.L0.M5.6e RM Vdq Ew - - EVX_MOVW_G2X F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.78 RM Vx Wh - - EVX_CVTTPH2UDQ+kbe F=AVX512-FP16 TUPLE_HALF_16 BCST16
+EVEX.66.W0.M5.78 RM Vx Wf - - EVX_CVTTPH2UQQ+kbe F=AVX512-FP16 TUPLE_QUARTER_16 BCST16
+EVEX.F3.LIG.M5.78 RM Gy Ww - - EVX_CVTTSH2USI+e F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.NP.W0.M5.79 RM Vx Wh - - EVX_CVTPH2UDQ+kbr F=AVX512-FP16 TUPLE_HALF_16 BCST16
+EVEX.66.W0.M5.79 RM Vx Wf - - EVX_CVTPH2UQQ+kbr F=AVX512-FP16 TUPLE_QUARTER_16 BCST16
+EVEX.F3.LIG.M5.79 RM Gy Ww - - EVX_CVTSH2USI+r F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M5.7a RM Vx Wf - - EVX_CVTTPH2QQ+kbe F=AVX512-FP16 TUPLE_QUARTER_16 BCST16
+EVEX.F2.W0.M5.7a RM Vh Wx - - EVX_CVTUDQ2PH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F2.W1.M5.7a RM Vf Wx - - EVX_CVTUQQ2PH+kbr F=AVX512-FP16 TUPLE_FULL_64
+EVEX.66.W0.M5.7b RM Vx Wf - - EVX_CVTPH2QQ+kbr F=AVX512-FP16 TUPLE_QUARTER_16 BCST16
+EVEX.F3.LIG.M5.7b RVM Vdq Hdq Ey - EVX_CVTUSI2SH+r F=AVX512-FP16 TUPLE1_SCALAR_OPSZ
+EVEX.NP.W0.M5.7c RM Vx Wx - - EVX_CVTTPH2UW+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M5.7c RM Vx Wx - - EVX_CVTTPH2W+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.NP.W0.M5.7d RM Vx Wx - - EVX_CVTPH2UW+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M5.7d RM Vx Wx - - EVX_CVTPH2W+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F3.W0.M5.7d RM Vx Wx - - EVX_CVTW2PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.F2.W0.M5.7d RM Vx Wx - - EVX_CVTUW2PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.L0.M5.7e MR Ew Vw - - EVX_MOVW_X2G F=AVX512-FP16 TUPLE1_SCALAR_16
+
+EVEX.66.W0.M6.13 RM Vx Wh - - EVX_CVTPH2PSX+kbe F=AVX512-FP16 TUPLE_HALF_16 BCST16
+EVEX.NP.W0.LIG.M6.13 RVM Vdq Hdq Ww - EVX_CVTSH2SS+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.2c RVM Vx Hx Wx - EVX_SCALEFPH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.2d RVM Vdq Hdq Ww - EVX_SCALEFSH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.42 RM Vx Wx - - EVX_GETEXPPH+kbe F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.43 RVM Vdq Hdq Ww - EVX_GETEXPSH+ke F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.4c RM Vx Wx - - EVX_RCPPH+kb F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.4d RVM Vdq Hdq Ww - EVX_RCPSH+k F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.4e RM Vx Wx - - EVX_RSQRTPH+kb F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.4f RVM Vdq Hdq Ww - EVX_RSQRTSH+k F=AVX512-FP16 TUPLE1_SCALAR_16
+
+# TODO: for F{,C}M{ADD,UL}C{P,S}H, destreg must be unequal to the source registers
+EVEX.F3.W0.M6.56 RVM Vx Hx Wx - EVX_FMADDCPH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F2.W0.M6.56 RVM Vx Hx Wx - EVX_FCMADDCPH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F3.W0.LIG.M6.57 RVM Vdq Hdq Wd - EVX_FMADDCSH+kr F=AVX512-FP16 TUPLE1_SCALAR_32
+EVEX.F2.W0.LIG.M6.57 RVM Vdq Hdq Wd - EVX_FCMADDCSH+kr F=AVX512-FP16 TUPLE1_SCALAR_32
+EVEX.F3.W0.M6.d6 RVM Vx Hx Wx - EVX_FMULCPH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F2.W0.M6.d6 RVM Vx Hx Wx - EVX_FCMULCPH+kbr F=AVX512-FP16 TUPLE_FULL_32
+EVEX.F3.W0.LIG.M6.d7 RVM Vdq Hdq Wd - EVX_FMULCSH+kr F=AVX512-FP16 TUPLE1_SCALAR_32
+EVEX.F2.W0.LIG.M6.d7 RVM Vdq Hdq Wd - EVX_FCMULCSH+kr F=AVX512-FP16 TUPLE1_SCALAR_32
+
+EVEX.66.W0.M6.96 RVM Vx Hx Wx - EVX_FMADDSUB132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.a6 RVM Vx Hx Wx - EVX_FMADDSUB213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.b6 RVM Vx Hx Wx - EVX_FMADDSUB231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.97 RVM Vx Hx Wx - EVX_FMSUBADD132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.a7 RVM Vx Hx Wx - EVX_FMSUBADD213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.b7 RVM Vx Hx Wx - EVX_FMSUBADD231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.98 RVM Vx Hx Wx - EVX_FMADD132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.a8 RVM Vx Hx Wx - EVX_FMADD213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.b8 RVM Vx Hx Wx - EVX_FMADD231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.99 RVM Vdq Hdq Ww - EVX_FMADD132SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.a9 RVM Vdq Hdq Ww - EVX_FMADD213SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.b9 RVM Vdq Hdq Ww - EVX_FMADD231SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.9a RVM Vx Hx Wx - EVX_FMSUB132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.aa RVM Vx Hx Wx - EVX_FMSUB213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.ba RVM Vx Hx Wx - EVX_FMSUB231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.9b RVM Vdq Hdq Ww - EVX_FMSUB132SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.ab RVM Vdq Hdq Ww - EVX_FMSUB213SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.bb RVM Vdq Hdq Ww - EVX_FMSUB231SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.9c RVM Vx Hx Wx - EVX_FNMADD132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.ac RVM Vx Hx Wx - EVX_FNMADD213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.bc RVM Vx Hx Wx - EVX_FNMADD231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.9d RVM Vdq Hdq Ww - EVX_FNMADD132SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.ad RVM Vdq Hdq Ww - EVX_FNMADD213SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.bd RVM Vdq Hdq Ww - EVX_FNMADD231SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.M6.9e RVM Vx Hx Wx - EVX_FNMSUB132PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.ae RVM Vx Hx Wx - EVX_FNMSUB213PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.M6.be RVM Vx Hx Wx - EVX_FNMSUB231PH+kbr F=AVX512-FP16 TUPLE_FULL_16 BCST16
+EVEX.66.W0.LIG.M6.9f RVM Vdq Hdq Ww - EVX_FNMSUB132SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.af RVM Vdq Hdq Ww - EVX_FNMSUB213SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
+EVEX.66.W0.LIG.M6.bf RVM Vdq Hdq Ww - EVX_FNMSUB231SH+kr F=AVX512-FP16 TUPLE1_SCALAR_16
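
The TUPLE_FULL_16/BCST16 annotations above drive EVEX disp8*N displacement compression, which is exactly what the vsubph memory tests in decode-test.c exercise: every encoding carries the 8-bit displacement 0x01, which is scaled by the full vector width (16/32/64 bytes) for a plain memory operand, and by a single 2-byte fp16 element when EVEX.b requests a broadcast. A small sketch of that scaling rule, following the Intel SDM description (hypothetical function, not code from the patch):

    #include <stdint.h>
    #include <stdio.h>

    // Scale a compressed 8-bit displacement for a "full" tuple of fp16
    // elements: N is the vector width normally, 2 bytes with broadcast.
    static int32_t scaled_disp8(int8_t disp8, unsigned vec_bytes, int bcst)
    {
        unsigned n = bcst ? 2 : vec_bytes;
        return (int32_t) disp8 * (int32_t) n;
    }

    int main(void)
    {
        printf("0x%x\n", (unsigned) scaled_disp8(1, 16, 0)); // 0x10 -> [rdx+0x10]
        printf("0x%x\n", (unsigned) scaled_disp8(1, 32, 0)); // 0x20 -> [rdx+0x20]
        printf("0x%x\n", (unsigned) scaled_disp8(1, 64, 0)); // 0x40 -> [rdx+0x40]
        printf("0x%x\n", (unsigned) scaled_disp8(1, 16, 1)); // 0x2  -> [rdx+0x2]{1to8}
        return 0;
    }
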
+ - r"(?P0f38|0f3a|0f|)" + + r"(?P0f38|0f3a|0f|M[56]\.|)" + r"(?P[0-9a-f]{2})" + r"(?:/(?P[0-7]|[rm]|[0-7][rm])|(?P[c-f][0-9a-f]))?(?P\+)?$") @@ -370,7 +370,7 @@ class Opcode(NamedTuple): return cls( prefix=match.group("legacy"), - escape=["", "0f", "0f38", "0f3a"].index(match.group("escape")), + escape=["", "0f", "0f38", "0f3a", "M4.", "M5.", "M6."].index(match.group("escape")), opc=int(match.group("opcode"), 16), extended=match.group("extended") is not None, modreg=modreg, @@ -407,10 +407,13 @@ def verifyOpcodeDesc(opcode, desc): raise Exception(f"missing memory operand {opcode}, {desc}") # From Intel SDM bcst, evexw, vszs = { + "TUPLE_FULL_16": (2, "0", ( 16, 32, 64)), "TUPLE_FULL_32": (4, "0", ( 16, 32, 64)), "TUPLE_FULL_64": (8, "1", ( 16, 32, 64)), + "TUPLE_HALF_16": (2, "0", ( 8, 16, 32)), "TUPLE_HALF_32": (4, "0", ( 8, 16, 32)), "TUPLE_HALF_64": (8, "1", ( 8, 16, 32)), + "TUPLE_QUARTER_16": (2, "0", ( 4, 8, 16)), "TUPLE_FULL_MEM": (None, None, ( 16, 32, 64)), "TUPLE_HALF_MEM": (None, None, ( 8, 16, 32)), "TUPLE_QUARTER_MEM": (None, None, ( 4, 8, 16)), @@ -449,7 +452,7 @@ class Trie: EntryKind.TABLE_PREFIX, EntryKind.TABLE16, EntryKind.TABLE8E, EntryKind.TABLE_VEX) TABLE_LENGTH = { - EntryKind.TABLE_ROOT: 12, + EntryKind.TABLE_ROOT: 16, EntryKind.TABLE256: 256, EntryKind.TABLE_PREFIX: 4, EntryKind.TABLE16: 16,