Lines Matching defs:XMM
90 uint_t it_vexwoxmm:1; /* VEX instructions that don't use XMM/YMM */
185 XMMO, /* Prefixable SIMD xmm/mem -> xmm */
186 XMMOS, /* Prefixable SIMD xmm -> xmm/mem */
187 XMMOPM, /* Prefixable SIMD xmm/mem w/to xmm,imm8 */
188 XMMOMX, /* Prefixable SIMD mm/mem -> xmm */
189 XMMOX3, /* Prefixable SIMD xmm -> r32 */
190 XMMOXMM, /* Prefixable SIMD xmm/mem -> mm */
191 XMMOM, /* Prefixable SIMD xmm -> mem */
192 XMMOMS, /* Prefixable SIMD mem -> xmm */
193 XMM, /* SIMD xmm/mem -> xmm */
194 XMM_66r, /* SIMD 0x66 prefix required xmm/mem -> xmm */
195 XMM_66o, /* SIMD 0x66 prefix optional xmm/mem -> xmm */
196 XMMXIMPL, /* SIMD xmm -> xmm (mem) */
197 XMM3P, /* SIMD xmm -> r32,imm8 */
198 XMM3PM_66r, /* SIMD 0x66 prefix required xmm -> r32/mem,imm8 */
199 XMMP, /* SIMD xmm/mem w/to xmm,imm8 */
200 XMMP_66o, /* SIMD 0x66 prefix optional xmm/mem w/to xmm,imm8 */
201 XMMP_66r, /* SIMD 0x66 prefix required xmm/mem w/to xmm,imm8 */
202 XMMPRM, /* SIMD r32/mem -> xmm,imm8 */
203 XMMPRM_66r, /* SIMD 0x66 prefix required r32/mem -> xmm,imm8 */
204 XMMS, /* SIMD xmm -> xmm/mem */
205 XMMM, /* SIMD mem -> xmm */
206 XMMM_66r, /* SIMD 0x66 prefix required mem -> xmm */
207 XMMMS, /* SIMD xmm -> mem */
208 XMM3MX, /* SIMD r32/mem -> xmm */
209 XMM3MXS, /* SIMD xmm -> r32/mem */
210 XMMSH, /* SIMD xmm,imm8 */
211 XMMXM3, /* SIMD xmm/mem -> r32 */
212 XMMX3, /* SIMD xmm -> r32 */
213 XMMXMM, /* SIMD xmm/mem -> mm */
214 XMMMX, /* SIMD mm -> xmm */
215 XMMXM, /* SIMD xmm -> mm */
216 XMMX2I, /* SIMD xmm -> xmm, imm, imm */
217 XMM2I, /* SIMD xmm, imm, imm */
663 /* [10] */ TNSZ("movupd",XMM,16), TNSZ("movupd",XMMS,16), TNSZ("movlpd",XMMM,8), TNSZ("movlpd",XMMMS,8),
664 /* [14] */ TNSZ("unpcklpd",XMM,16),TNSZ("unpckhpd",XMM,16),TNSZ("movhpd",XMMM,8), TNSZ("movhpd",XMMMS,8),
670 /* [28] */ TNSZ("movapd",XMM,16), TNSZ("movapd",XMMS,16), TNSZ("cvtpi2pd",XMMOMX,8),TNSZ("movntpd",XMMOMS,16),
671 /* [2C] */ TNSZ("cvttpd2pi",XMMXMM,16),TNSZ("cvtpd2pi",XMMXMM,16),TNSZ("ucomisd",XMM,8),TNSZ("comisd",XMM,8),
683 /* [50] */ TNS("movmskpd",XMMOX3), TNSZ("sqrtpd",XMM,16), INVALID, INVALID,
684 /* [54] */ TNSZ("andpd",XMM,16), TNSZ("andnpd",XMM,16), TNSZ("orpd",XMM,16), TNSZ("xorpd",XMM,16),
685 /* [58] */ TNSZ("addpd",XMM,16), TNSZ("mulpd",XMM,16), TNSZ("cvtpd2ps",XMM,16),TNSZ("cvtps2dq",XMM,16),
686 /* [5C] */ TNSZ("subpd",XMM,16), TNSZ("minpd",XMM,16), TNSZ("divpd",XMM,16), TNSZ("maxpd",XMM,16),
688 /* [60] */ TNSZ("punpcklbw",XMM,16),TNSZ("punpcklwd",XMM,16),TNSZ("punpckldq",XMM,16),TNSZ("packsswb",XMM,16),
689 /* [64] */ TNSZ("pcmpgtb",XMM,16), TNSZ("pcmpgtw",XMM,16), TNSZ("pcmpgtd",XMM,16), TNSZ("packuswb",XMM,16),
690 /* [68] */ TNSZ("punpckhbw",XMM,16),TNSZ("punpckhwd",XMM,16),TNSZ("punpckhdq",XMM,16),TNSZ("packssdw",XMM,16),
691 /* [6C] */ TNSZ("punpcklqdq",XMM,16),TNSZ("punpckhqdq",XMM,16),TNSZ("movd",XMM3MX,4),TNSZ("movdqa",XMM,16),
694 /* [74] */ TNSZ("pcmpeqb",XMM,16), TNSZ("pcmpeqw",XMM,16), TNSZ("pcmpeqd",XMM,16), INVALID,
695 /* [78] */ TNSZ("extrq",XMM2I,16), TNSZ("extrq",XMM,16), INVALID, INVALID,
696 /* [7C] */ TNSZ("haddpd",XMM,16), TNSZ("hsubpd",XMM,16), TNSZ("movd",XMM3MXS,4), TNSZ("movdqa",XMMS,16),
723 /* [D0] */ TNSZ("addsubpd",XMM,16),TNSZ("psrlw",XMM,16), TNSZ("psrld",XMM,16), TNSZ("psrlq",XMM,16),
724 /* [D4] */ TNSZ("paddq",XMM,16), TNSZ("pmullw",XMM,16), TNSZ("movq",XMMS,8), TNS("pmovmskb",XMMX3),
725 /* [D8] */ TNSZ("psubusb",XMM,16), TNSZ("psubusw",XMM,16), TNSZ("pminub",XMM,16), TNSZ("pand",XMM,16),
726 /* [DC] */ TNSZ("paddusb",XMM,16), TNSZ("paddusw",XMM,16), TNSZ("pmaxub",XMM,16), TNSZ("pandn",XMM,16),
728 /* [E0] */ TNSZ("pavgb",XMM,16), TNSZ("psraw",XMM,16), TNSZ("psrad",XMM,16), TNSZ("pavgw",XMM,16),
729 /* [E4] */ TNSZ("pmulhuw",XMM,16), TNSZ("pmulhw",XMM,16), TNSZ("cvttpd2dq",XMM,16),TNSZ("movntdq",XMMS,16),
730 /* [E8] */ TNSZ("psubsb",XMM,16), TNSZ("psubsw",XMM,16), TNSZ("pminsw",XMM,16), TNSZ("por",XMM,16),
731 /* [EC] */ TNSZ("paddsb",XMM,16), TNSZ("paddsw",XMM,16), TNSZ("pmaxsw",XMM,16), TNSZ("pxor",XMM,16),
733 /* [F0] */ INVALID, TNSZ("psllw",XMM,16), TNSZ("pslld",XMM,16), TNSZ("psllq",XMM,16),
734 /* [F4] */ TNSZ("pmuludq",XMM,16), TNSZ("pmaddwd",XMM,16), TNSZ("psadbw",XMM,16), TNSZ("maskmovdqu", XMMXIMPL,16),
735 /* [F8] */ TNSZ("psubb",XMM,16), TNSZ("psubw",XMM,16), TNSZ("psubd",XMM,16), TNSZ("psubq",XMM,16),
736 /* [FC] */ TNSZ("paddb",XMM,16), TNSZ("paddw",XMM,16), TNSZ("paddd",XMM,16), INVALID,
830 /* [10] */ TNSZ("movsd",XMM,8), TNSZ("movsd",XMMS,8), TNSZ("movddup",XMM,8), INVALID,
850 /* [50] */ INVALID, TNSZ("sqrtsd",XMM,8), INVALID, INVALID,
852 /* [58] */ TNSZ("addsd",XMM,8), TNSZ("mulsd",XMM,8), TNSZ("cvtsd2ss",XMM,8), INVALID,
853 /* [5C] */ TNSZ("subsd",XMM,8), TNSZ("minsd",XMM,8), TNSZ("divsd",XMM,8), TNSZ("maxsd",XMM,8),
862 /* [78] */ TNSZ("insertq",XMMX2I,16),TNSZ("insertq",XMM,8),INVALID, INVALID,
863 /* [7C] */ TNSZ("haddps",XMM,16), TNSZ("hsubps",XMM,16), INVALID, INVALID,
890 /* [D0] */ TNSZ("addsubps",XMM,16),INVALID, INVALID, INVALID,
896 /* [E4] */ INVALID, INVALID, TNSZ("cvtpd2dq",XMM,16),INVALID,
1242 /* [10] */ TNSZ("movss",XMM,4), TNSZ("movss",XMMS,4), TNSZ("movsldup",XMM,16),INVALID,
1243 /* [14] */ INVALID, INVALID, TNSZ("movshdup",XMM,16),INVALID,
1262 /* [50] */ INVALID, TNSZ("sqrtss",XMM,4), TNSZ("rsqrtss",XMM,4), TNSZ("rcpss",XMM,4),
1264 /* [58] */ TNSZ("addss",XMM,4), TNSZ("mulss",XMM,4), TNSZ("cvtss2sd",XMM,4), TNSZ("cvttps2dq",XMM,16),
1265 /* [5C] */ TNSZ("subss",XMM,4), TNSZ("minss",XMM,4), TNSZ("divss",XMM,4), TNSZ("maxss",XMM,4),
1270 /* [6C] */ INVALID, INVALID, INVALID, TNSZ("movdqu",XMM,16),
1275 /* [7C] */ INVALID, INVALID, TNSZ("movq",XMM,8), TNSZ("movdqu",XMMS,16),
1308 /* [E4] */ INVALID, INVALID, TNSZ("cvtdq2pd",XMM,8), INVALID,
1485 /* [C8] */ TNSZ("sha1nexte",XMM,16),TNSZ("sha1msg1",XMM,16),TNSZ("sha1msg2",XMM,16),TNSZ("sha256rnds2",XMM,16),
1486 /* [CC] */ TNSZ("sha256msg1",XMM,16),TNSZ("sha256msg2",XMM,16),INVALID, INVALID,
2320 #define XMM_OPND 3 /* "value" used to indicate a xmm reg */
2333 * some registers match VEX_L, but the VSIB is always XMM.
3378 case XMM:
4305 /* SIMD memory or xmm reg operand to xmm reg */
4306 case XMM:
4334 /* SIMD xmm reg to memory or xmm reg */
4351 /* SIMD memory to xmm reg */
4368 /* SIMD memory or r32 to xmm reg */
4379 /* SIMD memory or mm reg to xmm reg */
4381 /* SIMD mm to xmm */
4387 /* SIMD memory or xmm reg to mm reg */
4396 /* SIMD memory or xmm reg to r32 */
4402 /* SIMD xmm to r32 */
4414 /* SIMD predicated memory or xmm reg with/to xmm reg */
4824 /* vmovsd <m64>, <xmm> */
4825 /* or vmovss <m32>, <xmm> */
4834 * XMM/YMM regs
4841 * vcvtsi2sd </r,m>, <xmm>, <xmm> or vcvtsi2ss </r,m>,
4842 * <xmm>, <xmm>
4868 * XMM/YMM regs
4899 * XMM/YMM regs
4916 /* vmovsd <xmm>, <m64> */
4917 /* or vmovss <xmm>, <m32> */
4937 /* vinsertf128 <imm8>, <xmm>, <ymm>, <ymm> */
4941 /* vpinsrb <imm8>, <reg/mm>, <xmm>, <xmm> */
4942 /* or vpinsrw <imm8>, <reg/mm>, <xmm>, <xmm> */
4945 /* vpinsrd/q <imm8>, <reg/mm>, <xmm>, <xmm> */
4988 /* vcvtpd2dq <ymm>, <xmm> */
4989 /* or vcvtpd2ps <ymm>, <xmm> */
4990 /* or vcvttpd2dq <ymm>, <xmm> */
5002 /* vcvtdq2pd <xmm>, <ymm> */
5003 /* or vcvtps2pd <xmm>, <ymm> */
5004 /* or vcvtph2ps <xmm>, <ymm> */
5005 /* or vbroadcasts* <xmm>, <ymm> */
5009 /* vmovd/q <reg/mem 32/64>, <xmm> */
5058 /* vpextrw <imm8>, <xmm>, <reg> */
5120 /* vmovd/q <reg/mem 32/64>, <xmm> */
5141 /* vpextrd/q <imm>, <xmm>, <reg/mem 32/64> */
5197 /* VEX uses the 1's complement form to encode the XMM/YMM regs */