GEN_test_RandM(VPOR_128,
               "vpor %%xmm6, %%xmm8, %%xmm7",
               "vpor (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPXOR_128,
               "vpxor %%xmm6, %%xmm8, %%xmm7",
               "vpxor (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBB_128,
               "vpsubb %%xmm6, %%xmm8, %%xmm7",
               "vpsubb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBD_128,
               "vpsubd %%xmm6, %%xmm8, %%xmm7",
               "vpsubd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDD_128,
               "vpaddd %%xmm6, %%xmm8, %%xmm7",
               "vpaddd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMOVZXWD_128,
               "vpmovzxwd %%xmm6, %%xmm8",
               "vpmovzxwd (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVZXBW_128,
               "vpmovzxbw %%xmm6, %%xmm8",
               "vpmovzxbw (%%rsi), %%xmm8")

GEN_test_RandM(VPBLENDVB_128,
               "vpblendvb %%xmm9, %%xmm6, %%xmm8, %%xmm7",
               "vpblendvb %%xmm9, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMINSD_128,
               "vpminsd %%xmm6, %%xmm8, %%xmm7",
               "vpminsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMAXSD_128,
               "vpmaxsd %%xmm6, %%xmm8, %%xmm7",
               "vpmaxsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDPD_128,
               "vandpd %%xmm6, %%xmm8, %%xmm7",
               "vandpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTSI2SD_32,
               "vcvtsi2sdl %%r14d, %%xmm8, %%xmm7",
               "vcvtsi2sdl (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTSI2SD_64,
               "vcvtsi2sdq %%r14, %%xmm8, %%xmm7",
               "vcvtsi2sdq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTSI2SS_64,
               "vcvtsi2ssq %%r14, %%xmm8, %%xmm7",
               "vcvtsi2ssq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTTSD2SI_32,
               "vcvttsd2si %%xmm8, %%r14d",
               "vcvttsd2si (%%rsi), %%r14d")

GEN_test_RandM(VCVTTSD2SI_64,
               "vcvttsd2si %%xmm8, %%r14",
               "vcvttsd2si (%%rsi), %%r14")

GEN_test_RandM(VCVTSD2SI_32,
               "vcvtsd2si %%xmm8, %%r14d",
               "vcvtsd2si (%%rsi), %%r14d")

GEN_test_RandM(VCVTSD2SI_64,
               "vcvtsd2si %%xmm8, %%r14",
               "vcvtsd2si (%%rsi), %%r14")

GEN_test_RandM(VPSHUFB_128,
               "vpshufb %%xmm6, %%xmm8, %%xmm7",
               "vpshufb (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x0,
               "vcmpsd $0, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1,
               "vcmpsd $1, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x2,
               "vcmpsd $2, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $2, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x3,
               "vcmpsd $3, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x4,
               "vcmpsd $4, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $4, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x5,
               "vcmpsd $5, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x6,
               "vcmpsd $6, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $6, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x7,
               "vcmpsd $7, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $7, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x8,
               "vcmpsd $8, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $8, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x9,
               "vcmpsd $9, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xA,
               "vcmpsd $0xA, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xA, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xB,
               "vcmpsd $0xB, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xB, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xC,
               "vcmpsd $0xC, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xC, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xD,
               "vcmpsd $0xD, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xD, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xE,
               "vcmpsd $0xE, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xE, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0xF,
               "vcmpsd $0xF, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0xF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x10,
               "vcmpsd $0x10, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x10, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x11,
               "vcmpsd $0x11, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x11, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x12,
               "vcmpsd $0x12, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x12, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x13,
               "vcmpsd $0x13, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x13, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x14,
               "vcmpsd $0x14, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x14, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x15,
               "vcmpsd $0x15, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x15, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x16,
               "vcmpsd $0x16, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x16, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x17,
               "vcmpsd $0x17, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x17, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x18,
               "vcmpsd $0x18, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x18, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x19,
               "vcmpsd $0x19, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x19, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1A,
               "vcmpsd $0x1A, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1A, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1B,
               "vcmpsd $0x1B, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1B, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1C,
               "vcmpsd $0x1C, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1C, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1D,
               "vcmpsd $0x1D, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1D, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1E,
               "vcmpsd $0x1E, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1E, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSD_128_0x1F,
               "vcmpsd $0x1F, %%xmm6, %%xmm8, %%xmm7",
               "vcmpsd $0x1F, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSQRTSD_128,
               "vsqrtsd %%xmm6, %%xmm8, %%xmm7",
               "vsqrtsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VORPS_128,
               "vorps %%xmm6, %%xmm8, %%xmm7",
               "vorps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDNPS_128,
               "vandnps %%xmm6, %%xmm8, %%xmm7",
               "vandnps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMAXSS_128,
               "vmaxss %%xmm6, %%xmm8, %%xmm7",
               "vmaxss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMINSS_128,
               "vminss %%xmm6, %%xmm8, %%xmm7",
               "vminss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDPS_128,
               "vandps %%xmm6, %%xmm8, %%xmm7",
               "vandps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTSI2SS_128,
               "vcvtsi2ssl %%r14d, %%xmm8, %%xmm7",
               "vcvtsi2ssl (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VUNPCKLPS_128,
               "vunpcklps %%xmm6, %%xmm8, %%xmm7",
               "vunpcklps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VDIVSS_128,
               "vdivss %%xmm6, %%xmm8, %%xmm7",
               "vdivss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDSS_128,
               "vaddss %%xmm6, %%xmm8, %%xmm7",
               "vaddss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSUBSS_128,
               "vsubss %%xmm6, %%xmm8, %%xmm7",
               "vsubss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMULSS_128,
               "vmulss %%xmm6, %%xmm8, %%xmm7",
               "vmulss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKLBW_128,
               "vpunpcklbw %%xmm6, %%xmm8, %%xmm7",
               "vpunpcklbw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKHBW_128,
               "vpunpckhbw %%xmm6, %%xmm8, %%xmm7",
               "vpunpckhbw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTTSS2SI_32,
               "vcvttss2si %%xmm8, %%r14d",
               "vcvttss2si (%%rsi), %%r14d")

GEN_test_RandM(VCVTSS2SI_32,
               "vcvtss2si %%xmm8, %%r14d",
               "vcvtss2si (%%rsi), %%r14d")

GEN_test_RandM(VMOVQ_XMMorMEM64_to_XMM,
               "vmovq %%xmm7, %%xmm8",
               "vmovq (%%rsi), %%xmm8")

/* NB tests the reg form only */
GEN_test_Ronly(VMOVQ_XMM_to_IREG64,
               "vmovq %%xmm7, %%r14")

/* This insn only exists in the reg-reg-reg form. */
GEN_test_Ronly(VMOVHLPS_128,
               "vmovhlps %%xmm6, %%xmm8, %%xmm7")

GEN_test_RandM(VPABSD_128,
               "vpabsd %%xmm6, %%xmm8",
               "vpabsd (%%rsi), %%xmm8")

/* This insn only exists in the reg-reg-reg form. */
GEN_test_Ronly(VMOVLHPS_128,
               "vmovlhps %%xmm6, %%xmm8, %%xmm7")
GEN_test_Monly(VMOVNTDQ_128,
               "vmovntdq %%xmm8, (%%rsi)")

GEN_test_Monly(VMOVNTDQ_256,
               "vmovntdq %%ymm8, (%%rsi)")

GEN_test_RandM(VMOVUPS_XMM_to_XMMorMEM,
               "vmovups %%xmm8, %%xmm7",
               "vmovups %%xmm9, (%%rsi)")

GEN_test_RandM(VMOVQ_IREGorMEM64_to_XMM,
               "vmovq %%r14, %%xmm7",
               "vmovq (%%rsi), %%xmm9")
GEN_test_RandM(VPCMPESTRM_0x45_128,
               "movl $16, %%eax ; movl $16, %%edx ; "
               "vpcmpestrm $0x45, %%xmm7, %%xmm8; movapd %%xmm0, %%xmm9",
               "movl $16, %%eax ; movl $16, %%edx ; "
               "vpcmpestrm $0x45, (%%rsi), %%xmm8; movapd %%xmm0, %%xmm9")

/* NB tests the reg form only */
GEN_test_Ronly(VMOVD_XMM_to_IREG32,
               "vmovd %%xmm7, %%r14d")

GEN_test_RandM(VCVTSD2SS_128,
               "vcvtsd2ss %%xmm9, %%xmm8, %%xmm7",
               "vcvtsd2ss (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTSS2SD_128,
               "vcvtss2sd %%xmm9, %%xmm8, %%xmm7",
               "vcvtss2sd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPACKUSWB_128,
               "vpackuswb %%xmm9, %%xmm8, %%xmm7",
               "vpackuswb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTTSS2SI_64,
               "vcvttss2si %%xmm8, %%r14",
               "vcvttss2si (%%rsi), %%r14")

GEN_test_RandM(VCVTSS2SI_64,
               "vcvtss2si %%xmm8, %%r14",
               "vcvtss2si (%%rsi), %%r14")

GEN_test_Ronly(VPMOVMSKB_128,
               "vpmovmskb %%xmm8, %%r14")

GEN_test_RandM(VPAND_128,
               "vpand %%xmm9, %%xmm8, %%xmm7",
               "vpand (%%rsi), %%xmm8, %%xmm7")

GEN_test_Monly(VMOVHPD_128_StoreForm,
               "vmovhpd %%xmm8, (%%rsi)")

GEN_test_Monly(VMOVHPS_128_StoreForm,
               "vmovhps %%xmm8, (%%rsi)")

GEN_test_RandM(VPCMPEQB_128,
               "vpcmpeqb %%xmm9, %%xmm8, %%xmm7",
               "vpcmpeqb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSHUFPS_0x39_128,
               "vshufps $0x39, %%xmm9, %%xmm8, %%xmm7",
               "vshufps $0xC6, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMULPS_128,
               "vmulps %%xmm9, %%xmm8, %%xmm7",
               "vmulps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSUBPS_128,
               "vsubps %%xmm9, %%xmm8, %%xmm7",
               "vsubps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDPS_128,
               "vaddps %%xmm9, %%xmm8, %%xmm7",
               "vaddps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMAXPS_128,
               "vmaxps %%xmm9, %%xmm8, %%xmm7",
               "vmaxps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMAXPS_256,
               "vmaxps %%ymm9, %%ymm8, %%ymm7",
               "vmaxps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VMAXPD_128,
               "vmaxpd %%xmm9, %%xmm8, %%xmm7",
               "vmaxpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMAXPD_256,
               "vmaxpd %%ymm9, %%ymm8, %%ymm7",
               "vmaxpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VMINPS_128,
               "vminps %%xmm9, %%xmm8, %%xmm7",
               "vminps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMINPS_256,
               "vminps %%ymm9, %%ymm8, %%ymm7",
               "vminps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VMINPD_128,
               "vminpd %%xmm9, %%xmm8, %%xmm7",
               "vminpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMINPD_256,
               "vminpd %%ymm9, %%ymm8, %%ymm7",
               "vminpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCVTPS2DQ_128,
               "vcvtps2dq %%xmm8, %%xmm7",
               "vcvtps2dq (%%rsi), %%xmm8")

GEN_test_RandM(VPSHUFLW_0x39_128,
               "vpshuflw $0x39, %%xmm9, %%xmm7",
               "vpshuflw $0xC6, (%%rsi), %%xmm8")

GEN_test_RandM(VPSHUFHW_0x39_128,
               "vpshufhw $0x39, %%xmm9, %%xmm7",
               "vpshufhw $0xC6, (%%rsi), %%xmm8")

GEN_test_RandM(VPMULLW_128,
               "vpmullw %%xmm9, %%xmm8, %%xmm7",
               "vpmullw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDUSW_128,
               "vpaddusw %%xmm9, %%xmm8, %%xmm7",
               "vpaddusw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMULHUW_128,
               "vpmulhuw %%xmm9, %%xmm8, %%xmm7",
               "vpmulhuw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDUSB_128,
               "vpaddusb %%xmm9, %%xmm8, %%xmm7",
               "vpaddusb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKLWD_128,
               "vpunpcklwd %%xmm6, %%xmm8, %%xmm7",
               "vpunpcklwd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKHWD_128,
               "vpunpckhwd %%xmm6, %%xmm8, %%xmm7",
               "vpunpckhwd (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPSLLD_0x05_128,
               "vpslld $0x5, %%xmm9, %%xmm7")

GEN_test_Ronly(VPSRLD_0x05_128,
               "vpsrld $0x5, %%xmm9, %%xmm7")

GEN_test_Ronly(VPSRAD_0x05_128,
               "vpsrad $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPSUBUSB_128,
               "vpsubusb %%xmm9, %%xmm8, %%xmm7",
               "vpsubusb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBSB_128,
               "vpsubsb %%xmm9, %%xmm8, %%xmm7",
               "vpsubsb (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPSRLDQ_0x05_128,
               "vpsrldq $0x5, %%xmm9, %%xmm7")

GEN_test_Ronly(VPSLLDQ_0x05_128,
               "vpslldq $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPANDN_128,
               "vpandn %%xmm9, %%xmm8, %%xmm7",
               "vpandn (%%rsi), %%xmm8, %%xmm7")

/* NB tests the mem form only */
GEN_test_Monly(VMOVD_XMM_to_MEM32,
               "vmovd %%xmm7, (%%rsi)")

GEN_test_RandM(VPINSRD_128,
               "vpinsrd $0, %%r14d, %%xmm8, %%xmm7",
               "vpinsrd $3, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKLQDQ_128,
               "vpunpcklqdq %%xmm6, %%xmm8, %%xmm7",
               "vpunpcklqdq (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPSRLW_0x05_128,
               "vpsrlw $0x5, %%xmm9, %%xmm7")

GEN_test_Ronly(VPSLLW_0x05_128,
               "vpsllw $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPADDW_128,
               "vpaddw %%xmm6, %%xmm8, %%xmm7",
               "vpaddw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPACKSSDW_128,
               "vpackssdw %%xmm9, %%xmm8, %%xmm7",
               "vpackssdw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKLDQ_128,
               "vpunpckldq %%xmm6, %%xmm8, %%xmm7",
               "vpunpckldq (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VINSERTPS_0x39_128,
               "vinsertps $0x39, %%xmm6, %%xmm8, %%xmm7",
               "vinsertps $0xC6, (%%rsi), %%xmm8, %%xmm7")

GEN_test_Monly(VMOVSD_M64_XMM, "vmovsd (%%rsi), %%xmm8")

GEN_test_Monly(VMOVSS_M64_XMM, "vmovss (%%rsi), %%xmm8")

GEN_test_Monly(VMOVSD_XMM_M64, "vmovsd %%xmm8, (%%rsi)")

GEN_test_Monly(VMOVSS_XMM_M32, "vmovss %%xmm8, (%%rsi)")

GEN_test_RandM(VMOVUPD_GtoE_128,
               "vmovupd %%xmm9, %%xmm6",
               "vmovupd %%xmm7, (%%rsi)")

GEN_test_RandM(VMOVAPD_EtoG_128,
               "vmovapd %%xmm6, %%xmm8",
               "vmovapd (%%rsi), %%xmm9")

GEN_test_RandM(VMOVAPD_EtoG_256,
               "vmovapd %%ymm6, %%ymm8",
               "vmovapd (%%rsi), %%ymm9")

GEN_test_RandM(VMOVAPS_EtoG_128,
               "vmovaps %%xmm6, %%xmm8",
               "vmovaps (%%rsi), %%xmm9")

GEN_test_RandM(VMOVAPS_GtoE_128,
               "vmovaps %%xmm9, %%xmm6",
               "vmovaps %%xmm7, (%%rsi)")

GEN_test_RandM(VMOVAPS_GtoE_256,
               "vmovaps %%ymm9, %%ymm6",
               "vmovaps %%ymm7, (%%rsi)")

GEN_test_RandM(VMOVAPD_GtoE_128,
               "vmovapd %%xmm9, %%xmm6",
               "vmovapd %%xmm7, (%%rsi)")

GEN_test_RandM(VMOVAPD_GtoE_256,
               "vmovapd %%ymm9, %%ymm6",
               "vmovapd %%ymm7, (%%rsi)")

GEN_test_RandM(VMOVDQU_EtoG_128,
               "vmovdqu %%xmm6, %%xmm8",
               "vmovdqu (%%rsi), %%xmm9")

GEN_test_RandM(VMOVDQA_EtoG_128,
               "vmovdqa %%xmm6, %%xmm8",
               "vmovdqa (%%rsi), %%xmm9")

GEN_test_RandM(VMOVDQA_EtoG_256,
               "vmovdqa %%ymm6, %%ymm8",
               "vmovdqa (%%rsi), %%ymm9")

GEN_test_RandM(VMOVDQU_GtoE_128,
               "vmovdqu %%xmm9, %%xmm6",
               "vmovdqu %%xmm7, (%%rsi)")

GEN_test_RandM(VMOVDQA_GtoE_128,
               "vmovdqa %%xmm9, %%xmm6",
               "vmovdqa %%xmm7, (%%rsi)")

GEN_test_RandM(VMOVDQA_GtoE_256,
               "vmovdqa %%ymm9, %%ymm6",
               "vmovdqa %%ymm7, (%%rsi)")

GEN_test_Monly(VMOVQ_XMM_MEM64, "vmovq %%xmm8, (%%rsi)")

GEN_test_RandM(VMOVD_IREGorMEM32_to_XMM,
               "vmovd %%r14d, %%xmm7",
               "vmovd (%%rsi), %%xmm9")

GEN_test_RandM(VMOVDDUP_XMMorMEM64_to_XMM,
               "vmovddup %%xmm8, %%xmm7",
               "vmovddup (%%rsi), %%xmm9")

GEN_test_RandM(VCMPSS_128_0x0,
               "vcmpss $0, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1,
               "vcmpss $1, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x2,
               "vcmpss $2, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $2, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x3,
               "vcmpss $3, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x4,
               "vcmpss $4, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $4, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x5,
               "vcmpss $5, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x6,
               "vcmpss $6, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $6, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x7,
               "vcmpss $7, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $7, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x8,
               "vcmpss $8, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $8, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x9,
               "vcmpss $0x9, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xA,
               "vcmpss $0xA, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xA, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xB,
               "vcmpss $0xB, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xB, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xC,
               "vcmpss $0xC, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xC, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xD,
               "vcmpss $0xD, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xD, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xE,
               "vcmpss $0xE, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xE, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0xF,
               "vcmpss $0xF, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0xF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x10,
               "vcmpss $0x10, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x10, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x11,
               "vcmpss $0x11, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x11, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x12,
               "vcmpss $0x12, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x12, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x13,
               "vcmpss $0x13, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x13, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x14,
               "vcmpss $0x14, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x14, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x15,
               "vcmpss $0x15, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x15, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x16,
               "vcmpss $0x16, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x16, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x17,
               "vcmpss $0x17, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x17, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x18,
               "vcmpss $0x18, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x18, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x19,
               "vcmpss $0x19, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x19, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1A,
               "vcmpss $0x1A, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1A, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1B,
               "vcmpss $0x1B, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1B, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1C,
               "vcmpss $0x1C, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1C, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1D,
               "vcmpss $0x1D, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1D, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1E,
               "vcmpss $0x1E, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1E, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPSS_128_0x1F,
               "vcmpss $0x1F, %%xmm6, %%xmm8, %%xmm7",
               "vcmpss $0x1F, (%%rsi), %%xmm8, %%xmm7")

// The x suffix denotes a 128 -> 64 operation
GEN_test_RandM(VCVTPD2PS_128,
               "vcvtpd2psx %%xmm8, %%xmm7",
               "vcvtpd2psx (%%rsi), %%xmm9")
GEN_test_RandM(VEXTRACTF128_0x0,
               "vextractf128 $0x0, %%ymm7, %%xmm9",
               "vextractf128 $0x0, %%ymm7, (%%rsi)")

GEN_test_RandM(VEXTRACTF128_0x1,
               "vextractf128 $0x1, %%ymm7, %%xmm9",
               "vextractf128 $0x1, %%ymm7, (%%rsi)")

GEN_test_RandM(VINSERTF128_0x0,
               "vinsertf128 $0x0, %%xmm9, %%ymm7, %%ymm8",
               "vinsertf128 $0x0, (%%rsi), %%ymm7, %%ymm8")

GEN_test_RandM(VINSERTF128_0x1,
               "vinsertf128 $0x1, %%xmm9, %%ymm7, %%ymm8",
               "vinsertf128 $0x1, (%%rsi), %%ymm7, %%ymm8")

GEN_test_RandM(VPEXTRD_128_0x0,
               "vpextrd $0x0, %%xmm7, %%r14d",
               "vpextrd $0x0, %%xmm7, (%%rsi)")

GEN_test_RandM(VPEXTRD_128_0x3,
               "vpextrd $0x3, %%xmm7, %%r14d",
               "vpextrd $0x3, %%xmm7, (%%rsi)")

GEN_test_RandM(VPCMPEQD_128,
               "vpcmpeqd %%xmm6, %%xmm8, %%xmm7",
               "vpcmpeqd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSHUFD_0x39_128,
               "vpshufd $0x39, %%xmm9, %%xmm8",
               "vpshufd $0xC6, (%%rsi), %%xmm7")

GEN_test_RandM(VMAXSD_128,
               "vmaxsd %%xmm6, %%xmm8, %%xmm7",
               "vmaxsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VDIVSD_128,
               "vdivsd %%xmm6, %%xmm8, %%xmm7",
               "vdivsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMINSD_128,
               "vminsd %%xmm6, %%xmm8, %%xmm7",
               "vminsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSUBSD_128,
               "vsubsd %%xmm6, %%xmm8, %%xmm7",
               "vsubsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDSD_128,
               "vaddsd %%xmm6, %%xmm8, %%xmm7",
               "vaddsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMULSD_128,
               "vmulsd %%xmm6, %%xmm8, %%xmm7",
               "vmulsd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VXORPS_128,
               "vxorps %%xmm6, %%xmm8, %%xmm7",
               "vxorps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VXORPD_128,
               "vxorpd %%xmm6, %%xmm8, %%xmm7",
               "vxorpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VORPD_128,
               "vorpd %%xmm6, %%xmm8, %%xmm7",
               "vorpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDNPD_128,
               "vandnpd %%xmm6, %%xmm8, %%xmm7",
               "vandnpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCVTPS2PD_128,
               "vcvtps2pd %%xmm6, %%xmm8",
               "vcvtps2pd (%%rsi), %%xmm8")
GEN_test_RandM(VUCOMISD_128,
               "vucomisd %%xmm6, %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vucomisd (%%rsi), %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VUCOMISS_128,
               "vucomiss %%xmm6, %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vucomiss (%%rsi), %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VPINSRQ_128,
               "vpinsrq $0, %%r14, %%xmm8, %%xmm7",
               "vpinsrq $1, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDQ_128,
               "vpaddq %%xmm6, %%xmm8, %%xmm7",
               "vpaddq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBQ_128,
               "vpsubq %%xmm6, %%xmm8, %%xmm7",
               "vpsubq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBW_128,
               "vpsubw %%xmm6, %%xmm8, %%xmm7",
               "vpsubw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMOVUPD_GtoE_256,
               "vmovupd %%ymm9, %%ymm6",
               "vmovupd %%ymm7, (%%rsi)")

GEN_test_RandM(VMOVUPD_EtoG_256,
               "vmovupd %%ymm6, %%ymm9",
               "vmovupd (%%rsi), %%ymm7")

GEN_test_RandM(VMULPD_256,
               "vmulpd %%ymm6, %%ymm8, %%ymm7",
               "vmulpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VMOVUPD_EtoG_128,
               "vmovupd %%xmm6, %%xmm9",
               "vmovupd (%%rsi), %%xmm7")

GEN_test_RandM(VADDPD_256,
               "vaddpd %%ymm6, %%ymm8, %%ymm7",
               "vaddpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VSUBPD_256,
               "vsubpd %%ymm6, %%ymm8, %%ymm7",
               "vsubpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VDIVPD_256,
               "vdivpd %%ymm6, %%ymm8, %%ymm7",
               "vdivpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPCMPEQQ_128,
               "vpcmpeqq %%xmm6, %%xmm8, %%xmm7",
               "vpcmpeqq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSUBPD_128,
               "vsubpd %%xmm6, %%xmm8, %%xmm7",
               "vsubpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDPD_128,
               "vaddpd %%xmm6, %%xmm8, %%xmm7",
               "vaddpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VUNPCKLPD_128,
               "vunpcklpd %%xmm6, %%xmm8, %%xmm7",
               "vunpcklpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VUNPCKHPD_128,
               "vunpckhpd %%xmm6, %%xmm8, %%xmm7",
               "vunpckhpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VUNPCKHPS_128,
               "vunpckhps %%xmm6, %%xmm8, %%xmm7",
               "vunpckhps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMOVUPS_EtoG_128,
               "vmovups %%xmm6, %%xmm8",
               "vmovups (%%rsi), %%xmm9")

GEN_test_RandM(VADDPS_256,
               "vaddps %%ymm6, %%ymm8, %%ymm7",
               "vaddps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VSUBPS_256,
               "vsubps %%ymm6, %%ymm8, %%ymm7",
               "vsubps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VMULPS_256,
               "vmulps %%ymm6, %%ymm8, %%ymm7",
               "vmulps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VDIVPS_256,
               "vdivps %%ymm6, %%ymm8, %%ymm7",
               "vdivps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPCMPGTQ_128,
               "vpcmpgtq %%xmm6, %%xmm8, %%xmm7",
               "vpcmpgtq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPEXTRQ_128_0x0,
               "vpextrq $0x0, %%xmm7, %%r14",
               "vpextrq $0x0, %%xmm7, (%%rsi)")

GEN_test_RandM(VPEXTRQ_128_0x1,
               "vpextrq $0x1, %%xmm7, %%r14",
               "vpextrq $0x1, %%xmm7, (%%rsi)")

GEN_test_Ronly(VPSRLQ_0x05_128,
               "vpsrlq $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPMULUDQ_128,
               "vpmuludq %%xmm6, %%xmm8, %%xmm7",
               "vpmuludq (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMULDQ_128,
               "vpmuldq %%xmm6, %%xmm8, %%xmm7",
               "vpmuldq (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPSLLQ_0x05_128,
               "vpsllq $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPMAXUD_128,
               "vpmaxud %%xmm6, %%xmm8, %%xmm7",
               "vpmaxud (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMINUD_128,
               "vpminud %%xmm6, %%xmm8, %%xmm7",
               "vpminud (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMULLD_128,
               "vpmulld %%xmm6, %%xmm8, %%xmm7",
               "vpmulld (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMAXUW_128,
               "vpmaxuw %%xmm6, %%xmm8, %%xmm7",
               "vpmaxuw (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPEXTRW_128_EregOnly_toG_0x0,
               "vpextrw $0x0, %%xmm7, %%r14d")

GEN_test_Ronly(VPEXTRW_128_EregOnly_toG_0x7,
               "vpextrw $0x7, %%xmm7, %%r14d")

GEN_test_RandM(VPMINUW_128,
               "vpminuw %%xmm6, %%xmm8, %%xmm7",
               "vpminuw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHMINPOSUW_128,
               "vphminposuw %%xmm6, %%xmm8",
               "vphminposuw (%%rsi), %%xmm7")

GEN_test_RandM(VPMAXSW_128,
               "vpmaxsw %%xmm6, %%xmm8, %%xmm7",
               "vpmaxsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMINSW_128,
               "vpminsw %%xmm6, %%xmm8, %%xmm7",
               "vpminsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMAXUB_128,
               "vpmaxub %%xmm6, %%xmm8, %%xmm7",
               "vpmaxub (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPEXTRB_GtoE_128_0x0,
               "vpextrb $0x0, %%xmm8, %%r14",
               "vpextrb $0x0, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0x1,
               "vpextrb $0x1, %%xmm8, %%r14",
               "vpextrb $0x1, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0x2,
               "vpextrb $0x2, %%xmm8, %%r14",
               "vpextrb $0x2, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0x3,
               "vpextrb $0x3, %%xmm8, %%r14",
               "vpextrb $0x3, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0x4,
               "vpextrb $0x4, %%xmm8, %%r14",
               "vpextrb $0x4, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0x9,
               "vpextrb $0x9, %%xmm8, %%r14",
               "vpextrb $0x9, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0xE,
               "vpextrb $0xE, %%xmm8, %%r14",
               "vpextrb $0xE, %%xmm8, (%%rsi)")

GEN_test_RandM(VPEXTRB_GtoE_128_0xF,
               "vpextrb $0xF, %%xmm8, %%r14",
               "vpextrb $0xF, %%xmm8, (%%rsi)")

GEN_test_RandM(VPMINUB_128,
               "vpminub %%xmm6, %%xmm8, %%xmm7",
               "vpminub (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMAXSB_128,
               "vpmaxsb %%xmm6, %%xmm8, %%xmm7",
               "vpmaxsb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMINSB_128,
               "vpminsb %%xmm6, %%xmm8, %%xmm7",
               "vpminsb (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPERM2F128_0x00,
               "vperm2f128 $0x00, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x00, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0xFF,
               "vperm2f128 $0xFF, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0xFF, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x30,
               "vperm2f128 $0x30, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x30, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x21,
               "vperm2f128 $0x21, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x21, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x12,
               "vperm2f128 $0x12, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x12, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x03,
               "vperm2f128 $0x03, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x03, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x85,
               "vperm2f128 $0x85, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x85, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPERM2F128_0x5A,
               "vperm2f128 $0x5A, %%ymm6, %%ymm8, %%ymm7",
               "vperm2f128 $0x5A, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPERMILPD_256_0x0,
               "vpermilpd $0x0, %%ymm6, %%ymm8",
               "vpermilpd $0x1, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPD_256_0xF,
               "vpermilpd $0xF, %%ymm6, %%ymm8",
               "vpermilpd $0xE, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPD_256_0xA,
               "vpermilpd $0xA, %%ymm6, %%ymm8",
               "vpermilpd $0xB, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPD_256_0x5,
               "vpermilpd $0x5, %%ymm6, %%ymm8",
               "vpermilpd $0x4, (%%rsi), %%ymm8")

GEN_test_RandM(VPERMILPD_128_0x0,
               "vpermilpd $0x0, %%xmm6, %%xmm8",
               "vpermilpd $0x1, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPD_128_0x3,
               "vpermilpd $0x3, %%xmm6, %%xmm8",
               "vpermilpd $0x2, (%%rsi), %%xmm8")

GEN_test_RandM(VUNPCKLPD_256,
               "vunpcklpd %%ymm6, %%ymm8, %%ymm7",
               "vunpcklpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VUNPCKHPD_256,
               "vunpckhpd %%ymm6, %%ymm8, %%ymm7",
               "vunpckhpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VSHUFPS_0x39_256,
               "vshufps $0x39, %%ymm9, %%ymm8, %%ymm7",
               "vshufps $0xC6, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VUNPCKLPS_256,
               "vunpcklps %%ymm6, %%ymm8, %%ymm7",
               "vunpcklps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VUNPCKHPS_256,
               "vunpckhps %%ymm6, %%ymm8, %%ymm7",
               "vunpckhps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VXORPD_256,
               "vxorpd %%ymm6, %%ymm8, %%ymm7",
               "vxorpd (%%rsi), %%ymm8, %%ymm7")
GEN_test_Monly(VBROADCASTSD_256,
               "vbroadcastsd (%%rsi), %%ymm8")
GEN_test_RandM(VCMPPD_128_0x0,
               "vcmppd $0, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x0,
               "vcmppd $0, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x0,
               "vcmpps $0, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x0,
               "vcmpps $0, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1,
               "vcmppd $1, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1,
               "vcmppd $1, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $1, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1,
               "vcmpps $1, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1,
               "vcmpps $1, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $1, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x2,
               "vcmppd $2, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $2, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x2,
               "vcmppd $2, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $2, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x2,
               "vcmpps $2, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $2, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x2,
               "vcmpps $2, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $2, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x3,
               "vcmppd $3, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x3,
               "vcmppd $3, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $3, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x3,
               "vcmpps $3, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x3,
               "vcmpps $3, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $3, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x4,
               "vcmppd $4, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $4, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x4,
               "vcmppd $4, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $4, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x4,
               "vcmpps $4, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $4, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x4,
               "vcmpps $4, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $4, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x5,
               "vcmppd $5, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x5,
               "vcmppd $5, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $5, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x5,
               "vcmpps $5, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x5,
               "vcmpps $5, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $5, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x6,
               "vcmppd $6, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $6, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x6,
               "vcmppd $6, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $6, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x6,
               "vcmpps $6, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $6, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x6,
               "vcmpps $6, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $6, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x7,
               "vcmppd $7, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $7, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x7,
               "vcmppd $7, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $7, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x7,
               "vcmpps $7, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $7, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x7,
               "vcmpps $7, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $7, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x8,
               "vcmppd $8, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $8, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x8,
               "vcmppd $8, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $8, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x8,
               "vcmpps $8, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $8, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x8,
               "vcmpps $8, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $8, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x9,
               "vcmppd $9, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x9,
               "vcmppd $9, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $9, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x9,
               "vcmpps $9, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x9,
               "vcmpps $9, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $9, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xA,
               "vcmppd $0xA, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xA, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xA,
               "vcmppd $0xA, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xA, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xA,
               "vcmpps $0xA, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xA, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xA,
               "vcmpps $0xA, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xA, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xB,
               "vcmppd $0xB, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xB, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xB,
               "vcmppd $0xB, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xB, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xB,
               "vcmpps $0xB, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xB, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xB,
               "vcmpps $0xB, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xB, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xC,
               "vcmppd $0xC, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xC, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xC,
               "vcmppd $0xC, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xC, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xC,
               "vcmpps $0xC, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xC, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xC,
               "vcmpps $0xC, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xC, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xD,
               "vcmppd $0xD, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xD, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xD,
               "vcmppd $0xD, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xD, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xD,
               "vcmpps $0xD, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xD, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xD,
               "vcmpps $0xD, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xD, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xE,
               "vcmppd $0xE, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xE, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xE,
               "vcmppd $0xE, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xE, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xE,
               "vcmpps $0xE, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xE, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xE,
               "vcmpps $0xE, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xE, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0xF,
               "vcmppd $0xF, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0xF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0xF,
               "vcmppd $0xF, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0xF, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0xF,
               "vcmpps $0xF, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0xF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0xF,
               "vcmpps $0xF, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0xF, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x10,
               "vcmppd $0x10, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x10, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x10,
               "vcmppd $0x10, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x10, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x10,
               "vcmpps $0x10, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x10, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x10,
               "vcmpps $0x10, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x10, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x11,
               "vcmppd $0x11, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x11, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x11,
               "vcmppd $0x11, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x11, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x11,
               "vcmpps $0x11, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x11, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x11,
               "vcmpps $0x11, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x11, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x12,
               "vcmppd $0x12, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x12, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x12,
               "vcmppd $0x12, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x12, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x12,
               "vcmpps $0x12, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x12, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x12,
               "vcmpps $0x12, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x12, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x13,
               "vcmppd $0x13, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x13, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x13,
               "vcmppd $0x13, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x13, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x13,
               "vcmpps $0x13, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x13, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x13,
               "vcmpps $0x13, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x13, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x14,
               "vcmppd $0x14, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x14, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x14,
               "vcmppd $0x14, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x14, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x14,
               "vcmpps $0x14, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x14, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x14,
               "vcmpps $0x14, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x14, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x15,
               "vcmppd $0x15, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x15, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x15,
               "vcmppd $0x15, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x15, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x15,
               "vcmpps $0x15, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x15, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x15,
               "vcmpps $0x15, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x15, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x16,
               "vcmppd $0x16, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x16, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x16,
               "vcmppd $0x16, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x16, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x16,
               "vcmpps $0x16, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x16, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x16,
               "vcmpps $0x16, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x16, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x17,
               "vcmppd $0x17, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x17, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x17,
               "vcmppd $0x17, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x17, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x17,
               "vcmpps $0x17, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x17, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x17,
               "vcmpps $0x17, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x17, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x18,
               "vcmppd $0x18, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x18, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x18,
               "vcmppd $0x18, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x18, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x18,
               "vcmpps $0x18, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x18, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x18,
               "vcmpps $0x18, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x18, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x19,
               "vcmppd $0x19, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x19, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x19,
               "vcmppd $0x19, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x19, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x19,
               "vcmpps $0x19, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x19, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x19,
               "vcmpps $0x19, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x19, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1A,
               "vcmppd $0x1A, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1A, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1A,
               "vcmppd $0x1A, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1A, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1A,
               "vcmpps $0x1A, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1A, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1A,
               "vcmpps $0x1A, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1A, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1B,
               "vcmppd $0x1B, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1B, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1B,
               "vcmppd $0x1B, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1B, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1B,
               "vcmpps $0x1B, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1B, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1B,
               "vcmpps $0x1B, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1B, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1C,
               "vcmppd $0x1C, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1C, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1C,
               "vcmppd $0x1C, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1C, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1C,
               "vcmpps $0x1C, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1C, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1C,
               "vcmpps $0x1C, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1C, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1D,
               "vcmppd $0x1D, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1D, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1D,
               "vcmppd $0x1D, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1D, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1D,
               "vcmpps $0x1D, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1D, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1D,
               "vcmpps $0x1D, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1D, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1E,
               "vcmppd $0x1E, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1E, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1E,
               "vcmppd $0x1E, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1E, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1E,
               "vcmpps $0x1E, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1E, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1E,
               "vcmpps $0x1E, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1E, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCMPPD_128_0x1F,
               "vcmppd $0x1F, %%xmm6, %%xmm8, %%xmm7",
               "vcmppd $0x1F, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPD_256_0x1F,
               "vcmppd $0x1F, %%ymm6, %%ymm8, %%ymm7",
               "vcmppd $0x1F, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VCMPPS_128_0x1F,
               "vcmpps $0x1F, %%xmm6, %%xmm8, %%xmm7",
               "vcmpps $0x1F, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VCMPPS_256_0x1F,
               "vcmpps $0x1F, %%ymm6, %%ymm8, %%ymm7",
               "vcmpps $0x1F, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VCVTDQ2PD_128,
               "vcvtdq2pd %%xmm6, %%xmm8",
               "vcvtdq2pd (%%rsi), %%xmm8")

GEN_test_RandM(VDIVPD_128,
               "vdivpd %%xmm6, %%xmm8, %%xmm7",
               "vdivpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDPD_256,
               "vandpd %%ymm6, %%ymm8, %%ymm7",
               "vandpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPMOVSXBW_128,
               "vpmovsxbw %%xmm6, %%xmm8",
               "vpmovsxbw (%%rsi), %%xmm8")

GEN_test_RandM(VPSUBUSW_128,
               "vpsubusw %%xmm9, %%xmm8, %%xmm7",
               "vpsubusw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSUBSW_128,
               "vpsubsw %%xmm9, %%xmm8, %%xmm7",
               "vpsubsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPCMPEQW_128,
               "vpcmpeqw %%xmm6, %%xmm8, %%xmm7",
               "vpcmpeqw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDB_128,
               "vpaddb %%xmm6, %%xmm8, %%xmm7",
               "vpaddb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMOVAPS_EtoG_256,
               "vmovaps %%ymm6, %%ymm8",
               "vmovaps (%%rsi), %%ymm9")

GEN_test_RandM(VCVTDQ2PD_256,
               "vcvtdq2pd %%xmm6, %%ymm8",
               "vcvtdq2pd (%%rsi), %%ymm8")

GEN_test_Monly(VMOVHPD_128_LoadForm,
               "vmovhpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_Monly(VMOVHPS_128_LoadForm,
               "vmovhps (%%rsi), %%xmm8, %%xmm7")

// The y suffix denotes a 256 -> 128 operation
GEN_test_RandM(VCVTPD2PS_256,
               "vcvtpd2psy %%ymm8, %%xmm7",
               "vcvtpd2psy (%%rsi), %%xmm9")

GEN_test_RandM(VPUNPCKHDQ_128,
               "vpunpckhdq %%xmm6, %%xmm8, %%xmm7",
               "vpunpckhdq (%%rsi), %%xmm8, %%xmm7")

GEN_test_Monly(VBROADCASTSS_128,
               "vbroadcastss (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVSXDQ_128,
               "vpmovsxdq %%xmm6, %%xmm8",
               "vpmovsxdq (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVSXWD_128,
               "vpmovsxwd %%xmm6, %%xmm8",
               "vpmovsxwd (%%rsi), %%xmm8")

GEN_test_RandM(VDIVPS_128,
               "vdivps %%xmm9, %%xmm8, %%xmm7",
               "vdivps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VANDPS_256,
               "vandps %%ymm6, %%ymm8, %%ymm7",
               "vandps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VXORPS_256,
               "vxorps %%ymm6, %%ymm8, %%ymm7",
               "vxorps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VORPS_256,
               "vorps %%ymm6, %%ymm8, %%ymm7",
               "vorps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VANDNPD_256,
               "vandnpd %%ymm6, %%ymm8, %%ymm7",
               "vandnpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VANDNPS_256,
               "vandnps %%ymm6, %%ymm8, %%ymm7",
               "vandnps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VORPD_256,
               "vorpd %%ymm6, %%ymm8, %%ymm7",
               "vorpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPERMILPS_256_0x0F,
               "vpermilps $0x0F, %%ymm6, %%ymm8",
               "vpermilps $0x1E, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPS_256_0xFA,
               "vpermilps $0xFA, %%ymm6, %%ymm8",
               "vpermilps $0xE5, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPS_256_0xA3,
               "vpermilps $0xA3, %%ymm6, %%ymm8",
               "vpermilps $0xB4, (%%rsi), %%ymm8")
GEN_test_RandM(VPERMILPS_256_0x5A,
               "vpermilps $0x5A, %%ymm6, %%ymm8",
               "vpermilps $0x45, (%%rsi), %%ymm8")

GEN_test_RandM(VPMULHW_128,
               "vpmulhw %%xmm9, %%xmm8, %%xmm7",
               "vpmulhw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPUNPCKHQDQ_128,
               "vpunpckhqdq %%xmm6, %%xmm8, %%xmm7",
               "vpunpckhqdq (%%rsi), %%xmm8, %%xmm7")

GEN_test_Ronly(VPSRAW_0x05_128,
               "vpsraw $0x5, %%xmm9, %%xmm7")

GEN_test_RandM(VPCMPGTB_128,
               "vpcmpgtb %%xmm6, %%xmm8, %%xmm7",
               "vpcmpgtb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPCMPGTW_128,
               "vpcmpgtw %%xmm6, %%xmm8, %%xmm7",
               "vpcmpgtw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPCMPGTD_128,
               "vpcmpgtd %%xmm6, %%xmm8, %%xmm7",
               "vpcmpgtd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMOVZXBD_128,
               "vpmovzxbd %%xmm6, %%xmm8",
               "vpmovzxbd (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVSXBD_128,
               "vpmovsxbd %%xmm6, %%xmm8",
               "vpmovsxbd (%%rsi), %%xmm8")

GEN_test_RandM(VPINSRB_128_1of3,
               "vpinsrb $0, %%r14d, %%xmm8, %%xmm7",
               "vpinsrb $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPINSRB_128_2of3,
               "vpinsrb $6, %%r14d, %%xmm8, %%xmm7",
               "vpinsrb $9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPINSRB_128_3of3,
               "vpinsrb $12, %%r14d, %%xmm8, %%xmm7",
               "vpinsrb $15, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPINSRW_128_1of4,
               "vpinsrw $0, %%r14d, %%xmm8, %%xmm7",
               "vpinsrw $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPINSRW_128_2of4,
               "vpinsrw $2, %%r14d, %%xmm8, %%xmm7",
               "vpinsrw $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPINSRW_128_3of4,
               "vpinsrw $4, %%r14d, %%xmm8, %%xmm7",
               "vpinsrw $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPINSRW_128_4of4,
               "vpinsrw $6, %%r14d, %%xmm8, %%xmm7",
               "vpinsrw $7, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VCOMISD_128,
               "vcomisd %%xmm6, %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vcomisd (%%rsi), %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VCOMISS_128,
               "vcomiss %%xmm6, %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vcomiss (%%rsi), %%xmm8; pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VMOVUPS_YMM_to_YMMorMEM,
               "vmovups %%ymm8, %%ymm7",
               "vmovups %%ymm9, (%%rsi)")
1588 GEN_test_RandM(VDPPD_128_1of4
,
1589 "vdppd $0x00, %%xmm6, %%xmm8, %%xmm7",
1590 "vdppd $0xA5, (%%rsi), %%xmm9, %%xmm6")
1591 GEN_test_RandM(VDPPD_128_2of4
,
1592 "vdppd $0x5A, %%xmm6, %%xmm8, %%xmm7",
1593 "vdppd $0xFF, (%%rsi), %%xmm9, %%xmm6")
1594 GEN_test_RandM(VDPPD_128_3of4
,
1595 "vdppd $0x0F, %%xmm6, %%xmm8, %%xmm7",
1596 "vdppd $0x37, (%%rsi), %%xmm9, %%xmm6")
1597 GEN_test_RandM(VDPPD_128_4of4
,
1598 "vdppd $0xF0, %%xmm6, %%xmm8, %%xmm7",
1599 "vdppd $0x73, (%%rsi), %%xmm9, %%xmm6")
1601 GEN_test_RandM(VDPPS_128_1of4
,
1602 "vdpps $0x00, %%xmm6, %%xmm8, %%xmm7",
1603 "vdpps $0xA5, (%%rsi), %%xmm9, %%xmm6")
1604 GEN_test_RandM(VDPPS_128_2of4
,
1605 "vdpps $0x5A, %%xmm6, %%xmm8, %%xmm7",
1606 "vdpps $0xFF, (%%rsi), %%xmm9, %%xmm6")
1607 GEN_test_RandM(VDPPS_128_3of4
,
1608 "vdpps $0x0F, %%xmm6, %%xmm8, %%xmm7",
1609 "vdpps $0x37, (%%rsi), %%xmm9, %%xmm6")
1610 GEN_test_RandM(VDPPS_128_4of4
,
1611 "vdpps $0xF0, %%xmm6, %%xmm8, %%xmm7",
1612 "vdpps $0x73, (%%rsi), %%xmm9, %%xmm6")
1614 GEN_test_RandM(VDPPS_256_1of4
,
1615 "vdpps $0x00, %%ymm6, %%ymm8, %%ymm7",
1616 "vdpps $0xA5, (%%rsi), %%ymm9, %%ymm6")
1617 GEN_test_RandM(VDPPS_256_2of4
,
1618 "vdpps $0x5A, %%ymm6, %%ymm8, %%ymm7",
1619 "vdpps $0xFF, (%%rsi), %%ymm9, %%ymm6")
1620 GEN_test_RandM(VDPPS_256_3of4
,
1621 "vdpps $0x0F, %%ymm6, %%ymm8, %%ymm7",
1622 "vdpps $0x37, (%%rsi), %%ymm9, %%ymm6")
1623 GEN_test_RandM(VDPPS_256_4of4
,
1624 "vdpps $0xF0, %%ymm6, %%ymm8, %%ymm7",
1625 "vdpps $0x73, (%%rsi), %%ymm9, %%ymm6")
1627 GEN_test_Monly(VBROADCASTSS_256
,
1628 "vbroadcastss (%%rsi), %%ymm8")
1630 GEN_test_RandM(VPALIGNR_128_1of3
,
1631 "vpalignr $0, %%xmm6, %%xmm8, %%xmm7",
1632 "vpalignr $3, (%%rsi), %%xmm8, %%xmm7")
1633 GEN_test_RandM(VPALIGNR_128_2of3
,
1634 "vpalignr $6, %%xmm6, %%xmm8, %%xmm7",
1635 "vpalignr $9, (%%rsi), %%xmm8, %%xmm7")
1636 GEN_test_RandM(VPALIGNR_128_3of3
,
1637 "vpalignr $12, %%xmm6, %%xmm8, %%xmm7",
1638 "vpalignr $15, (%%rsi), %%xmm8, %%xmm7")
1640 // These (3 arg) vmovss and vmovsd gave some difficulty in testing. See
1642 GEN_test_Ronly(VMOVSD_REG_XMM_1of3
,
1643 "vmovsd %%xmm9, %%xmm7, %%xmm8") // encoding 10
1644 GEN_test_Ronly(VMOVSD_REG_XMM_2of3
,
1645 "vmovsd %%xmm7, %%xmm8, %%xmm9") // encoding 10
1646 GEN_test_Ronly(VMOVSD_REG_XMM_3of3
,
1647 "vmovsd %%xmm8, %%xmm9, %%xmm7") // encoding 11
1649 GEN_test_Ronly(VMOVSS_REG_XMM_1of3
,
1650 "vmovss %%xmm9, %%xmm7, %%xmm8") // encoding 10
1651 GEN_test_Ronly(VMOVSS_REG_XMM_2of3
,
1652 "vmovss %%xmm7, %%xmm8, %%xmm9") // encoding 10
1653 GEN_test_Ronly(VMOVSS_REG_XMM_3of3
,
1654 "vmovss %%xmm8, %%xmm9, %%xmm7") // encoding 11
1656 GEN_test_Monly(VMOVLPD_128_M64_XMM_XMM
, "vmovlpd (%%rsi), %%xmm8, %%xmm7")
1658 GEN_test_Monly(VMOVLPD_128_XMM_M64
, "vmovlpd %%xmm7, (%%rsi)")
GEN_test_RandM(VSHUFPD_128_1of2,
               "vshufpd $0, %%xmm9, %%xmm8, %%xmm7",
               "vshufpd $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VSHUFPD_128_2of2,
               "vshufpd $2, %%xmm9, %%xmm8, %%xmm7",
               "vshufpd $3, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VSHUFPD_256_1of2,
               "vshufpd $0x00, %%ymm9, %%ymm8, %%ymm7",
               "vshufpd $0xFF, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VSHUFPD_256_2of2,
               "vshufpd $0x5A, %%ymm9, %%ymm8, %%ymm7",
               "vshufpd $0xA5, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPERMILPS_128_0x00,
               "vpermilps $0x00, %%xmm6, %%xmm8",
               "vpermilps $0x01, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0xFE,
               "vpermilps $0xFE, %%xmm6, %%xmm8",
               "vpermilps $0xFF, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0x30,
               "vpermilps $0x30, %%xmm6, %%xmm8",
               "vpermilps $0x03, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0x21,
               "vpermilps $0x21, %%xmm6, %%xmm8",
               "vpermilps $0x12, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0xD7,
               "vpermilps $0xD7, %%xmm6, %%xmm8",
               "vpermilps $0x6C, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0xB5,
               "vpermilps $0xB5, %%xmm6, %%xmm8",
               "vpermilps $0x4A, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0x85,
               "vpermilps $0x85, %%xmm6, %%xmm8",
               "vpermilps $0xDC, (%%rsi), %%xmm8")
GEN_test_RandM(VPERMILPS_128_0x29,
               "vpermilps $0x29, %%xmm6, %%xmm8",
               "vpermilps $0x92, (%%rsi), %%xmm8")

GEN_test_RandM(VBLENDPS_128_1of3,
               "vblendps $0, %%xmm6, %%xmm8, %%xmm7",
               "vblendps $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VBLENDPS_128_2of3,
               "vblendps $6, %%xmm6, %%xmm8, %%xmm7",
               "vblendps $9, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VBLENDPS_128_3of3,
               "vblendps $12, %%xmm6, %%xmm8, %%xmm7",
               "vblendps $15, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VBLENDPD_128_1of2,
               "vblendpd $0, %%xmm6, %%xmm8, %%xmm7",
               "vblendpd $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VBLENDPD_128_2of2,
               "vblendpd $2, %%xmm6, %%xmm8, %%xmm7",
               "vblendpd $3, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VBLENDPD_256_1of3,
               "vblendpd $0, %%ymm6, %%ymm8, %%ymm7",
               "vblendpd $3, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VBLENDPD_256_2of3,
               "vblendpd $6, %%ymm6, %%ymm8, %%ymm7",
               "vblendpd $9, (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VBLENDPD_256_3of3,
               "vblendpd $12, %%ymm6, %%ymm8, %%ymm7",
               "vblendpd $15, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPBLENDW_128_0x00,
               "vpblendw $0x00, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x01, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0xFE,
               "vpblendw $0xFE, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0xFF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0x30,
               "vpblendw $0x30, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x03, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0x21,
               "vpblendw $0x21, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x12, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0xD7,
               "vpblendw $0xD7, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x6C, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0xB5,
               "vpblendw $0xB5, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x4A, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0x85,
               "vpblendw $0x85, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0xDC, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPBLENDW_128_0x29,
               "vpblendw $0x29, %%xmm6, %%xmm8, %%xmm7",
               "vpblendw $0x92, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VMOVUPS_EtoG_256,
               "vmovups %%ymm6, %%ymm9",
               "vmovups (%%rsi), %%ymm7")

GEN_test_RandM(VMOVDQU_GtoE_256,
               "vmovdqu %%ymm9, %%ymm6",
               "vmovdqu %%ymm7, (%%rsi)")

GEN_test_RandM(VCVTPS2PD_256,
               "vcvtps2pd %%xmm9, %%ymm6",
               "vcvtps2pd (%%rsi), %%ymm7")

GEN_test_RandM(VCVTTPS2DQ_128,
               "vcvttps2dq %%xmm9, %%xmm6",
               "vcvttps2dq (%%rsi), %%xmm7")

GEN_test_RandM(VCVTTPS2DQ_256,
               "vcvttps2dq %%ymm9, %%ymm6",
               "vcvttps2dq (%%rsi), %%ymm7")

GEN_test_RandM(VCVTDQ2PS_128,
               "vcvtdq2ps %%xmm9, %%xmm6",
               "vcvtdq2ps (%%rsi), %%xmm7")

GEN_test_RandM(VCVTDQ2PS_256,
               "vcvtdq2ps %%ymm9, %%ymm6",
               "vcvtdq2ps (%%rsi), %%ymm7")

GEN_test_RandM(VCVTTPD2DQ_128,
               "vcvttpd2dqx %%xmm9, %%xmm6",
               "vcvttpd2dqx (%%rsi), %%xmm7")

GEN_test_RandM(VCVTTPD2DQ_256,
               "vcvttpd2dqy %%ymm9, %%xmm6",
               "vcvttpd2dqy (%%rsi), %%xmm7")

GEN_test_RandM(VCVTPD2DQ_128,
               "vcvtpd2dqx %%xmm9, %%xmm6",
               "vcvtpd2dqx (%%rsi), %%xmm7")

GEN_test_RandM(VCVTPD2DQ_256,
               "vcvtpd2dqy %%ymm9, %%xmm6",
               "vcvtpd2dqy (%%rsi), %%xmm7")

GEN_test_RandM(VMOVSLDUP_128,
               "vmovsldup %%xmm9, %%xmm6",
               "vmovsldup (%%rsi), %%xmm7")

GEN_test_RandM(VMOVSLDUP_256,
               "vmovsldup %%ymm9, %%ymm6",
               "vmovsldup (%%rsi), %%ymm7")

GEN_test_RandM(VMOVSHDUP_128,
               "vmovshdup %%xmm9, %%xmm6",
               "vmovshdup (%%rsi), %%xmm7")

GEN_test_RandM(VMOVSHDUP_256,
               "vmovshdup %%ymm9, %%ymm6",
               "vmovshdup (%%rsi), %%ymm7")

GEN_test_RandM(VPERMILPS_VAR_128,
               "vpermilps %%xmm6, %%xmm8, %%xmm7",
               "vpermilps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPERMILPD_VAR_128,
               "vpermilpd %%xmm6, %%xmm8, %%xmm7",
               "vpermilpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPERMILPS_VAR_256,
               "vpermilps %%ymm6, %%ymm8, %%ymm7",
               "vpermilps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VPERMILPD_VAR_256,
               "vpermilpd %%ymm6, %%ymm8, %%ymm7",
               "vpermilpd (%%rsi), %%ymm8, %%ymm7")
GEN_test_RandM(VPSLLW_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsllw %%xmm6, %%xmm8, %%xmm9",
               "andq $15, 128(%%rsi);"
               "vpsllw 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSRLW_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsrlw %%xmm6, %%xmm8, %%xmm9",
               "andq $15, 128(%%rsi);"
               "vpsrlw 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSRAW_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsraw %%xmm6, %%xmm8, %%xmm9",
               "andq $15, 128(%%rsi);"
               "vpsraw 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSLLD_128,
               "vmovd %%r14d, %%xmm6;"
               "vpslld %%xmm6, %%xmm8, %%xmm9",
               "andq $31, 128(%%rsi);"
               "vpslld 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSRLD_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsrld %%xmm6, %%xmm8, %%xmm9",
               "andq $31, 128(%%rsi);"
               "vpsrld 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSRAD_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsrad %%xmm6, %%xmm8, %%xmm9",
               "andq $31, 128(%%rsi);"
               "vpsrad 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSLLQ_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsllq %%xmm6, %%xmm8, %%xmm9",
               "andq $63, 128(%%rsi);"
               "vpsllq 128(%%rsi), %%xmm8, %%xmm9")

GEN_test_RandM(VPSRLQ_128,
               "vmovd %%r14d, %%xmm6;"
               "vpsrlq %%xmm6, %%xmm8, %%xmm9",
               "andq $63, 128(%%rsi);"
               "vpsrlq 128(%%rsi), %%xmm8, %%xmm9")
GEN_test_RandM(VROUNDPS_128_0x0,
               "vroundps $0x0, %%xmm8, %%xmm9",
               "vroundps $0x0, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPS_128_0x1,
               "vroundps $0x1, %%xmm8, %%xmm9",
               "vroundps $0x1, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPS_128_0x2,
               "vroundps $0x2, %%xmm8, %%xmm9",
               "vroundps $0x2, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPS_128_0x3,
               "vroundps $0x3, %%xmm8, %%xmm9",
               "vroundps $0x3, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPS_128_0x4,
               "vroundps $0x4, %%xmm8, %%xmm9",
               "vroundps $0x4, (%%rsi), %%xmm9")

GEN_test_RandM(VROUNDPS_256_0x0,
               "vroundps $0x0, %%ymm8, %%ymm9",
               "vroundps $0x0, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPS_256_0x1,
               "vroundps $0x1, %%ymm8, %%ymm9",
               "vroundps $0x1, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPS_256_0x2,
               "vroundps $0x2, %%ymm8, %%ymm9",
               "vroundps $0x2, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPS_256_0x3,
               "vroundps $0x3, %%ymm8, %%ymm9",
               "vroundps $0x3, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPS_256_0x4,
               "vroundps $0x4, %%ymm8, %%ymm9",
               "vroundps $0x4, (%%rsi), %%ymm9")

GEN_test_RandM(VROUNDPD_128_0x0,
               "vroundpd $0x0, %%xmm8, %%xmm9",
               "vroundpd $0x0, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPD_128_0x1,
               "vroundpd $0x1, %%xmm8, %%xmm9",
               "vroundpd $0x1, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPD_128_0x2,
               "vroundpd $0x2, %%xmm8, %%xmm9",
               "vroundpd $0x2, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPD_128_0x3,
               "vroundpd $0x3, %%xmm8, %%xmm9",
               "vroundpd $0x3, (%%rsi), %%xmm9")
GEN_test_RandM(VROUNDPD_128_0x4,
               "vroundpd $0x4, %%xmm8, %%xmm9",
               "vroundpd $0x4, (%%rsi), %%xmm9")

GEN_test_RandM(VROUNDPD_256_0x0,
               "vroundpd $0x0, %%ymm8, %%ymm9",
               "vroundpd $0x0, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPD_256_0x1,
               "vroundpd $0x1, %%ymm8, %%ymm9",
               "vroundpd $0x1, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPD_256_0x2,
               "vroundpd $0x2, %%ymm8, %%ymm9",
               "vroundpd $0x2, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPD_256_0x3,
               "vroundpd $0x3, %%ymm8, %%ymm9",
               "vroundpd $0x3, (%%rsi), %%ymm9")
GEN_test_RandM(VROUNDPD_256_0x4,
               "vroundpd $0x4, %%ymm8, %%ymm9",
               "vroundpd $0x4, (%%rsi), %%ymm9")

GEN_test_RandM(VPMADDWD_128,
               "vpmaddwd %%xmm6, %%xmm8, %%xmm7",
               "vpmaddwd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDSUBPS_128,
               "vaddsubps %%xmm6, %%xmm8, %%xmm7",
               "vaddsubps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDSUBPS_256,
               "vaddsubps %%ymm6, %%ymm8, %%ymm7",
               "vaddsubps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VADDSUBPD_128,
               "vaddsubpd %%xmm6, %%xmm8, %%xmm7",
               "vaddsubpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VADDSUBPD_256,
               "vaddsubpd %%ymm6, %%ymm8, %%ymm7",
               "vaddsubpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VROUNDSS_0x0,
               "vroundss $0x0, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x0, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSS_0x1,
               "vroundss $0x1, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x1, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSS_0x2,
               "vroundss $0x2, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x2, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSS_0x3,
               "vroundss $0x3, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x3, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSS_0x4,
               "vroundss $0x4, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x4, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSS_0x5,
               "vroundss $0x5, %%xmm8, %%xmm6, %%xmm9",
               "vroundss $0x5, (%%rsi), %%xmm6, %%xmm9")

GEN_test_RandM(VROUNDSD_0x0,
               "vroundsd $0x0, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x0, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSD_0x1,
               "vroundsd $0x1, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x1, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSD_0x2,
               "vroundsd $0x2, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x2, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSD_0x3,
               "vroundsd $0x3, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x3, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSD_0x4,
               "vroundsd $0x4, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x4, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VROUNDSD_0x5,
               "vroundsd $0x5, %%xmm8, %%xmm6, %%xmm9",
               "vroundsd $0x5, (%%rsi), %%xmm6, %%xmm9")
GEN_test_RandM(VPTEST_128_1,
               "vptest %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vptest (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VPTEST_128_2,
               "vmovups %%xmm6, %%xmm8;"
               "vptest %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%xmm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "vxorpd %%xmm8,%%xmm7,%%xmm8;"
               "vptest (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
GEN_test_RandM(VPTEST_256_1,
               "vptest %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vptest (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VPTEST_256_2,
               "vmovups %%ymm6, %%ymm8;"
               "vptest %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%ymm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "subq $1024, %%rsp;"
               "vmovups %%xmm7,512(%%rsp);"
               "vmovups %%xmm7,528(%%rsp);"
               "vmovups 512(%%rsp), %%ymm7;"
               "addq $1024, %%rsp;"
               "vxorpd %%ymm8,%%ymm7,%%ymm8;"
               "vptest (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* VTESTPS/VTESTPD: test once with all-0 operands, once with
   one all-0s and one all 1s, and once with random data. */

GEN_test_RandM(VTESTPS_128_1,
               "vtestps %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestps (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VTESTPS_128_2,
               "vmovups %%xmm6, %%xmm8;"
               "vtestps %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%xmm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "vxorpd %%xmm8,%%xmm7,%%xmm8;"
               "vtestps (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VTESTPS_128_3,
               "vtestps %%xmm8, %%xmm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestps (%%rsi), %%xmm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
GEN_test_RandM(VTESTPS_256_1,
               "vtestps %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestps (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VTESTPS_256_2,
               "vmovups %%ymm6, %%ymm8;"
               "vtestps %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%ymm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "subq $1024, %%rsp;"
               "vmovups %%xmm7,512(%%rsp);"
               "vmovups %%xmm7,528(%%rsp);"
               "vmovups 512(%%rsp), %%ymm7;"
               "addq $1024, %%rsp;"
               "vxorpd %%ymm8,%%ymm7,%%ymm8;"
               "vtestps (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VTESTPS_256_3,
               "vtestps %%ymm8, %%ymm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestps (%%rsi), %%ymm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
GEN_test_RandM(VTESTPD_128_1,
               "vtestpd %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestpd (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VTESTPD_128_2,
               "vmovups %%xmm6, %%xmm8;"
               "vtestpd %%xmm6, %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%xmm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "vxorpd %%xmm8,%%xmm7,%%xmm8;"
               "vtestpd (%%rsi), %%xmm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VTESTPD_128_3,
               "vtestpd %%xmm8, %%xmm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestpd (%%rsi), %%xmm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
GEN_test_RandM(VTESTPD_256_1,
               "vtestpd %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestpd (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
/* Here we ignore the boilerplate-supplied data and try to do
   x AND x  and  x AND NOT x.  Not a great test but better
   than nothing. */
GEN_test_RandM(VTESTPD_256_2,
               "vmovups %%ymm6, %%ymm8;"
               "vtestpd %%ymm6, %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vmovups (%%rsi), %%ymm8;"
               "vcmpeqpd %%xmm8,%%xmm8,%%xmm7;"
               "subq $1024, %%rsp;"
               "vmovups %%xmm7,512(%%rsp);"
               "vmovups %%xmm7,528(%%rsp);"
               "vmovups 512(%%rsp), %%ymm7;"
               "addq $1024, %%rsp;"
               "vxorpd %%ymm8,%%ymm7,%%ymm8;"
               "vtestpd (%%rsi), %%ymm8; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")

GEN_test_RandM(VTESTPD_256_3,
               "vtestpd %%ymm8, %%ymm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14",
               "vtestpd (%%rsi), %%ymm9; "
               "pushfq; popq %%r14; andq $0x8D5, %%r14")
GEN_test_RandM(VBLENDVPS_128,
               "vblendvps %%xmm9, %%xmm6, %%xmm8, %%xmm7",
               "vblendvps %%xmm9, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VBLENDVPS_256,
               "vblendvps %%ymm9, %%ymm6, %%ymm8, %%ymm7",
               "vblendvps %%ymm9, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VBLENDVPD_128,
               "vblendvpd %%xmm9, %%xmm6, %%xmm8, %%xmm7",
               "vblendvpd %%xmm9, (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VBLENDVPD_256,
               "vblendvpd %%ymm9, %%ymm6, %%ymm8, %%ymm7",
               "vblendvpd %%ymm9, (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VHADDPS_128,
               "vhaddps %%xmm6, %%xmm8, %%xmm7",
               "vhaddps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VHADDPS_256,
               "vhaddps %%ymm6, %%ymm8, %%ymm7",
               "vhaddps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VHADDPD_128,
               "vhaddpd %%xmm6, %%xmm8, %%xmm7",
               "vhaddpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VHADDPD_256,
               "vhaddpd %%ymm6, %%ymm8, %%ymm7",
               "vhaddpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VHSUBPS_128,
               "vhsubps %%xmm6, %%xmm8, %%xmm7",
               "vhsubps (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VHSUBPS_256,
               "vhsubps %%ymm6, %%ymm8, %%ymm7",
               "vhsubps (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VHSUBPD_128,
               "vhsubpd %%xmm6, %%xmm8, %%xmm7",
               "vhsubpd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VHSUBPD_256,
               "vhsubpd %%ymm6, %%ymm8, %%ymm7",
               "vhsubpd (%%rsi), %%ymm8, %%ymm7")

GEN_test_RandM(VEXTRACTPS_0x0,
               "vextractps $0, %%xmm8, %%r14d",
               "vextractps $0, %%xmm8, (%%rsi)")

GEN_test_RandM(VEXTRACTPS_0x1,
               "vextractps $1, %%xmm8, %%r14d",
               "vextractps $1, %%xmm8, (%%rsi)")

GEN_test_RandM(VEXTRACTPS_0x2,
               "vextractps $2, %%xmm8, %%r14d",
               "vextractps $2, %%xmm8, (%%rsi)")

GEN_test_RandM(VEXTRACTPS_0x3,
               "vextractps $3, %%xmm8, %%r14d",
               "vextractps $3, %%xmm8, (%%rsi)")
GEN_test_Monly(VLDDQU_128,
               "vlddqu 1(%%rsi), %%xmm8")

GEN_test_Monly(VLDDQU_256,
               "vlddqu 1(%%rsi), %%ymm8")
GEN_test_Monly(VMOVNTDQA_128,
               "vmovntdqa (%%rsi), %%xmm9")
GEN_test_Monly(VMASKMOVDQU_128,
               "xchgq %%rsi, %%rdi;"
               "vmaskmovdqu %%xmm8, %%xmm9;"
               "xchgq %%rsi, %%rdi")
GEN_test_Ronly(VMOVMSKPD_128,
               "vmovmskpd %%xmm9, %%r14d")

GEN_test_Ronly(VMOVMSKPD_256,
               "vmovmskpd %%ymm9, %%r14d")

GEN_test_Ronly(VMOVMSKPS_128,
               "vmovmskps %%xmm9, %%r14d")

GEN_test_Ronly(VMOVMSKPS_256,
               "vmovmskps %%ymm9, %%r14d")

GEN_test_Monly(VMOVNTPD_128,
               "vmovntpd %%xmm9, (%%rsi)")

GEN_test_Monly(VMOVNTPD_256,
               "vmovntpd %%ymm9, (%%rsi)")

GEN_test_Monly(VMOVNTPS_128,
               "vmovntps %%xmm9, (%%rsi)")

GEN_test_Monly(VMOVNTPS_256,
               "vmovntps %%ymm9, (%%rsi)")

GEN_test_RandM(VPACKSSWB_128,
               "vpacksswb %%xmm6, %%xmm8, %%xmm7",
               "vpacksswb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPAVGB_128,
               "vpavgb %%xmm6, %%xmm8, %%xmm7",
               "vpavgb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPAVGW_128,
               "vpavgw %%xmm6, %%xmm8, %%xmm7",
               "vpavgw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDSB_128,
               "vpaddsb %%xmm6, %%xmm8, %%xmm7",
               "vpaddsb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPADDSW_128,
               "vpaddsw %%xmm6, %%xmm8, %%xmm7",
               "vpaddsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHADDW_128,
               "vphaddw %%xmm6, %%xmm8, %%xmm7",
               "vphaddw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHADDD_128,
               "vphaddd %%xmm6, %%xmm8, %%xmm7",
               "vphaddd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHADDSW_128,
               "vphaddsw %%xmm6, %%xmm8, %%xmm7",
               "vphaddsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMADDUBSW_128,
               "vpmaddubsw %%xmm6, %%xmm8, %%xmm7",
               "vpmaddubsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHSUBW_128,
               "vphsubw %%xmm6, %%xmm8, %%xmm7",
               "vphsubw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHSUBD_128,
               "vphsubd %%xmm6, %%xmm8, %%xmm7",
               "vphsubd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPHSUBSW_128,
               "vphsubsw %%xmm6, %%xmm8, %%xmm7",
               "vphsubsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPABSB_128,
               "vpabsb %%xmm6, %%xmm7",
               "vpabsb (%%rsi), %%xmm7")

GEN_test_RandM(VPABSW_128,
               "vpabsw %%xmm6, %%xmm7",
               "vpabsw (%%rsi), %%xmm7")

GEN_test_RandM(VPMOVSXBQ_128,
               "vpmovsxbq %%xmm6, %%xmm8",
               "vpmovsxbq (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVSXWQ_128,
               "vpmovsxwq %%xmm6, %%xmm8",
               "vpmovsxwq (%%rsi), %%xmm8")

GEN_test_RandM(VPACKUSDW_128,
               "vpackusdw %%xmm6, %%xmm8, %%xmm7",
               "vpackusdw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMOVZXBQ_128,
               "vpmovzxbq %%xmm6, %%xmm8",
               "vpmovzxbq (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVZXWQ_128,
               "vpmovzxwq %%xmm6, %%xmm8",
               "vpmovzxwq (%%rsi), %%xmm8")

GEN_test_RandM(VPMOVZXDQ_128,
               "vpmovzxdq %%xmm6, %%xmm8",
               "vpmovzxdq (%%rsi), %%xmm8")
GEN_test_RandM(VMPSADBW_128_0x0,
               "vmpsadbw $0, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $0, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x1,
               "vmpsadbw $1, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $1, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x2,
               "vmpsadbw $2, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $2, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x3,
               "vmpsadbw $3, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $3, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x4,
               "vmpsadbw $4, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $4, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x5,
               "vmpsadbw $5, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $5, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x6,
               "vmpsadbw $6, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $6, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMPSADBW_128_0x7,
               "vmpsadbw $7, %%xmm6, %%xmm8, %%xmm7",
               "vmpsadbw $7, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VMOVDDUP_YMMorMEM256_to_YMM,
               "vmovddup %%ymm8, %%ymm7",
               "vmovddup (%%rsi), %%ymm9")
GEN_test_Monly(VMOVLPS_128_M64_XMM_XMM,
               "vmovlps (%%rsi), %%xmm8, %%xmm7")
GEN_test_Monly(VMOVLPS_128_XMM_M64,
               "vmovlps %%xmm7, (%%rsi)")
GEN_test_RandM(VPSADBW_128,
               "vpsadbw %%xmm6, %%xmm8, %%xmm7",
               "vpsadbw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSIGNB_128,
               "vpsignb %%xmm6, %%xmm8, %%xmm7",
               "vpsignb (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSIGNW_128,
               "vpsignw %%xmm6, %%xmm8, %%xmm7",
               "vpsignw (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPSIGND_128,
               "vpsignd %%xmm6, %%xmm8, %%xmm7",
               "vpsignd (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VPMULHRSW_128,
               "vpmulhrsw %%xmm6, %%xmm8, %%xmm7",
               "vpmulhrsw (%%rsi), %%xmm8, %%xmm7")

GEN_test_Monly(VBROADCASTF128,
               "vbroadcastf128 (%%rsi), %%ymm9")
GEN_test_RandM(VPEXTRW_128_0x0,
               "vpextrw $0x0, %%xmm7, %%r14d",
               "vpextrw $0x0, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x1,
               "vpextrw $0x1, %%xmm7, %%r14d",
               "vpextrw $0x1, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x2,
               "vpextrw $0x2, %%xmm7, %%r14d",
               "vpextrw $0x2, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x3,
               "vpextrw $0x3, %%xmm7, %%r14d",
               "vpextrw $0x3, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x4,
               "vpextrw $0x4, %%xmm7, %%r14d",
               "vpextrw $0x4, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x5,
               "vpextrw $0x5, %%xmm7, %%r14d",
               "vpextrw $0x5, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x6,
               "vpextrw $0x6, %%xmm7, %%r14d",
               "vpextrw $0x6, %%xmm7, (%%rsi)")
GEN_test_RandM(VPEXTRW_128_0x7,
               "vpextrw $0x7, %%xmm7, %%r14d",
               "vpextrw $0x7, %%xmm7, (%%rsi)")
GEN_test_RandM(VAESENC,
               "vaesenc %%xmm6, %%xmm8, %%xmm7",
               "vaesenc (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VAESENCLAST,
               "vaesenclast %%xmm6, %%xmm8, %%xmm7",
               "vaesenclast (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VAESDEC,
               "vaesdec %%xmm6, %%xmm8, %%xmm7",
               "vaesdec (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VAESDECLAST,
               "vaesdeclast %%xmm6, %%xmm8, %%xmm7",
               "vaesdeclast (%%rsi), %%xmm8, %%xmm7")

GEN_test_RandM(VAESIMC,
               "vaesimc %%xmm6, %%xmm7",
               "vaesimc (%%rsi), %%xmm7")

GEN_test_RandM(VAESKEYGENASSIST_0x00,
               "vaeskeygenassist $0x00, %%xmm6, %%xmm7",
               "vaeskeygenassist $0x00, (%%rsi), %%xmm7")
GEN_test_RandM(VAESKEYGENASSIST_0x31,
               "vaeskeygenassist $0x31, %%xmm6, %%xmm7",
               "vaeskeygenassist $0x31, (%%rsi), %%xmm7")
GEN_test_RandM(VAESKEYGENASSIST_0xB2,
               "vaeskeygenassist $0xb2, %%xmm6, %%xmm7",
               "vaeskeygenassist $0xb2, (%%rsi), %%xmm7")
GEN_test_RandM(VAESKEYGENASSIST_0xFF,
               "vaeskeygenassist $0xFF, %%xmm6, %%xmm7",
               "vaeskeygenassist $0xFF, (%%rsi), %%xmm7")
GEN_test_RandM(VPCLMULQDQ_0x00,
               "vpclmulqdq $0x00, %%xmm6, %%xmm8, %%xmm7",
               "vpclmulqdq $0x00, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPCLMULQDQ_0x01,
               "vpclmulqdq $0x01, %%xmm6, %%xmm8, %%xmm7",
               "vpclmulqdq $0x01, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPCLMULQDQ_0x10,
               "vpclmulqdq $0x10, %%xmm6, %%xmm8, %%xmm7",
               "vpclmulqdq $0x10, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPCLMULQDQ_0x11,
               "vpclmulqdq $0x11, %%xmm6, %%xmm8, %%xmm7",
               "vpclmulqdq $0x11, (%%rsi), %%xmm8, %%xmm7")
GEN_test_RandM(VPCLMULQDQ_0xFF,
               "vpclmulqdq $0xFF, %%xmm6, %%xmm8, %%xmm7",
               "vpclmulqdq $0xFF, (%%rsi), %%xmm8, %%xmm7")
GEN_test_Monly(VMASKMOVPS_128_LoadForm,
               "vmaskmovps (%%rsi), %%xmm8, %%xmm7;"
               "vxorps %%xmm6, %%xmm6, %%xmm6;"
               "vmaskmovps (%%rsi,%%rsi,4), %%xmm6, %%xmm9")

GEN_test_Monly(VMASKMOVPS_256_LoadForm,
               "vmaskmovps (%%rsi), %%ymm8, %%ymm7;"
               "vxorps %%ymm6, %%ymm6, %%ymm6;"
               "vmaskmovps (%%rsi,%%rsi,4), %%ymm6, %%ymm9")

GEN_test_Monly(VMASKMOVPD_128_LoadForm,
               "vmaskmovpd (%%rsi), %%xmm8, %%xmm7;"
               "vxorpd %%xmm6, %%xmm6, %%xmm6;"
               "vmaskmovpd (%%rsi,%%rsi,4), %%xmm6, %%xmm9")

GEN_test_Monly(VMASKMOVPD_256_LoadForm,
               "vmaskmovpd (%%rsi), %%ymm8, %%ymm7;"
               "vxorpd %%ymm6, %%ymm6, %%ymm6;"
               "vmaskmovpd (%%rsi,%%rsi,4), %%ymm6, %%ymm9")

GEN_test_Monly(VMASKMOVPS_128_StoreForm,
               "vmaskmovps %%xmm8, %%xmm7, (%%rsi);"
               "vxorps %%xmm6, %%xmm6, %%xmm6;"
               "vmaskmovps %%xmm9, %%xmm6, (%%rsi,%%rsi,4)")

GEN_test_Monly(VMASKMOVPS_256_StoreForm,
               "vmaskmovps %%ymm8, %%ymm7, (%%rsi);"
               "vxorps %%ymm6, %%ymm6, %%ymm6;"
               "vmaskmovps %%ymm9, %%ymm6, (%%rsi,%%rsi,4)")

GEN_test_Monly(VMASKMOVPD_128_StoreForm,
               "vmaskmovpd %%xmm8, %%xmm7, (%%rsi);"
               "vxorpd %%xmm6, %%xmm6, %%xmm6;"
               "vmaskmovpd %%xmm9, %%xmm6, (%%rsi,%%rsi,4)")

GEN_test_Monly(VMASKMOVPD_256_StoreForm,
               "vmaskmovpd %%ymm8, %%ymm7, (%%rsi);"
               "vxorpd %%ymm6, %%ymm6, %%ymm6;"
               "vmaskmovpd %%ymm9, %%ymm6, (%%rsi,%%rsi,4)")
   DO_D( VMOVUPD_EtoG_256 );
   DO_D( VMOVUPD_GtoE_256 );

   DO_D( VPINSRQ_128 );
   DO_D( VUCOMISS_128 );
   DO_D( VUCOMISD_128 );
   DO_D( VCVTPS2PD_128 );
   DO_D( VANDNPD_128 );

   DO_D( VPSHUFD_0x39_128 );
   DO_D( VPCMPEQD_128 );
   DO_D( VPEXTRD_128_0x3 );
   DO_D( VPEXTRD_128_0x0 );
   DO_D( VINSERTF128_0x0 );
   DO_D( VINSERTF128_0x1 );
   DO_D( VEXTRACTF128_0x0 );
   DO_D( VEXTRACTF128_0x1 );
   DO_D( VCVTPD2PS_128 );
   /* Test all CMPSS variants; this code is tricky. */
   DO_D( VCMPSS_128_0x0 );
   DO_D( VCMPSS_128_0x1 );
   DO_D( VCMPSS_128_0x2 );
   DO_D( VCMPSS_128_0x3 );
   DO_D( VCMPSS_128_0x4 );
   DO_D( VCMPSS_128_0x5 );
   DO_D( VCMPSS_128_0x6 );
   DO_D( VCMPSS_128_0x7 );
   DO_D( VCMPSS_128_0x8 );
   DO_D( VCMPSS_128_0xA );
   DO_D( VCMPSS_128_0xC );
   DO_D( VCMPSS_128_0xD );
   DO_D( VCMPSS_128_0xE );
   DO_D( VCMPSS_128_0x10 );
   DO_D( VCMPSS_128_0x11 );
   DO_D( VCMPSS_128_0x12 );
   DO_D( VCMPSS_128_0x13 );
   DO_D( VCMPSS_128_0x14 );
   DO_D( VCMPSS_128_0x15 );
   DO_D( VCMPSS_128_0x16 );
   DO_D( VCMPSS_128_0x17 );
   DO_D( VCMPSS_128_0x18 );
   DO_D( VCMPSS_128_0x19 );
   DO_D( VCMPSS_128_0x1A );
   DO_D( VCMPSS_128_0x1C );
   DO_D( VCMPSS_128_0x1D );
   DO_D( VCMPSS_128_0x1E );
   DO_D( VMOVDDUP_XMMorMEM64_to_XMM );
   DO_D( VMOVD_IREGorMEM32_to_XMM );
   DO_D( VMOVQ_XMM_MEM64 );
   DO_D( VMOVDQA_GtoE_256 );
   DO_D( VMOVDQA_GtoE_128 );
   DO_D( VMOVDQU_GtoE_128 );
   DO_D( VMOVDQA_EtoG_256 );
   DO_D( VMOVDQA_EtoG_128 );
   DO_D( VMOVDQU_EtoG_128 );
   DO_D( VMOVAPD_GtoE_128 );
   DO_D( VMOVAPD_GtoE_256 );
   DO_D( VMOVAPS_GtoE_128 );
   DO_D( VMOVAPS_GtoE_256 );
   DO_D( VMOVAPS_EtoG_128 );
   DO_D( VMOVAPD_EtoG_256 );
   DO_D( VMOVAPD_EtoG_128 );
   DO_D( VMOVUPD_GtoE_128 );
   DO_D( VMOVSS_XMM_M32 );
   DO_D( VMOVSD_XMM_M64 );
   DO_D( VMOVSS_M64_XMM );
   DO_D( VMOVSD_M64_XMM );
   DO_D( VINSERTPS_0x39_128 );
   DO_D( VPUNPCKLDQ_128 );
   DO_D( VPACKSSDW_128 );

   DO_D( VPSRLW_0x05_128 );
   DO_D( VPSLLW_0x05_128 );
   DO_D( VPUNPCKLQDQ_128 );
   DO_D( VPINSRD_128 );
   DO_D( VMOVD_XMM_to_MEM32 );

   DO_D( VPSLLDQ_0x05_128 );
   DO_D( VPSRLDQ_0x05_128 );
   DO_D( VPSUBUSB_128 );
   DO_D( VPSUBSB_128 );
   DO_D( VPSLLD_0x05_128 );
   DO_D( VPSRLD_0x05_128 );
   DO_D( VPSRAD_0x05_128 );
   DO_D( VPUNPCKLWD_128 );
   DO_D( VPUNPCKHWD_128 );
   DO_D( VPADDUSB_128 );
   DO_D( VPMULHUW_128 );
   DO_D( VPADDUSW_128 );
   DO_D( VPMULLW_128 );
   DO_D( VPSHUFHW_0x39_128 );
   DO_D( VPSHUFLW_0x39_128 );
   DO_D( VCVTPS2DQ_128 );

   DO_D( VSHUFPS_0x39_128 );
   DO_D( VPCMPEQB_128 );
   DO_D( VMOVHPD_128_StoreForm );

   DO_D( VPMOVMSKB_128 );
   DO_D( VCVTTSS2SI_64 );
   DO_D( VPACKUSWB_128 );
   DO_D( VCVTSS2SD_128 );
   DO_D( VCVTSD2SS_128 );
   DO_D( VMOVD_XMM_to_IREG32 );
   DO_D( VPCMPESTRM_0x45_128 );
   DO_D( VMOVQ_IREGorMEM64_to_XMM );
   DO_D( VMOVUPS_XMM_to_XMMorMEM );
   DO_D( VMOVNTDQ_128 );
   DO_D( VMOVLHPS_128 );

   DO_D( VMOVHLPS_128 );
   DO_D( VMOVQ_XMM_to_IREG64 );
   DO_D( VMOVQ_XMMorMEM64_to_XMM );
   DO_D( VCVTTSS2SI_32 );
   DO_D( VPUNPCKLBW_128 );
   DO_D( VPUNPCKHBW_128 );

   DO_D( VUNPCKLPS_128 );
   DO_D( VCVTSI2SS_128 );

   DO_D( VANDNPS_128 );

   DO_D( VSQRTSD_128 );
   /* Test all CMPSD variants; this code is tricky. */
   DO_D( VCMPSD_128_0x0 );
   DO_D( VCMPSD_128_0x1 );
   DO_D( VCMPSD_128_0x2 );
   DO_D( VCMPSD_128_0x3 );
   DO_D( VCMPSD_128_0x4 );
   DO_D( VCMPSD_128_0x5 );
   DO_D( VCMPSD_128_0x6 );
   DO_D( VCMPSD_128_0x7 );
   DO_D( VCMPSD_128_0x8 );
   DO_D( VCMPSD_128_0xA );
   DO_D( VCMPSD_128_0xC );
   DO_D( VCMPSD_128_0xD );
   DO_D( VCMPSD_128_0xE );
   DO_D( VCMPSD_128_0x10 );
   DO_D( VCMPSD_128_0x11 );
   DO_D( VCMPSD_128_0x12 );
   DO_D( VCMPSD_128_0x13 );
   DO_D( VCMPSD_128_0x14 );
   DO_D( VCMPSD_128_0x15 );
   DO_D( VCMPSD_128_0x16 );
   DO_D( VCMPSD_128_0x17 );
   DO_D( VCMPSD_128_0x18 );
   DO_D( VCMPSD_128_0x19 );
   DO_D( VCMPSD_128_0x1A );
   DO_D( VCMPSD_128_0x1C );
   DO_D( VCMPSD_128_0x1D );
   DO_D( VCMPSD_128_0x1E );
   DO_D( VPSHUFB_128 );
   DO_D( VCVTTSD2SI_32 );
   DO_D( VCVTTSD2SI_64 );
   DO_D( VCVTSI2SS_64 );
   DO_D( VCVTSI2SD_64 );
   DO_D( VCVTSI2SD_32 );

   DO_D( VPMOVZXBW_128 );
   DO_D( VPMOVZXWD_128 );
   DO_D( VPBLENDVB_128 );
   DO_D( VPMINSD_128 );
   DO_D( VPMAXSD_128 );

   DO_D( VMOVUPD_EtoG_128 );

   DO_D( VPCMPEQQ_128 );

   DO_D( VUNPCKLPD_128 );
   DO_D( VUNPCKHPD_128 );
   DO_D( VUNPCKHPS_128 );
   DO_D( VMOVUPS_EtoG_128 );

   DO_D( VPCMPGTQ_128 );
   DO_D( VPEXTRQ_128_0x0 );
   DO_D( VPEXTRQ_128_0x1 );
   DO_D( VPSRLQ_0x05_128 );
   DO_D( VPMULUDQ_128 );
   DO_D( VPSLLQ_0x05_128 );
   DO_D( VPMAXUD_128 );
   DO_D( VPMINUD_128 );
   DO_D( VPMULLD_128 );
   DO_D( VPMAXUW_128 );
   DO_D( VPEXTRW_128_EregOnly_toG_0x0 );
   DO_D( VPEXTRW_128_EregOnly_toG_0x7 );
   DO_D( VPMINUW_128 );
   DO_D( VPHMINPOSUW_128 );
   DO_D( VPMAXSW_128 );
   DO_D( VPMINSW_128 );
   DO_D( VPMAXUB_128 );
   DO_D( VPEXTRB_GtoE_128_0x0 );
   DO_D( VPEXTRB_GtoE_128_0x1 );
   DO_D( VPEXTRB_GtoE_128_0x2 );
   DO_D( VPEXTRB_GtoE_128_0x3 );
   DO_D( VPEXTRB_GtoE_128_0x4 );
   DO_D( VPEXTRB_GtoE_128_0x9 );
   DO_D( VPEXTRB_GtoE_128_0xE );
   DO_D( VPEXTRB_GtoE_128_0xF );
   DO_D( VPMINUB_128 );
   DO_D( VPMAXSB_128 );
   DO_D( VPMINSB_128 );
   DO_D( VPERM2F128_0x00 );
   DO_D( VPERM2F128_0xFF );
   DO_D( VPERM2F128_0x30 );
   DO_D( VPERM2F128_0x21 );
   DO_D( VPERM2F128_0x12 );
   DO_D( VPERM2F128_0x03 );
   DO_D( VPERM2F128_0x85 );
   DO_D( VPERM2F128_0x5A );
   DO_D( VPERMILPD_256_0x0 );
   DO_D( VPERMILPD_256_0xF );
   DO_D( VPERMILPD_256_0xA );
   DO_D( VPERMILPD_256_0x5 );
   DO_D( VPERMILPD_128_0x0 );
   DO_D( VPERMILPD_128_0x3 );
   DO_D( VUNPCKLPD_256 );
   DO_D( VUNPCKHPD_256 );
   DO_D( VSHUFPS_0x39_256 );
   DO_D( VUNPCKLPS_256 );
   DO_D( VUNPCKHPS_256 );

   DO_D( VBROADCASTSD_256 );
   DO_D( VCMPPD_128_0x4 );
   DO_D( VCVTDQ2PD_128 );

   DO_D( VPMOVSXBW_128 );
   DO_D( VPSUBUSW_128 );
   DO_D( VPSUBSW_128 );
   DO_D( VPCMPEQW_128 );

   DO_D( VMOVAPS_EtoG_256 );
   DO_D( VCVTDQ2PD_256 );
   DO_D( VMOVHPD_128_LoadForm );
   DO_D( VCVTPD2PS_256 );
   DO_D( VPUNPCKHDQ_128 );
   DO_D( VBROADCASTSS_128 );
   DO_D( VPMOVSXDQ_128 );
   DO_D( VPMOVSXWD_128 );

   DO_D( VANDNPD_256 );
   DO_D( VANDNPS_256 );

   DO_D( VPERMILPS_256_0x0F );
   DO_D( VPERMILPS_256_0xFA );
   DO_D( VPERMILPS_256_0xA3 );
   DO_D( VPERMILPS_256_0x5A );
   DO_D( VPMULHW_128 );
   DO_D( VPUNPCKHQDQ_128 );
   DO_D( VPSRAW_0x05_128 );
   DO_D( VPCMPGTD_128 );
   DO_D( VPMOVZXBD_128 );
   DO_D( VPMOVSXBD_128 );
   DO_D( VPINSRB_128_1of3 );
   DO_D( VPINSRB_128_2of3 );
   DO_D( VPINSRB_128_3of3 );
   DO_D( VCOMISD_128 );
   DO_D( VCOMISS_128 );
   DO_D( VMOVUPS_YMM_to_YMMorMEM );
   DO_D( VDPPD_128_1of4 );
   DO_D( VDPPD_128_2of4 );
   DO_D( VDPPD_128_3of4 );
   DO_D( VDPPD_128_4of4 );
   DO_D( VPINSRW_128_1of4 );
   DO_D( VPINSRW_128_2of4 );
   DO_D( VPINSRW_128_3of4 );
   DO_D( VPINSRW_128_4of4 );
   DO_D( VBROADCASTSS_256 );
   DO_D( VPALIGNR_128_1of3 );
   DO_D( VPALIGNR_128_2of3 );
   DO_D( VPALIGNR_128_3of3 );
   DO_D( VMOVSD_REG_XMM_1of3 );
   DO_D( VMOVSD_REG_XMM_2of3 );
   DO_D( VMOVSD_REG_XMM_3of3 );
   DO_D( VMOVSS_REG_XMM_1of3 );
   DO_D( VMOVSS_REG_XMM_2of3 );
   DO_D( VMOVSS_REG_XMM_3of3 );
   DO_D( VMOVLPD_128_M64_XMM_XMM );
   DO_D( VMOVLPD_128_XMM_M64 );
   DO_D( VSHUFPD_128_1of2 );
   DO_D( VSHUFPD_128_2of2 );
   DO_D( VSHUFPD_256_1of2 );
   DO_D( VSHUFPD_256_2of2 );
   DO_D( VPERMILPS_128_0x00 );
   DO_D( VPERMILPS_128_0xFE );
   DO_D( VPERMILPS_128_0x30 );
   DO_D( VPERMILPS_128_0x21 );
   DO_D( VPERMILPS_128_0xD7 );
   DO_D( VPERMILPS_128_0xB5 );
   DO_D( VPERMILPS_128_0x85 );
   DO_D( VPERMILPS_128_0x29 );
   DO_D( VBLENDPS_128_1of3 );
   DO_D( VBLENDPS_128_2of3 );
   DO_D( VBLENDPS_128_3of3 );
   DO_D( VBLENDPD_128_1of2 );
   DO_D( VBLENDPD_128_2of2 );
   DO_D( VBLENDPD_256_1of3 );
   DO_D( VBLENDPD_256_2of3 );
   DO_D( VBLENDPD_256_3of3 );
   DO_D( VPBLENDW_128_0x00 );
   DO_D( VPBLENDW_128_0xFE );
   DO_D( VPBLENDW_128_0x30 );
   DO_D( VPBLENDW_128_0x21 );
   DO_D( VPBLENDW_128_0xD7 );
   DO_D( VPBLENDW_128_0xB5 );
   DO_D( VPBLENDW_128_0x85 );
   DO_D( VPBLENDW_128_0x29 );
   DO_D( VMOVUPS_EtoG_256 );
   DO_D( VMOVDQU_GtoE_256 );
   DO_D( VCVTPS2PD_256 );
   DO_D( VCVTTPS2DQ_128 );
   DO_D( VCVTTPS2DQ_256 );
   DO_D( VCVTDQ2PS_128 );
   DO_D( VCVTDQ2PS_256 );
   DO_D( VCVTTPD2DQ_128 );
   DO_D( VCVTTPD2DQ_256 );
   DO_D( VCVTPD2DQ_128 );
   DO_D( VCVTPD2DQ_256 );
   DO_D( VMOVSLDUP_128 );
   DO_D( VMOVSLDUP_256 );
   DO_D( VMOVSHDUP_128 );
   DO_D( VMOVSHDUP_256 );
   DO_D( VPERMILPS_VAR_128 );
   DO_D( VPERMILPD_VAR_128 );
   DO_D( VPERMILPS_VAR_256 );
   DO_D( VPERMILPD_VAR_256 );
   DO_D( VROUNDPS_128_0x0 );
   DO_D( VROUNDPS_128_0x1 );
   DO_D( VROUNDPS_128_0x2 );
   DO_D( VROUNDPS_128_0x3 );
   DO_D( VROUNDPS_128_0x4 );
   DO_D( VROUNDPS_256_0x0 );
   DO_D( VROUNDPS_256_0x1 );
   DO_D( VROUNDPS_256_0x2 );
   DO_D( VROUNDPS_256_0x3 );
   DO_D( VROUNDPS_256_0x4 );
   DO_D( VROUNDPD_128_0x0 );
   DO_D( VROUNDPD_128_0x1 );
   DO_D( VROUNDPD_128_0x2 );
   DO_D( VROUNDPD_128_0x3 );
   DO_D( VROUNDPD_128_0x4 );
   DO_D( VROUNDPD_256_0x0 );
   DO_D( VROUNDPD_256_0x1 );
   DO_D( VROUNDPD_256_0x2 );
   DO_D( VROUNDPD_256_0x3 );
   DO_D( VROUNDPD_256_0x4 );
   DO_D( VROUNDSS_0x0 );
   DO_D( VROUNDSS_0x1 );
   DO_D( VROUNDSS_0x2 );
   DO_D( VROUNDSS_0x3 );
   DO_D( VROUNDSS_0x4 );
   DO_D( VROUNDSS_0x5 );
   DO_D( VROUNDSD_0x0 );
   DO_D( VROUNDSD_0x1 );
   DO_D( VROUNDSD_0x2 );
   DO_D( VROUNDSD_0x3 );
   DO_D( VROUNDSD_0x4 );
   DO_D( VROUNDSD_0x5 );
   DO_D( VPTEST_128_1 );
   DO_D( VPTEST_128_2 );
   DO_D( VPTEST_256_1 );
   DO_D( VPTEST_256_2 );
   DO_D( VTESTPS_128_1 );
   DO_D( VTESTPS_128_2 );
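   /* The _3 variants are the random-data cases described above; they
      are run a fixed 10 times each via DO_N rather than the default
      DO_D count. */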
   DO_N( 10, VTESTPS_128_3 );
   DO_D( VTESTPS_256_1 );
   DO_D( VTESTPS_256_2 );
   DO_N( 10, VTESTPS_256_3 );
   DO_D( VTESTPD_128_1 );
   DO_D( VTESTPD_128_2 );
   DO_N( 10, VTESTPD_128_3 );
   DO_D( VTESTPD_256_1 );
   DO_D( VTESTPD_256_2 );
   DO_N( 10, VTESTPD_256_3 );
   DO_D( VBLENDVPS_128 );
   DO_D( VBLENDVPS_256 );
   DO_D( VBLENDVPD_128 );
   DO_D( VBLENDVPD_256 );
   DO_D( VPMULDQ_128 );
   DO_D( VCMPPD_256_0x4 );
   DO_D( VCMPPS_128_0x4 );
   DO_D( VCMPPS_256_0x4 );
   DO_D( VPCMPGTB_128 );
   DO_D( VPCMPGTW_128 );
   DO_D( VPMADDWD_128 );
   DO_D( VADDSUBPS_128 );
   DO_D( VADDSUBPS_256 );
   DO_D( VADDSUBPD_128 );
   DO_D( VADDSUBPD_256 );
   DO_D( VCVTSS2SI_64 );
   DO_D( VCVTSS2SI_32 );
   DO_D( VCVTSD2SI_32 );
   DO_D( VCVTSD2SI_64 );
   DO_D( VDPPS_128_1of4 );
   DO_D( VDPPS_128_2of4 );
   DO_D( VDPPS_128_3of4 );
   DO_D( VDPPS_128_4of4 );
   DO_D( VDPPS_256_1of4 );
   DO_D( VDPPS_256_2of4 );
   DO_D( VDPPS_256_3of4 );
   DO_D( VDPPS_256_4of4 );
   DO_D( VHADDPS_128 );
   DO_D( VHADDPS_256 );
   DO_D( VHADDPD_128 );
   DO_D( VHADDPD_256 );
   DO_D( VHSUBPS_128 );
   DO_D( VHSUBPS_256 );
   DO_D( VHSUBPD_128 );
   DO_D( VHSUBPD_256 );
   DO_D( VEXTRACTPS_0x0 );
   DO_D( VEXTRACTPS_0x1 );
   DO_D( VEXTRACTPS_0x2 );
   DO_D( VEXTRACTPS_0x3 );

   DO_D( VMOVHPS_128_StoreForm );
   DO_D( VMOVNTDQ_256 );
   DO_D( VMOVHPS_128_LoadForm );
   DO_D( VMOVNTDQA_128 );
   DO_D( VMASKMOVDQU_128 );
   DO_D( VMOVMSKPD_128 );
   DO_D( VMOVMSKPD_256 );
   DO_D( VMOVMSKPS_128 );
   DO_D( VMOVMSKPS_256 );
   DO_D( VMOVNTPD_128 );
   DO_D( VMOVNTPD_256 );
   DO_D( VMOVNTPS_128 );
   DO_D( VMOVNTPS_256 );
   DO_D( VPACKSSWB_128 );
   DO_D( VPADDSB_128 );
   DO_D( VPADDSW_128 );
   DO_D( VPHADDW_128 );
   DO_D( VPHADDD_128 );
   DO_D( VPHADDSW_128 );
   DO_D( VPMADDUBSW_128 );
   DO_D( VPHSUBW_128 );
   DO_D( VPHSUBD_128 );
   DO_D( VPHSUBSW_128 );
   DO_D( VPMOVSXBQ_128 );
   DO_D( VPMOVSXWQ_128 );
   DO_D( VPACKUSDW_128 );
   DO_D( VPMOVZXBQ_128 );
   DO_D( VPMOVZXWQ_128 );
   DO_D( VPMOVZXDQ_128 );
   DO_D( VMPSADBW_128_0x0 );
   DO_D( VMPSADBW_128_0x1 );
   DO_D( VMPSADBW_128_0x2 );
   DO_D( VMPSADBW_128_0x3 );
   DO_D( VMPSADBW_128_0x4 );
   DO_D( VMPSADBW_128_0x5 );
   DO_D( VMPSADBW_128_0x6 );
   DO_D( VMPSADBW_128_0x7 );
   DO_D( VMOVDDUP_YMMorMEM256_to_YMM );
   DO_D( VMOVLPS_128_M64_XMM_XMM );
   DO_D( VMOVLPS_128_XMM_M64 );
   DO_D( VPSADBW_128 );
   DO_D( VPSIGNB_128 );
   DO_D( VPSIGNW_128 );
   DO_D( VPSIGND_128 );
   DO_D( VPMULHRSW_128 );
   DO_D( VBROADCASTF128 );
   DO_D( VPEXTRW_128_0x0 );
   DO_D( VPEXTRW_128_0x1 );
   DO_D( VPEXTRW_128_0x2 );
   DO_D( VPEXTRW_128_0x3 );
   DO_D( VPEXTRW_128_0x4 );
   DO_D( VPEXTRW_128_0x5 );
   DO_D( VPEXTRW_128_0x6 );
   DO_D( VPEXTRW_128_0x7 );

   DO_D( VAESENCLAST );
   DO_D( VAESDECLAST );

   DO_D( VAESKEYGENASSIST_0x00 );
   DO_D( VAESKEYGENASSIST_0x31 );
   DO_D( VAESKEYGENASSIST_0xB2 );
   DO_D( VAESKEYGENASSIST_0xFF );
   DO_D( VPCLMULQDQ_0x00 );
   DO_D( VPCLMULQDQ_0x01 );
   DO_D( VPCLMULQDQ_0x10 );
   DO_D( VPCLMULQDQ_0x11 );
   DO_D( VPCLMULQDQ_0xFF );
   DO_D( VCMPSS_128_0x9 );
   DO_D( VMASKMOVPS_128_LoadForm );
   DO_D( VMASKMOVPS_256_LoadForm );
   DO_D( VMASKMOVPD_128_LoadForm );
   DO_D( VMASKMOVPD_256_LoadForm );
   DO_D( VMASKMOVPS_128_StoreForm );
   DO_D( VMASKMOVPS_256_StoreForm );
   DO_D( VMASKMOVPD_128_StoreForm );
   DO_D( VMASKMOVPD_256_StoreForm );
   DO_D( VCMPSS_128_0xB );
   DO_D( VCMPSS_128_0xF );
   DO_D( VCMPSS_128_0x1B );
   DO_D( VCMPSS_128_0x1F );
   DO_D( VCMPSD_128_0x9 );
   DO_D( VCMPSD_128_0xB );
   DO_D( VCMPSD_128_0xF );
   DO_D( VCMPSD_128_0x1B );
   DO_D( VCMPSD_128_0x1F );

   DO_D( VCMPPD_128_0x0 );
   DO_D( VCMPPD_256_0x0 );
   DO_D( VCMPPS_128_0x0 );
   DO_D( VCMPPS_256_0x0 );

   DO_D( VCMPPD_128_0x1 );
   DO_D( VCMPPD_256_0x1 );
   DO_D( VCMPPS_128_0x1 );
   DO_D( VCMPPS_256_0x1 );

   DO_D( VCMPPD_128_0x2 );
   DO_D( VCMPPD_256_0x2 );
   DO_D( VCMPPS_128_0x2 );
   DO_D( VCMPPS_256_0x2 );

   DO_D( VCMPPD_128_0x3 );
   DO_D( VCMPPD_256_0x3 );
   DO_D( VCMPPS_128_0x3 );
   DO_D( VCMPPS_256_0x3 );
   // The 0x4 group is tested above

   DO_D( VCMPPD_128_0x5 );
   DO_D( VCMPPD_256_0x5 );
   DO_D( VCMPPS_128_0x5 );
   DO_D( VCMPPS_256_0x5 );

   DO_D( VCMPPD_128_0x6 );
   DO_D( VCMPPD_256_0x6 );
   DO_D( VCMPPS_128_0x6 );
   DO_D( VCMPPS_256_0x6 );

   DO_D( VCMPPD_128_0x7 );
   DO_D( VCMPPD_256_0x7 );
   DO_D( VCMPPS_128_0x7 );
   DO_D( VCMPPS_256_0x7 );

   DO_D( VCMPPD_128_0x8 );
   DO_D( VCMPPD_256_0x8 );
   DO_D( VCMPPS_128_0x8 );
   DO_D( VCMPPS_256_0x8 );

   DO_D( VCMPPD_128_0x9 );
   DO_D( VCMPPD_256_0x9 );
   DO_D( VCMPPS_128_0x9 );
   DO_D( VCMPPS_256_0x9 );

   DO_D( VCMPPD_128_0xA );
   DO_D( VCMPPD_256_0xA );
   DO_D( VCMPPS_128_0xA );
   DO_D( VCMPPS_256_0xA );

   DO_D( VCMPPD_128_0xB );
   DO_D( VCMPPD_256_0xB );
   DO_D( VCMPPS_128_0xB );
   DO_D( VCMPPS_256_0xB );

   DO_D( VCMPPD_128_0xC );
   DO_D( VCMPPD_256_0xC );
   DO_D( VCMPPS_128_0xC );
   if (0) DO_D( VCMPPS_256_0xC );  // FIXME probably denorms etc in input
   DO_D( VCMPPD_128_0xD );
   DO_D( VCMPPD_256_0xD );
   DO_D( VCMPPS_128_0xD );
   DO_D( VCMPPS_256_0xD );

   DO_D( VCMPPD_128_0xE );
   DO_D( VCMPPD_256_0xE );
   DO_D( VCMPPS_128_0xE );
   DO_D( VCMPPS_256_0xE );

   DO_D( VCMPPD_128_0xF );
   DO_D( VCMPPD_256_0xF );
   DO_D( VCMPPS_128_0xF );
   DO_D( VCMPPS_256_0xF );

   DO_D( VCMPPD_128_0x10 );
   DO_D( VCMPPD_256_0x10 );
   DO_D( VCMPPS_128_0x10 );
   DO_D( VCMPPS_256_0x10 );

   DO_D( VCMPPD_128_0x11 );
   DO_D( VCMPPD_256_0x11 );
   DO_D( VCMPPS_128_0x11 );
   DO_D( VCMPPS_256_0x11 );

   DO_D( VCMPPD_128_0x12 );
   DO_D( VCMPPD_256_0x12 );
   DO_D( VCMPPS_128_0x12 );
   DO_D( VCMPPS_256_0x12 );

   DO_D( VCMPPD_128_0x13 );
   DO_D( VCMPPD_256_0x13 );
   DO_D( VCMPPS_128_0x13 );
   DO_D( VCMPPS_256_0x13 );

   DO_D( VCMPPD_128_0x14 );
   DO_D( VCMPPD_256_0x14 );
   DO_D( VCMPPS_128_0x14 );
   DO_D( VCMPPS_256_0x14 );

   DO_D( VCMPPD_128_0x15 );
   DO_D( VCMPPD_256_0x15 );
   DO_D( VCMPPS_128_0x15 );
   DO_D( VCMPPS_256_0x15 );

   DO_D( VCMPPD_128_0x16 );
   DO_D( VCMPPD_256_0x16 );
   DO_D( VCMPPS_128_0x16 );
   DO_D( VCMPPS_256_0x16 );

   DO_D( VCMPPD_128_0x17 );
   DO_D( VCMPPD_256_0x17 );
   DO_D( VCMPPS_128_0x17 );
   DO_D( VCMPPS_256_0x17 );

   DO_D( VCMPPD_128_0x18 );
   DO_D( VCMPPD_256_0x18 );
   DO_D( VCMPPS_128_0x18 );
   DO_D( VCMPPS_256_0x18 );

   DO_D( VCMPPD_128_0x19 );
   DO_D( VCMPPD_256_0x19 );
   DO_D( VCMPPS_128_0x19 );
   DO_D( VCMPPS_256_0x19 );

   DO_D( VCMPPD_128_0x1A );
   DO_D( VCMPPD_256_0x1A );
   DO_D( VCMPPS_128_0x1A );
   DO_D( VCMPPS_256_0x1A );

   DO_D( VCMPPD_128_0x1B );
   DO_D( VCMPPD_256_0x1B );
   DO_D( VCMPPS_128_0x1B );
   DO_D( VCMPPS_256_0x1B );

   DO_D( VCMPPD_128_0x1C );
   DO_D( VCMPPD_256_0x1C );
   DO_D( VCMPPS_128_0x1C );
   if (0) DO_D( VCMPPS_256_0x1C );  // FIXME probably denorms etc in input
   DO_D( VCMPPD_128_0x1D );
   DO_D( VCMPPD_256_0x1D );
   DO_D( VCMPPS_128_0x1D );
   DO_D( VCMPPS_256_0x1D );

   DO_D( VCMPPD_128_0x1E );
   DO_D( VCMPPD_256_0x1E );
   DO_D( VCMPPS_128_0x1E );
   DO_D( VCMPPS_256_0x1E );

   DO_D( VCMPPD_128_0x1F );
   DO_D( VCMPPD_256_0x1F );
   DO_D( VCMPPS_128_0x1F );
   DO_D( VCMPPS_256_0x1F );