include 'avx512f.inc'
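
; Three-operand byte mask-register operations (KANDB, KANDNB, KORB, KXNORB, KXORB, KADDB), encoded as VEX.L1.66.0F.W0.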
iterate <instr,opcode>, kandb,41h, kandnb,42h, korb,45h, kxnorb,46h, kxorb,47h, kaddb,4Ah

	macro instr? dest*,src*,src2*
		AVX_512.parse_operand @dest,dest
		AVX_512.parse_operand @src,src
		AVX_512.parse_operand @src2,src2
		if @dest.type = 'maskreg' & @src.type = 'maskreg' & @src2.type = 'maskreg'
			AVX.store_instruction 32,VEX_66_0F_W0,opcode,@src2,@dest.rm,@src.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
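
; Two-operand byte mask-register operations (KNOTB, KORTESTB, KTESTB), encoded as VEX.L0.66.0F.W0.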
iterate <instr,opcode>, knotb,44h, kortestb,98h, ktestb,99h

	macro instr? dest*,src*
		AVX_512.parse_operand @dest,dest
		AVX_512.parse_operand @src,src
		if @dest.type = 'maskreg' & @src.type = 'maskreg'
			AVX.store_instruction 16,VEX_66_0F_W0,opcode,@src,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
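
; KMOVB: move a byte mask between a mask register, memory and a 32-bit general register (opcodes 90h-93h).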
macro kmovb? dest*,src*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	if @dest.type = 'maskreg' & (@src.type = 'maskreg' | @src.type = 'mem')
		if @src.type = 'mem' & @src.size and not 1
			err 'invalid operand size'
		end if
		AVX.store_instruction 16,VEX_66_0F_W0,90h,@src,@dest.rm
	else if @dest.type = 'mem' & @src.type = 'maskreg'
		if @dest.size and not 1
			err 'invalid operand size'
		end if
		AVX.store_instruction 16,VEX_66_0F_W0,91h,@dest,@src.rm
	else if @dest.type = 'maskreg' & @src.type = 'reg'
		if @src.size <> 4
			err 'invalid operand size'
		end if
		AVX.store_instruction 16,VEX_66_0F_W0,92h,@src,@dest.rm
	else if @dest.type = 'reg' & @src.type = 'maskreg'
		if @dest.size <> 4
			err 'invalid operand size'
		end if
		AVX.store_instruction 16,VEX_66_0F_W0,93h,@src,@dest.rm
	else
		err 'invalid combination of operands'
	end if
end macro
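
; KADDW uses the no-prefix VEX.L1.0F.W0 encoding (opcode 4Ah).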
macro kaddw? dest*,src*,src2*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	AVX_512.parse_operand @src2,src2
	if @dest.type = 'maskreg' & @src.type = 'maskreg' & @src2.type = 'maskreg'
		AVX.store_instruction 32,VEX_0F_W0,4Ah,@src2,@dest.rm,@src.rm
	else
		err 'invalid combination of operands'
	end if
end macro
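
; KTESTW uses the no-prefix VEX.L0.0F.W0 encoding (opcode 99h).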
macro ktestw? dest*,src*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	if @dest.type = 'maskreg' & @src.type = 'maskreg'
		AVX.store_instruction 16,VEX_0F_W0,99h,@src,@dest.rm
	else
		err 'invalid combination of operands'
	end if
end macro
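
; KSHIFTRB/KSHIFTLB: shift a byte mask right or left by an immediate count.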
iterate <instr,vex_mpw,opcode>, kshiftrb,VEX_66_0F3A_W0,30h, kshiftlb,VEX_66_0F3A_W0,32h

	macro instr? dest*,src*,aux*
		AVX_512.parse_operand @dest,dest
		AVX_512.parse_operand @src,src
		x86.parse_operand @aux,aux
		if @dest.type = 'maskreg' & @src.type = 'maskreg' & @aux.type = 'imm'
			if @aux.size and not 1
				err 'invalid operand size'
			end if
			AVX.store_instruction 16,vex_mpw,opcode,@src,@dest.rm,,1,@aux.imm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
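
; Packed logical operations VAND/VANDN/VOR/VXOR on PD and PS data, with embedded-broadcast support.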
iterate <instr,opcode>, and,54h, andn,55h, or,56h, xor,57h

	macro v#instr#pd? dest*,src*,src2*&
		AVX_512.basic_instruction_bcst VEX_66_0F_W0,EVEX_W1+EVEX_VL,opcode,8,dest,src,src2
	end macro

	macro v#instr#ps? dest*,src*,src2*&
		AVX_512.basic_instruction_bcst VEX_0F_W0,EVEX_AS_VEX+EVEX_VL,opcode,4,dest,src,src2
	end macro

end iterate
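
; VBROADCASTF32X2/VBROADCASTI32X2: broadcast a 64-bit pair of dwords to a YMM or ZMM destination.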
iterate <instr,opcode>, vbroadcastf32x2,19h, vbroadcasti32x2,59h

	macro instr? dest*,src*
		AVX_512.parse_operand_k1z @dest,dest
		AVX_512.parse_operand @src,src
		if @dest.type = 'mmreg' & (@src.type = 'mmreg' | @src.type = 'mem')
			if @dest.size = 16 | (@src.type = 'mmreg' & @src.size <> 16) | (@src.type = 'mem' & @src.size and not 8)
				err 'invalid operand size'
			end if
			@src.memsize = 8
			AVX_512.store_instruction @dest.size,VEX_66_0F38_W0,EVEX_REQUIRED+EVEX_VL,opcode,@src,@dest.mask,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
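
; Broadcasts of 256-bit and 128-bit tuples from memory; msize is the required source size in bytes.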
iterate <instr,vex_mpw,opcode,msize>, vbroadcastf32x8,VEX_66_0F38_W0,1Bh,32, vbroadcastf64x2,VEX_66_0F38_W1,1Ah,16, \
				       vbroadcasti32x8,VEX_66_0F38_W0,5Bh,32, vbroadcasti64x2,VEX_66_0F38_W1,5Ah,16

	macro instr? dest*,src*
		AVX_512.parse_operand_k1z @dest,dest
		AVX_512.parse_operand @src,src
		if @dest.type = 'mmreg' & @src.type = 'mem'
			if @dest.size <= msize | @src.size and not msize
				err 'invalid operand size'
			end if
			@src.memsize = msize
			AVX_512.store_instruction @dest.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,@dest.mask,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
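
; Conversions between packed doubles and packed signed/unsigned quadwords, with embedded rounding.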
iterate <instr,vex_mpw,opcode>, vcvtpd2qq,VEX_66_0F_W1,7Bh, vcvtpd2uqq,VEX_66_0F_W1,79h, \
				vcvtqq2pd,VEX_F3_0F_W1,0E6h, vcvtuqq2pd,VEX_F3_0F_W1,7Ah

	macro instr? dest*,src*&
		AVX_512.single_source_instruction_bcst_er vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,8,dest,src
	end macro

end iterate
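
; Truncating conversions from packed doubles to packed quadwords, with SAE.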
iterate <instr,vex_mpw,opcode>, vcvttpd2qq,VEX_66_0F_W1,7Ah, vcvttpd2uqq,VEX_66_0F_W1,78h

	macro instr? dest*,src*&
		AVX_512.single_source_instruction_bcst_sae vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,8,dest,src
	end macro

end iterate
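
; VCVTPS2QQ/VCVTPS2UQQ: the single-precision source is half the width of the destination.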
iterate <instr,vex_mpw,opcode>, vcvtps2qq,VEX_66_0F_W0,7Bh, vcvtps2uqq,VEX_66_0F_W0,79h

	macro instr? dest*,src_er*&
		AVX_512.parse_operand_k1z @dest,dest
		match src=,er, src_er
			AVX_512.parse_operand @src,src
			AVX_512.parse_er @src,er,32
		else
			AVX_512.parse_operand_bcst @src,src_er,4
		end match
		if @dest.type = 'mmreg' & (@src.type = 'mem' | @src.type = 'mmreg')
			if (@src.type = 'mem' & @src.size and not (@dest.size shr 1)) | (@src.type = 'mmreg' & (@dest.size shr 1 - 1) and not 15 + 16 <> @src.size)
				err 'invalid operand size'
			end if
			if @src.memsize = 0
				@src.memsize = @dest.size shr 1
			end if
			AVX_512.store_instruction @dest.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,@dest.mask,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
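
; VCVTQQ2PS/VCVTUQQ2PS: the destination is half the width of the source, so the source operand selects the vector length.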
iterate <instr,vex_mpw,opcode>, vcvtqq2ps,VEX_0F_W1,5Bh, vcvtuqq2ps,VEX_F2_0F_W1,7Ah

	macro instr? dest*,src_er*&
		AVX_512.parse_operand_k1z @dest,dest
		match src=,er, src_er
			AVX_512.parse_operand @src,src
			AVX_512.parse_er @src,er
		else
			AVX_512.parse_operand_bcst @src,src_er,8
		end match
		if @dest.type = 'mmreg' & (@src.type = 'mem' | @src.type = 'mmreg')
			if @src.size = 0
				if @src.memsize = 0
					err 'operand size not specified'
				else
					@src.size = @src.memsize
				end if
			end if
			if (@src.size shr 1 - 1) and not 15 + 16 <> @dest.size | @src.size > 64
				err 'invalid operand size'
			end if
			AVX_512.store_instruction @src.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,@dest.mask,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
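
; VCVTTPS2QQ/VCVTTPS2UQQ: truncating half-width conversions, with SAE.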
iterate <instr,vex_mpw,opcode>, vcvttps2qq,VEX_66_0F_W0,7Ah, vcvttps2uqq,VEX_66_0F_W0,78h

	macro instr? dest*,src_sae*&
		AVX_512.parse_operand_k1z @dest,dest
		match src=,sae, src_sae
			AVX_512.parse_operand @src,src
			AVX_512.parse_sae @src,sae
		else
			AVX_512.parse_operand_bcst @src,src_sae,4
		end match
		if @dest.type = 'mmreg' & (@src.type = 'mem' | @src.type = 'mmreg')
			if (@src.type = 'mem' & @src.size and not (@dest.size shr 1)) | (@src.type = 'mmreg' & (@dest.size shr 1 - 1) and not 15 + 16 <> @src.size)
				err 'invalid operand size'
			end if
			if @src.memsize = 0
				@src.memsize = @dest.size shr 1
			end if
			AVX_512.store_instruction @dest.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,@dest.mask,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
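
; Extraction of 256-bit (32x8) or 128-bit (64x2) tuples selected by an immediate.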
iterate <instr,vex_mpw,opcode,msize>, vextractf32x8,VEX_66_0F3A_W0,1Bh,32, vextractf64x2,VEX_66_0F3A_W1,19h,16, \
				       vextracti32x8,VEX_66_0F3A_W0,3Bh,32, vextracti64x2,VEX_66_0F3A_W1,39h,16

	macro instr? dest*,src*,aux*
		AVX_512.parse_operand_k1z @dest,dest
		AVX_512.parse_operand @src,src
		x86.parse_operand @aux,aux
		if (@dest.type = 'mmreg' | @dest.type = 'mem') & @src.type = 'mmreg' & @aux.type = 'imm'
			if @dest.size and not msize | @src.size <= msize | @aux.size and not 1
				err 'invalid operand size'
			end if
			@dest.memsize = msize
			AVX_512.store_instruction @src.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@dest,@dest.mask,@src.rm,,1,@aux.imm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
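
; Insertion of 256-bit (32x8) or 128-bit (64x2) tuples selected by an immediate.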
iterate <instr,vex_mpw,opcode,msize>, vinsertf32x8,VEX_66_0F3A_W0,1Ah,32, vinsertf64x2,VEX_66_0F3A_W1,18h,16, \
				       vinserti32x8,VEX_66_0F3A_W0,3Ah,32, vinserti64x2,VEX_66_0F3A_W1,38h,16

	macro instr? dest*,src*,src2*,aux*
		AVX_512.parse_operand_k1z @dest,dest
		AVX_512.parse_operand @src,src
		AVX_512.parse_operand @src2,src2
		x86.parse_operand @aux,aux
		if @dest.type = 'mmreg' & @src.type = 'mmreg' & (@src2.type = 'mmreg' | @src2.type = 'mem') & @aux.type = 'imm'
			if @dest.size <= msize | @src.size <= msize | @src2.size and not msize | @aux.size and not 1
				err 'invalid operand size'
			end if
			@src2.memsize = msize
			AVX_512.store_instruction @dest.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src2,@dest.mask,@dest.rm,@src.rm,1,@aux.imm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
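
; VFPCLASSPD/VFPCLASSPS: classify packed elements into a mask register; the source operand selects the vector length.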
iterate <instr,unit,vex_mpw>, vfpclasspd,8,VEX_66_0F3A_W1, vfpclassps,4,VEX_66_0F3A_W0

	macro instr? dest*,src*,aux*
		AVX_512.parse_operand_k1 @dest,dest
		AVX_512.parse_operand_bcst @src,src,unit
		x86.parse_operand @aux,aux
		if @dest.type = 'maskreg' & (@src.type = 'mem' | @src.type = 'mmreg') & @aux.type = 'imm'
			if @src.size = 0
				err 'operand size not specified'
			else if (@src.size <> 16 & @src.size <> 32 & @src.size <> 64) | @aux.size and not 1
				err 'invalid operand size'
			end if
			AVX_512.store_instruction @src.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,66h,@src,@dest.mask,@dest.rm,,1,@aux.imm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
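
; VFPCLASSSD/VFPCLASSSS: classify a scalar element into a mask register.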
iterate <instr,unit,vex_mpw>, vfpclasssd,8,VEX_66_0F3A_W1, vfpclassss,4,VEX_66_0F3A_W0

	macro instr? dest*,src*,aux*
		AVX_512.parse_operand_k1 @dest,dest
		AVX_512.parse_operand @src,src
		x86.parse_operand @aux,aux
		if @dest.type = 'maskreg' & (@src.type = 'mem' | @src.type = 'mmreg') & @aux.type = 'imm'
			if (@src.type = 'mem' & @src.size and not unit) | (@src.type = 'mmreg' & @src.size <> 16) | @aux.size and not 1
				err 'invalid operand size'
			end if
			@src.memsize = unit
			AVX_512.store_instruction @src.size,vex_mpw,EVEX_REQUIRED,67h,@src,@dest.mask,@dest.rm,,1,@aux.imm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
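
; VPEXTRD: extract a dword to a general register or memory.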
macro vpextrd? dest*,src*,aux*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	x86.parse_operand @aux,aux
	if (@dest.type = 'reg' | @dest.type = 'mem') & @src.type = 'mmreg' & @aux.type = 'imm'
		if (@dest.type = 'reg' & @dest.size <> 4 & (x86.mode < 64 | @dest.size <> 8)) | (@dest.type = 'mem' & @dest.size and not 4) | @src.size <> 16 | @aux.size and not 1
			err 'invalid operand size'
		end if
		@dest.memsize = 4
		AVX_512.store_instruction 16,VEX_66_0F3A_W0,EVEX_AS_VEX,16h,@dest,0,@src.rm,,1,@aux.imm
	else
		err 'invalid combination of operands'
	end if
end macro
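
; VPINSRD: insert a dword from a general register or memory.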
macro vpinsrd? dest*,src*,src2*,aux*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	AVX_512.parse_operand @src2,src2
	x86.parse_operand @aux,aux
	if @dest.type = 'mmreg' & @src.type = 'mmreg' & (@src2.type = 'reg' | @src2.type = 'mem') & @aux.type = 'imm'
		if @dest.size <> 16 | @src.size <> 16 | (@src2.type = 'reg' & @src2.size <> 4) | (@src2.type = 'mem' & @src2.size and not 4) | @aux.size and not 1
			err 'invalid operand size'
		end if
		@src2.memsize = 4
		AVX_512.store_instruction 16,VEX_66_0F3A_W0,EVEX_AS_VEX,22h,@src2,0,@dest.rm,@src.rm,1,@aux.imm
	else
		err 'invalid combination of operands'
	end if
end macro
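
; VPEXTRQ: extract a qword; available only in long mode.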
macro vpextrq? dest*,src*,aux*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	x86.parse_operand @aux,aux
	if (@dest.type = 'reg' | @dest.type = 'mem') & @src.type = 'mmreg' & @aux.type = 'imm'
		if @dest.size and not 8 | @src.size <> 16 | @aux.size and not 1
			err 'invalid operand size'
		end if
		if x86.mode < 64
			err 'instruction requires long mode'
		end if
		@dest.memsize = 8
		AVX_512.store_instruction 16,VEX_66_0F3A_W1,EVEX_AS_VEX,16h,@dest,0,@src.rm,,1,@aux.imm
	else
		err 'invalid combination of operands'
	end if
end macro
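
; VPINSRQ: insert a qword; available only in long mode.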
macro vpinsrq? dest*,src*,src2*,aux*
	AVX_512.parse_operand @dest,dest
	AVX_512.parse_operand @src,src
	AVX_512.parse_operand @src2,src2
	x86.parse_operand @aux,aux
	if @dest.type = 'mmreg' & @src.type = 'mmreg' & (@src2.type = 'reg' | @src2.type = 'mem') & @aux.type = 'imm'
		if @dest.size <> 16 | @src.size <> 16 | @src2.size and not 8 | @aux.size and not 1
			err 'invalid operand size'
		end if
		if x86.mode < 64
			err 'instruction requires long mode'
		end if
		@src2.memsize = 8
		AVX_512.store_instruction 16,VEX_66_0F3A_W1,EVEX_AS_VEX,22h,@src2,0,@dest.rm,@src.rm,1,@aux.imm
	else
		err 'invalid combination of operands'
	end if
end macro
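
; VPMULLQ: packed quadword multiply storing the low 64 bits of each product.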
macro vpmullq? dest*,src*,src2*&
	AVX_512.basic_instruction_bcst VEX_66_0F38_W1,EVEX_REQUIRED+EVEX_VL,40h,8,dest,src,src2
end macro
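
; VPMOVM2D/VPMOVM2Q: expand each mask bit into an all-ones or all-zeros element.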
iterate <instr,vex_mpw,opcode>, vpmovm2d,VEX_F3_0F38_W0,38h, vpmovm2q,VEX_F3_0F38_W1,38h

	macro instr? dest*,src*
		AVX_512.parse_operand @dest,dest
		AVX_512.parse_operand @src,src
		if @dest.type = 'mmreg' & @src.type = 'maskreg'
			AVX_512.store_instruction @dest.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,0,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
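
; VPMOVD2M/VPMOVQ2M: collect the sign bit of each element into a mask register.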
iterate <instr,vex_mpw,opcode>, vpmovd2m,VEX_F3_0F38_W0,39h, vpmovq2m,VEX_F3_0F38_W1,39h

	macro instr? dest*,src*
		AVX_512.parse_operand @dest,dest
		AVX_512.parse_operand @src,src
		if @dest.type = 'maskreg' & @src.type = 'mmreg'
			AVX_512.store_instruction @src.size,vex_mpw,EVEX_REQUIRED+EVEX_VL,opcode,@src,0,@dest.rm
		else
			err 'invalid combination of operands'
		end if
	end macro

end iterate
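
; VRANGE: range operation (min/max variants selected by the immediate byte) in packed and scalar forms.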
iterate <instr,opcode>, range,50h

	macro v#instr#pd? dest*,src*,src2*,aux*&
		AVX_512.basic_instruction_bcst_sae_imm8 VEX_66_0F3A_W1,EVEX_REQUIRED+EVEX_VL,opcode,8,dest,src,src2,aux
	end macro

	macro v#instr#ps? dest*,src*,src2*,aux*&
		AVX_512.basic_instruction_bcst_sae_imm8 VEX_66_0F3A_W0,EVEX_REQUIRED+EVEX_VL,opcode,4,dest,src,src2,aux
	end macro

	macro v#instr#sd? dest*,src*,src2*,aux*&
		AVX_512.basic_instruction_sae_imm8 VEX_66_0F3A_W1,EVEX_REQUIRED,opcode+1,8,dest,src,src2,aux
	end macro

	macro v#instr#ss? dest*,src*,src2*,aux*&
		AVX_512.basic_instruction_sae_imm8 VEX_66_0F3A_W0,EVEX_REQUIRED,opcode+1,4,dest,src,src2,aux
	end macro

end iterate
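
; VREDUCE: reduction transformation controlled by the immediate byte, in packed and scalar forms.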
macro vreducepd? dest*,src*,aux*&
	AVX_512.single_source_instruction_bcst_sae_imm8 VEX_66_0F3A_W1,EVEX_REQUIRED+EVEX_VL,56h,8,dest,src,aux
end macro

macro vreduceps? dest*,src*,aux*&
	AVX_512.single_source_instruction_bcst_sae_imm8 VEX_66_0F3A_W0,EVEX_REQUIRED+EVEX_VL,56h,4,dest,src,aux
end macro

macro vreducesd? dest*,src*,src2*,aux*&
	AVX_512.basic_instruction_sae_imm8 VEX_66_0F3A_W1,EVEX_REQUIRED,57h,8,dest,src,src2,aux
end macro

macro vreducess? dest*,src*,src2*,aux*&
	AVX_512.basic_instruction_sae_imm8 VEX_66_0F3A_W0,EVEX_REQUIRED,57h,4,dest,src,src2,aux
end macro