1 //===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 // This file provides pattern fragments useful for SIMD instructions.
11 //===----------------------------------------------------------------------===//
13 //===----------------------------------------------------------------------===//
14 // MMX specific DAG Nodes.
15 //===----------------------------------------------------------------------===//
17 // Low word of MMX to GPR.
18 def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1,
19 [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>;
20 // GPR to low word of MMX.
21 def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1,
22 [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>;
24 //===----------------------------------------------------------------------===//
25 // MMX Pattern Fragments
26 //===----------------------------------------------------------------------===//
28 def load_mmx : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>;
30 //===----------------------------------------------------------------------===//
31 // SSE specific DAG Nodes.
32 //===----------------------------------------------------------------------===//
34 def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
35 SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
38 def X86fmin : SDNode<"X86ISD::FMIN", SDTFPBinOp>;
39 def X86fmax : SDNode<"X86ISD::FMAX", SDTFPBinOp>;
40 def X86fmins : SDNode<"X86ISD::FMINS", SDTFPBinOp>;
41 def X86fmaxs : SDNode<"X86ISD::FMAXS", SDTFPBinOp>;
43 // Commutative and Associative FMIN and FMAX.
44 def X86fminc : SDNode<"X86ISD::FMINC", SDTFPBinOp,
45 [SDNPCommutative, SDNPAssociative]>;
46 def X86fmaxc : SDNode<"X86ISD::FMAXC", SDTFPBinOp,
47 [SDNPCommutative, SDNPAssociative]>;
49 def X86strict_fmin : SDNode<"X86ISD::STRICT_FMIN", SDTFPBinOp,
51 def X86strict_fmax : SDNode<"X86ISD::STRICT_FMAX", SDTFPBinOp,
54 def X86any_fmin : PatFrags<(ops node:$src1, node:$src2),
55 [(X86strict_fmin node:$src1, node:$src2),
56 (X86fmin node:$src1, node:$src2)]>;
57 def X86any_fmax : PatFrags<(ops node:$src1, node:$src2),
58 [(X86strict_fmax node:$src1, node:$src2),
59 (X86fmax node:$src1, node:$src2)]>;
61 def X86fand : SDNode<"X86ISD::FAND", SDTFPBinOp,
62 [SDNPCommutative, SDNPAssociative]>;
63 def X86for : SDNode<"X86ISD::FOR", SDTFPBinOp,
64 [SDNPCommutative, SDNPAssociative]>;
65 def X86fxor : SDNode<"X86ISD::FXOR", SDTFPBinOp,
66 [SDNPCommutative, SDNPAssociative]>;
67 def X86fandn : SDNode<"X86ISD::FANDN", SDTFPBinOp>;
68 def X86frsqrt : SDNode<"X86ISD::FRSQRT", SDTFPUnaryOp>;
69 def X86frcp : SDNode<"X86ISD::FRCP", SDTFPUnaryOp>;
70 def X86fhadd : SDNode<"X86ISD::FHADD", SDTFPBinOp>;
71 def X86fhsub : SDNode<"X86ISD::FHSUB", SDTFPBinOp>;
72 def X86hadd : SDNode<"X86ISD::HADD", SDTIntBinOp>;
73 def X86hsub : SDNode<"X86ISD::HSUB", SDTIntBinOp>;
74 def X86comi : SDNode<"X86ISD::COMI", SDTX86FCmp>;
75 def X86ucomi : SDNode<"X86ISD::UCOMI", SDTX86FCmp>;
76 def X86comi512 : SDNode<"X86ISD::COMX", SDTX86FCmp>;
77 def X86ucomi512 : SDNode<"X86ISD::UCOMX", SDTX86FCmp>;
78 def SDTX86Cmps : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisSameAs<0, 1>,
79 SDTCisSameAs<1, 2>, SDTCisVT<3, i8>]>;
80 def X86cmps : SDNode<"X86ISD::FSETCC", SDTX86Cmps>;
82 def X86pshufb : SDNode<"X86ISD::PSHUFB",
83 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>,
85 def X86psadbw : SDNode<"X86ISD::PSADBW",
86 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
87 SDTCVecEltisVT<1, i8>,
88 SDTCisSameSizeAs<0,1>,
89 SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
90 def SDTX86PSADBW : SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
91 SDTCVecEltisVT<1, i8>,
92 SDTCisSameSizeAs<0,1>,
93 SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;
94 def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW", SDTX86PSADBW>;
95 def X86andnp : SDNode<"X86ISD::ANDNP",
96 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
98 def X86multishift : SDNode<"X86ISD::MULTISHIFT",
99 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
100 SDTCisSameAs<1,2>]>>;
101 def X86pextrb : SDNode<"X86ISD::PEXTRB",
102 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>,
104 def X86pextrw : SDNode<"X86ISD::PEXTRW",
105 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>,
107 def X86pinsrb : SDNode<"X86ISD::PINSRB",
108 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
109 SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
110 def X86pinsrw : SDNode<"X86ISD::PINSRW",
111 SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>,
112 SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
113 def X86insertps : SDNode<"X86ISD::INSERTPS",
114 SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>,
115 SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>;
116 def X86vzmovl : SDNode<"X86ISD::VZEXT_MOVL",
117 SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>;
119 def X86vzld : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad,
120 [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
121 def X86vextractst : SDNode<"X86ISD::VEXTRACT_STORE", SDTStore,
122 [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
123 def X86VBroadcastld : SDNode<"X86ISD::VBROADCAST_LOAD", SDTLoad,
124 [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
125 def X86SubVBroadcastld : SDNode<"X86ISD::SUBV_BROADCAST_LOAD", SDTLoad,
126 [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
128 def SDTVtrunc : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
129 SDTCisInt<0>, SDTCisInt<1>,
130 SDTCisOpSmallerThanOp<0, 1>]>;
131 def SDTVmtrunc : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
132 SDTCisInt<0>, SDTCisInt<1>,
133 SDTCisOpSmallerThanOp<0, 1>,
135 SDTCVecEltisVT<3, i1>,
136 SDTCisSameNumEltsAs<1, 3>]>;
138 def X86vtrunc : SDNode<"X86ISD::VTRUNC", SDTVtrunc>;
139 def X86vtruncs : SDNode<"X86ISD::VTRUNCS", SDTVtrunc>;
140 def X86vtruncus : SDNode<"X86ISD::VTRUNCUS", SDTVtrunc>;
141 def X86vmtrunc : SDNode<"X86ISD::VMTRUNC", SDTVmtrunc>;
142 def X86vmtruncs : SDNode<"X86ISD::VMTRUNCS", SDTVmtrunc>;
143 def X86vmtruncus : SDNode<"X86ISD::VMTRUNCUS", SDTVmtrunc>;
145 def X86vfpext : SDNode<"X86ISD::VFPEXT",
146 SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
147 SDTCisFP<1>, SDTCisVec<1>]>>;
149 def X86strict_vfpext : SDNode<"X86ISD::STRICT_VFPEXT",
150 SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
151 SDTCisFP<1>, SDTCisVec<1>]>,
154 def X86any_vfpext : PatFrags<(ops node:$src),
155 [(X86strict_vfpext node:$src),
156 (X86vfpext node:$src)]>;
158 def X86vfpround: SDNode<"X86ISD::VFPROUND",
159 SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
160 SDTCisFP<1>, SDTCisVec<1>,
161 SDTCisOpSmallerThanOp<0, 1>]>>;
162 def X86vfpround2 : SDNode<"X86ISD::VFPROUND2",
163 SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
164 SDTCisFP<1>, SDTCisVec<1>,
166 SDTCisOpSmallerThanOp<0, 1>]>>;
168 def X86strict_vfpround: SDNode<"X86ISD::STRICT_VFPROUND",
169 SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
170 SDTCisFP<1>, SDTCisVec<1>,
171 SDTCisOpSmallerThanOp<0, 1>]>,
174 def X86any_vfpround : PatFrags<(ops node:$src),
175 [(X86strict_vfpround node:$src),
176 (X86vfpround node:$src)]>;
178 def X86frounds : SDNode<"X86ISD::VFPROUNDS",
179 SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
181 SDTCisFP<2>, SDTCisVec<2>,
182 SDTCisSameSizeAs<0, 2>]>>;
184 def X86froundsRnd: SDNode<"X86ISD::VFPROUNDS_RND",
185 SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
187 SDTCisFP<2>, SDTCisVec<2>,
188 SDTCisSameSizeAs<0, 2>,
191 def X86fpexts : SDNode<"X86ISD::VFPEXTS",
192 SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
194 SDTCisFP<2>, SDTCisVec<2>,
195 SDTCisSameSizeAs<0, 2>]>>;
196 def X86fpextsSAE : SDNode<"X86ISD::VFPEXTS_SAE",
197 SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
199 SDTCisFP<2>, SDTCisVec<2>,
200 SDTCisSameSizeAs<0, 2>]>>;
202 def X86vmfpround: SDNode<"X86ISD::VMFPROUND",
203 SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
204 SDTCisFP<1>, SDTCisVec<1>,
206 SDTCVecEltisVT<3, i1>,
207 SDTCisSameNumEltsAs<1, 3>]>>;
209 def X86vshiftimm : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
210 SDTCisVT<2, i8>, SDTCisInt<0>]>;
212 def X86vshldq : SDNode<"X86ISD::VSHLDQ", X86vshiftimm>;
213 def X86vshrdq : SDNode<"X86ISD::VSRLDQ", X86vshiftimm>;
214 def X86pcmpeq : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>;
215 def X86pcmpgt : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>;
217 def X86cmpp : SDNode<"X86ISD::CMPP", SDTX86VFCMP>;
218 def X86strict_cmpp : SDNode<"X86ISD::STRICT_CMPP", SDTX86VFCMP, [SDNPHasChain]>;
219 def X86any_cmpp : PatFrags<(ops node:$src1, node:$src2, node:$src3),
220 [(X86strict_cmpp node:$src1, node:$src2, node:$src3),
221 (X86cmpp node:$src1, node:$src2, node:$src3)]>;
224 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
225 SDTCisVec<1>, SDTCisSameAs<2, 1>,
226 SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>;
228 def X86MaskCmpMaskCC :
229 SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
230 SDTCisVec<1>, SDTCisSameAs<2, 1>,
231 SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>, SDTCisSameAs<4, 0>]>;
232 def X86CmpMaskCCScalar :
233 SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>,
236 def X86cmpm : SDNode<"X86ISD::CMPM", X86CmpMaskCC>;
237 def X86cmpmm : SDNode<"X86ISD::CMPMM", X86MaskCmpMaskCC>;
238 def X86strict_cmpm : SDNode<"X86ISD::STRICT_CMPM", X86CmpMaskCC, [SDNPHasChain]>;
239 def X86any_cmpm : PatFrags<(ops node:$src1, node:$src2, node:$src3),
240 [(X86strict_cmpm node:$src1, node:$src2, node:$src3),
241 (X86cmpm node:$src1, node:$src2, node:$src3)]>;
242 def X86cmpmmSAE : SDNode<"X86ISD::CMPMM_SAE", X86MaskCmpMaskCC>;
243 def X86cmpms : SDNode<"X86ISD::FSETCCM", X86CmpMaskCCScalar>;
244 def X86cmpmsSAE : SDNode<"X86ISD::FSETCCM_SAE", X86CmpMaskCCScalar>;
246 def X86phminpos: SDNode<"X86ISD::PHMINPOS",
247 SDTypeProfile<1, 1, [SDTCisVT<0, v8i16>, SDTCisVT<1, v8i16>]>>;
249 def X86vshiftuniform : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
250 SDTCisVec<2>, SDTCisInt<0>,
253 def X86vshl : SDNode<"X86ISD::VSHL", X86vshiftuniform>;
254 def X86vsrl : SDNode<"X86ISD::VSRL", X86vshiftuniform>;
255 def X86vsra : SDNode<"X86ISD::VSRA", X86vshiftuniform>;
257 def X86vshiftvariable : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
258 SDTCisSameAs<0,2>, SDTCisInt<0>]>;
260 def X86vshlv : SDNode<"X86ISD::VSHLV", X86vshiftvariable>;
261 def X86vsrlv : SDNode<"X86ISD::VSRLV", X86vshiftvariable>;
262 def X86vsrav : SDNode<"X86ISD::VSRAV", X86vshiftvariable>;
264 def X86vshli : SDNode<"X86ISD::VSHLI", X86vshiftimm>;
265 def X86vsrli : SDNode<"X86ISD::VSRLI", X86vshiftimm>;
266 def X86vsrai : SDNode<"X86ISD::VSRAI", X86vshiftimm>;
268 def X86kshiftl : SDNode<"X86ISD::KSHIFTL",
269 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
272 def X86kshiftr : SDNode<"X86ISD::KSHIFTR",
273 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
277 def X86kadd : SDNode<"X86ISD::KADD", SDTIntBinOp, [SDNPCommutative]>;
279 def X86vrotli : SDNode<"X86ISD::VROTLI", X86vshiftimm>;
280 def X86vrotri : SDNode<"X86ISD::VROTRI", X86vshiftimm>;
282 def X86vpshl : SDNode<"X86ISD::VPSHL", X86vshiftvariable>;
283 def X86vpsha : SDNode<"X86ISD::VPSHA", X86vshiftvariable>;
285 def X86vpcom : SDNode<"X86ISD::VPCOM",
286 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
288 SDTCisVT<3, i8>, SDTCisInt<0>]>>;
289 def X86vpcomu : SDNode<"X86ISD::VPCOMU",
290 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
292 SDTCisVT<3, i8>, SDTCisInt<0>]>>;
293 def X86vpermil2 : SDNode<"X86ISD::VPERMIL2",
294 SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
296 SDTCisFP<0>, SDTCisInt<3>,
297 SDTCisSameNumEltsAs<0, 3>,
298 SDTCisSameSizeAs<0,3>,
300 def X86vpperm : SDNode<"X86ISD::VPPERM",
301 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
302 SDTCisSameAs<0,2>, SDTCisSameAs<0, 3>]>>;
304 def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
306 SDTCisSameAs<2, 1>]>;
308 def X86mulhrs : SDNode<"X86ISD::MULHRS", SDTIntBinOp, [SDNPCommutative]>;
309 def X86ptest : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>;
310 def X86testp : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>;
311 def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>;
312 def X86ktest : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>;
314 def X86movmsk : SDNode<"X86ISD::MOVMSK",
315 SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVec<1>]>>;
317 def X86selects : SDNode<"X86ISD::SELECTS",
318 SDTypeProfile<1, 3, [SDTCisVT<1, v1i1>,
320 SDTCisSameAs<2, 3>]>>;
322 def X86pmuludq : SDNode<"X86ISD::PMULUDQ",
323 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
327 def X86pmuldq : SDNode<"X86ISD::PMULDQ",
328 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
333 def X86extrqi : SDNode<"X86ISD::EXTRQI",
334 SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
335 SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>;
336 def X86insertqi : SDNode<"X86ISD::INSERTQI",
337 SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
338 SDTCisSameAs<1,2>, SDTCisVT<3, i8>,
341 // Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get
342 // translated into one of the target nodes below during lowering.
343 // Note: this is a work in progress...
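// Illustrative note (assumption, not taken from this file): a generic v4i32
// shuffle with mask <2, 3, 0, 1> would typically end up as one of the nodes
// below, e.g. X86PShufd with immediate 0x4E, since the PSHUFD immediate packs
// the source element for each destination lane into two bits
// (lane3..lane0 = 01 00 11 10 = 0x4E).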
344 def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>;
345 def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
347 def SDTShuff2OpFP : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
348 SDTCisSameAs<0,1>, SDTCisSameAs<0,2>]>;
350 def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
351 SDTCisFP<0>, SDTCisInt<2>,
352 SDTCisSameNumEltsAs<0,2>,
353 SDTCisSameSizeAs<0,2>]>;
354 def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>,
355 SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>;
356 def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
357 SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>;
358 def SDTFPBinOpImm: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
362 def SDTFPTernaryOpImm: SDTypeProfile<1, 4, [SDTCisFP<0>, SDTCisSameAs<0,1>,
365 SDTCisSameSizeAs<0, 3>,
366 SDTCisSameNumEltsAs<0, 3>,
368 def SDTFPUnaryOpImm: SDTypeProfile<1, 2, [SDTCisFP<0>,
372 def SDTVBroadcast : SDTypeProfile<1, 1, [SDTCisVec<0>]>;
373 def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>,
374 SDTCisInt<0>, SDTCisInt<1>]>;
376 def SDTBlend : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
377 SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;
379 def SDTTernlog : SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisVec<0>,
380 SDTCisSameAs<0,1>, SDTCisSameAs<0,2>,
381 SDTCisSameAs<0,3>, SDTCisVT<4, i8>]>;
383 def SDTFPBinOpRound : SDTypeProfile<1, 3, [ // fadd_round, fmul_round, etc.
384 SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisVT<3, i32>]>;
386 def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [ // fsqrt_round, fgetexp_round, etc.
387 SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisVT<2, i32>]>;
389 def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
390 SDTCisSameAs<1,2>, SDTCisSameAs<1,3>,
391 SDTCisFP<0>, SDTCisVT<4, i32>]>;
395 1, [SDTCisVec<0>, SDTCisVec<1>, SDTCisInt<0>, SDTCisFP<1>]>;
397 def X86fp2sisat : SDNode<"X86ISD::FP_TO_SINT_SAT", SDTFPToxIntSatOp>;
398 def X86fp2uisat : SDNode<"X86ISD::FP_TO_UINT_SAT", SDTFPToxIntSatOp>;
400 def X86PAlignr : SDNode<"X86ISD::PALIGNR",
401 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i8>,
405 def X86VAlign : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>;
407 def X86VShld : SDNode<"X86ISD::VSHLD", SDTShuff3OpI>;
408 def X86VShrd : SDNode<"X86ISD::VSHRD", SDTShuff3OpI>;
409 def X86VShldv : SDNode<"X86ISD::VSHLDV",
410 SDTypeProfile<1, 3, [SDTCisVec<0>,
413 SDTCisSameAs<0,3>]>>;
414 def X86VShrdv : SDNode<"X86ISD::VSHRDV",
415 SDTypeProfile<1, 3, [SDTCisVec<0>,
418 SDTCisSameAs<0,3>]>>;
420 def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>;
422 def X86PShufd : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>;
423 def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>;
424 def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>;
426 def X86Shufp : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>;
427 def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>;
429 def X86Movddup : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>;
430 def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>;
431 def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>;
433 def X86Movsd : SDNode<"X86ISD::MOVSD",
434 SDTypeProfile<1, 2, [SDTCisVT<0, v2f64>,
436 SDTCisVT<2, v2f64>]>>;
437 def X86Movss : SDNode<"X86ISD::MOVSS",
438 SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
440 SDTCisVT<2, v4f32>]>>;
442 def X86Movsh : SDNode<"X86ISD::MOVSH",
443 SDTypeProfile<1, 2, [SDTCisVT<0, v8f16>,
445 SDTCisVT<2, v8f16>]>>;
447 def X86Movlhps : SDNode<"X86ISD::MOVLHPS",
448 SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
450 SDTCisVT<2, v4f32>]>>;
451 def X86Movhlps : SDNode<"X86ISD::MOVHLPS",
452 SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
454 SDTCisVT<2, v4f32>]>>;
456 def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<0>,
457 SDTCisVec<1>, SDTCisInt<1>,
458 SDTCisSameSizeAs<0,1>,
460 SDTCisOpSmallerThanOp<0, 1>]>;
461 def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>;
462 def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>;
464 def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>;
465 def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>;
467 def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW",
468 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
469 SDTCVecEltisVT<1, i8>,
470 SDTCisSameSizeAs<0,1>,
471 SDTCisSameAs<1,2>]>>;
472 def X86vpmaddwd : SDNode<"X86ISD::VPMADDWD",
473 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i32>,
474 SDTCVecEltisVT<1, i16>,
475 SDTCisSameSizeAs<0,1>,
479 def X86VPermilpv : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>;
480 def X86VPermilpi : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>;
481 def X86VPermv : SDNode<"X86ISD::VPERMV",
482 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>,
483 SDTCisSameNumEltsAs<0,1>,
484 SDTCisSameSizeAs<0,1>,
485 SDTCisSameAs<0,2>]>>;
486 def X86VPermi : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>;
487 def X86VPermt2 : SDNode<"X86ISD::VPERMV3",
488 SDTypeProfile<1, 3, [SDTCisVec<0>,
489 SDTCisSameAs<0,1>, SDTCisInt<2>,
490 SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>,
491 SDTCisSameSizeAs<0,2>,
492 SDTCisSameAs<0,3>]>, []>;
494 def X86vpternlog : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>;
496 def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>;
498 def X86VFixupimm : SDNode<"X86ISD::VFIXUPIMM", SDTFPTernaryOpImm>;
499 def X86VFixupimmSAE : SDNode<"X86ISD::VFIXUPIMM_SAE", SDTFPTernaryOpImm>;
500 def X86VFixupimms : SDNode<"X86ISD::VFIXUPIMMS", SDTFPTernaryOpImm>;
501 def X86VFixupimmSAEs : SDNode<"X86ISD::VFIXUPIMMS_SAE", SDTFPTernaryOpImm>;
502 def X86VRange : SDNode<"X86ISD::VRANGE", SDTFPBinOpImm>;
503 def X86VRangeSAE : SDNode<"X86ISD::VRANGE_SAE", SDTFPBinOpImm>;
504 def X86VReduce : SDNode<"X86ISD::VREDUCE", SDTFPUnaryOpImm>;
505 def X86VReduceSAE : SDNode<"X86ISD::VREDUCE_SAE", SDTFPUnaryOpImm>;
506 def X86VRndScale : SDNode<"X86ISD::VRNDSCALE", SDTFPUnaryOpImm>;
507 def X86strict_VRndScale : SDNode<"X86ISD::STRICT_VRNDSCALE", SDTFPUnaryOpImm,
509 def X86any_VRndScale : PatFrags<(ops node:$src1, node:$src2),
510 [(X86strict_VRndScale node:$src1, node:$src2),
511 (X86VRndScale node:$src1, node:$src2)]>;
513 def X86VRndScaleSAE: SDNode<"X86ISD::VRNDSCALE_SAE", SDTFPUnaryOpImm>;
514 def X86VGetMant : SDNode<"X86ISD::VGETMANT", SDTFPUnaryOpImm>;
515 def X86VGetMantSAE : SDNode<"X86ISD::VGETMANT_SAE", SDTFPUnaryOpImm>;
516 def X86Vfpclass : SDNode<"X86ISD::VFPCLASS",
517 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
519 SDTCisSameNumEltsAs<0,1>,
520 SDTCisVT<2, i32>]>, []>;
521 def X86Vfpclasss : SDNode<"X86ISD::VFPCLASSS",
522 SDTypeProfile<1, 2, [SDTCisVT<0, v1i1>,
523 SDTCisFP<1>, SDTCisVT<2, i32>]>,[]>;
525 def X86VBroadcast : SDNode<"X86ISD::VBROADCAST", SDTVBroadcast>;
526 def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;
528 def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;
529 def X86Blendv : SDNode<"X86ISD::BLENDV",
530 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisInt<1>,
533 SDTCisSameNumEltsAs<0, 1>,
534 SDTCisSameSizeAs<0, 1>]>>;
536 def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;
538 def X86faddRnd : SDNode<"X86ISD::FADD_RND", SDTFPBinOpRound>;
539 def X86fadds : SDNode<"X86ISD::FADDS", SDTFPBinOp>;
540 def X86faddRnds : SDNode<"X86ISD::FADDS_RND", SDTFPBinOpRound>;
541 def X86fsubRnd : SDNode<"X86ISD::FSUB_RND", SDTFPBinOpRound>;
542 def X86fsubs : SDNode<"X86ISD::FSUBS", SDTFPBinOp>;
543 def X86fsubRnds : SDNode<"X86ISD::FSUBS_RND", SDTFPBinOpRound>;
544 def X86fmulRnd : SDNode<"X86ISD::FMUL_RND", SDTFPBinOpRound>;
545 def X86fmuls : SDNode<"X86ISD::FMULS", SDTFPBinOp>;
546 def X86fmulRnds : SDNode<"X86ISD::FMULS_RND", SDTFPBinOpRound>;
547 def X86fdivRnd : SDNode<"X86ISD::FDIV_RND", SDTFPBinOpRound>;
548 def X86fdivs : SDNode<"X86ISD::FDIVS", SDTFPBinOp>;
549 def X86fdivRnds : SDNode<"X86ISD::FDIVS_RND", SDTFPBinOpRound>;
550 def X86fmaxSAE : SDNode<"X86ISD::FMAX_SAE", SDTFPBinOp>;
551 def X86fmaxSAEs : SDNode<"X86ISD::FMAXS_SAE", SDTFPBinOp>;
552 def X86fminSAE : SDNode<"X86ISD::FMIN_SAE", SDTFPBinOp>;
553 def X86fminSAEs : SDNode<"X86ISD::FMINS_SAE", SDTFPBinOp>;
554 def X86scalef : SDNode<"X86ISD::SCALEF", SDTFPBinOp>;
555 def X86scalefRnd : SDNode<"X86ISD::SCALEF_RND", SDTFPBinOpRound>;
556 def X86scalefs : SDNode<"X86ISD::SCALEFS", SDTFPBinOp>;
557 def X86scalefsRnd: SDNode<"X86ISD::SCALEFS_RND", SDTFPBinOpRound>;
558 def X86fsqrtRnd : SDNode<"X86ISD::FSQRT_RND", SDTFPUnaryOpRound>;
559 def X86fsqrts : SDNode<"X86ISD::FSQRTS", SDTFPBinOp>;
560 def X86fsqrtRnds : SDNode<"X86ISD::FSQRTS_RND", SDTFPBinOpRound>;
561 def X86fgetexp : SDNode<"X86ISD::FGETEXP", SDTFPUnaryOp>;
562 def X86fgetexpSAE : SDNode<"X86ISD::FGETEXP_SAE", SDTFPUnaryOp>;
563 def X86fgetexps : SDNode<"X86ISD::FGETEXPS", SDTFPBinOp>;
564 def X86fgetexpSAEs : SDNode<"X86ISD::FGETEXPS_SAE", SDTFPBinOp>;
566 def X86Fnmadd : SDNode<"X86ISD::FNMADD", SDTFPTernaryOp, [SDNPCommutative]>;
567 def X86strict_Fnmadd : SDNode<"X86ISD::STRICT_FNMADD", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
568 def X86any_Fnmadd : PatFrags<(ops node:$src1, node:$src2, node:$src3),
569 [(X86strict_Fnmadd node:$src1, node:$src2, node:$src3),
570 (X86Fnmadd node:$src1, node:$src2, node:$src3)]>;
571 def X86Fmsub : SDNode<"X86ISD::FMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
572 def X86strict_Fmsub : SDNode<"X86ISD::STRICT_FMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
573 def X86any_Fmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
574 [(X86strict_Fmsub node:$src1, node:$src2, node:$src3),
575 (X86Fmsub node:$src1, node:$src2, node:$src3)]>;
576 def X86Fnmsub : SDNode<"X86ISD::FNMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
577 def X86strict_Fnmsub : SDNode<"X86ISD::STRICT_FNMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
578 def X86any_Fnmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
579 [(X86strict_Fnmsub node:$src1, node:$src2, node:$src3),
580 (X86Fnmsub node:$src1, node:$src2, node:$src3)]>;
581 def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB", SDTFPTernaryOp, [SDNPCommutative]>;
582 def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD", SDTFPTernaryOp, [SDNPCommutative]>;
584 def X86FmaddRnd : SDNode<"X86ISD::FMADD_RND", SDTFmaRound, [SDNPCommutative]>;
585 def X86FnmaddRnd : SDNode<"X86ISD::FNMADD_RND", SDTFmaRound, [SDNPCommutative]>;
586 def X86FmsubRnd : SDNode<"X86ISD::FMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
587 def X86FnmsubRnd : SDNode<"X86ISD::FNMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
588 def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound, [SDNPCommutative]>;
589 def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound, [SDNPCommutative]>;
591 def X86vp2intersect : SDNode<"X86ISD::VP2INTERSECT",
592 SDTypeProfile<1, 2, [SDTCisVT<0, untyped>,
593 SDTCisVec<1>, SDTCisSameAs<1, 2>]>>;
595 def SDTIFma : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<0,1>,
596 SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
597 def x86vpmadd52l : SDNode<"X86ISD::VPMADD52L", SDTIFma, [SDNPCommutative]>;
598 def x86vpmadd52h : SDNode<"X86ISD::VPMADD52H", SDTIFma, [SDNPCommutative]>;
600 def x86vfmaddc : SDNode<"X86ISD::VFMADDC", SDTFPTernaryOp, [SDNPCommutative]>;
601 def x86vfmaddcRnd : SDNode<"X86ISD::VFMADDC_RND", SDTFmaRound, [SDNPCommutative]>;
602 def x86vfcmaddc : SDNode<"X86ISD::VFCMADDC", SDTFPTernaryOp>;
603 def x86vfcmaddcRnd : SDNode<"X86ISD::VFCMADDC_RND", SDTFmaRound>;
604 def x86vfmulc : SDNode<"X86ISD::VFMULC", SDTFPBinOp, [SDNPCommutative]>;
605 def x86vfmulcRnd : SDNode<"X86ISD::VFMULC_RND", SDTFPBinOpRound, [SDNPCommutative]>;
606 def x86vfcmulc : SDNode<"X86ISD::VFCMULC", SDTFPBinOp>;
607 def x86vfcmulcRnd : SDNode<"X86ISD::VFCMULC_RND", SDTFPBinOpRound>;
609 def x86vfmaddcSh : SDNode<"X86ISD::VFMADDCSH", SDTFPTernaryOp, [SDNPCommutative]>;
610 def x86vfcmaddcSh : SDNode<"X86ISD::VFCMADDCSH", SDTFPTernaryOp>;
611 def x86vfmulcSh : SDNode<"X86ISD::VFMULCSH", SDTFPBinOp, [SDNPCommutative]>;
612 def x86vfcmulcSh : SDNode<"X86ISD::VFCMULCSH", SDTFPBinOp>;
613 def x86vfmaddcShRnd : SDNode<"X86ISD::VFMADDCSH_RND", SDTFmaRound, [SDNPCommutative]>;
614 def x86vfcmaddcShRnd : SDNode<"X86ISD::VFCMADDCSH_RND",SDTFmaRound>;
615 def x86vfmulcShRnd : SDNode<"X86ISD::VFMULCSH_RND", SDTFPBinOpRound, [SDNPCommutative]>;
616 def x86vfcmulcShRnd : SDNode<"X86ISD::VFCMULCSH_RND", SDTFPBinOpRound>;
618 def X86rsqrt14 : SDNode<"X86ISD::RSQRT14", SDTFPUnaryOp>;
619 def X86rcp14 : SDNode<"X86ISD::RCP14", SDTFPUnaryOp>;
622 def SDTVnni : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
623 SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
624 def X86Vpdpbusd : SDNode<"X86ISD::VPDPBUSD", SDTVnni>;
625 def X86Vpdpbusds : SDNode<"X86ISD::VPDPBUSDS", SDTVnni>;
626 def X86Vpdpwssd : SDNode<"X86ISD::VPDPWSSD", SDTVnni>;
627 def X86Vpdpwssds : SDNode<"X86ISD::VPDPWSSDS", SDTVnni>;
629 def X86rsqrt14s : SDNode<"X86ISD::RSQRT14S", SDTFPBinOp>;
630 def X86rcp14s : SDNode<"X86ISD::RCP14S", SDTFPBinOp>;
631 def X86Ranges : SDNode<"X86ISD::VRANGES", SDTFPBinOpImm>;
632 def X86RndScales : SDNode<"X86ISD::VRNDSCALES", SDTFPBinOpImm>;
633 def X86Reduces : SDNode<"X86ISD::VREDUCES", SDTFPBinOpImm>;
634 def X86GetMants : SDNode<"X86ISD::VGETMANTS", SDTFPBinOpImm>;
635 def X86RangesSAE : SDNode<"X86ISD::VRANGES_SAE", SDTFPBinOpImm>;
636 def X86RndScalesSAE : SDNode<"X86ISD::VRNDSCALES_SAE", SDTFPBinOpImm>;
637 def X86ReducesSAE : SDNode<"X86ISD::VREDUCES_SAE", SDTFPBinOpImm>;
638 def X86GetMantsSAE : SDNode<"X86ISD::VGETMANTS_SAE", SDTFPBinOpImm>;
640 def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 3,
641 [SDTCisSameAs<0, 1>, SDTCisVec<1>,
642 SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
643 SDTCisSameNumEltsAs<0, 3>]>, []>;
644 def X86expand : SDNode<"X86ISD::EXPAND", SDTypeProfile<1, 3,
645 [SDTCisSameAs<0, 1>, SDTCisVec<1>,
646 SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
647 SDTCisSameNumEltsAs<0, 3>]>, []>;
650 def X86Vpshufbitqmb : SDNode<"X86ISD::VPSHUFBITQMB",
651 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
653 SDTCVecEltisVT<0,i1>,
654 SDTCisSameNumEltsAs<0,1>]>>;
656 def SDTintToFP: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
657 SDTCisSameAs<0,1>, SDTCisInt<2>]>;
658 def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
659 SDTCisSameAs<0,1>, SDTCisInt<2>,
662 def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
663 SDTCisInt<0>, SDTCisFP<1>]>;
664 def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
665 SDTCisInt<0>, SDTCisFP<1>,
667 def SDTSFloatToInt: SDTypeProfile<1, 1, [SDTCisInt<0>, SDTCisFP<1>,
669 def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
670 SDTCisVec<1>, SDTCisVT<2, i32>]>;
672 def SDTVintToFP: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
673 SDTCisFP<0>, SDTCisInt<1>]>;
674 def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
675 SDTCisFP<0>, SDTCisInt<1>,
679 def X86SintToFp : SDNode<"X86ISD::SCALAR_SINT_TO_FP", SDTintToFP>;
680 def X86SintToFpRnd : SDNode<"X86ISD::SCALAR_SINT_TO_FP_RND", SDTintToFPRound>;
681 def X86UintToFp : SDNode<"X86ISD::SCALAR_UINT_TO_FP", SDTintToFP>;
682 def X86UintToFpRnd : SDNode<"X86ISD::SCALAR_UINT_TO_FP_RND", SDTintToFPRound>;
684 def X86cvtts2Int : SDNode<"X86ISD::CVTTS2SI", SDTSFloatToInt>;
685 def X86cvtts2UInt : SDNode<"X86ISD::CVTTS2UI", SDTSFloatToInt>;
686 def X86cvtts2IntSAE : SDNode<"X86ISD::CVTTS2SI_SAE", SDTSFloatToInt>;
687 def X86cvtts2UIntSAE : SDNode<"X86ISD::CVTTS2UI_SAE", SDTSFloatToInt>;
689 def X86cvts2si : SDNode<"X86ISD::CVTS2SI", SDTSFloatToInt>;
690 def X86cvts2usi : SDNode<"X86ISD::CVTS2UI", SDTSFloatToInt>;
691 def X86cvts2siRnd : SDNode<"X86ISD::CVTS2SI_RND", SDTSFloatToIntRnd>;
692 def X86cvts2usiRnd : SDNode<"X86ISD::CVTS2UI_RND", SDTSFloatToIntRnd>;
694 def X86cvttss2Int : SDNode<"X86ISD::CVTTS2SIS", SDTSFloatToInt>;
695 def X86cvttss2UInt : SDNode<"X86ISD::CVTTS2UIS", SDTSFloatToInt>;
696 def X86cvttss2IntSAE : SDNode<"X86ISD::CVTTS2SIS_SAE", SDTSFloatToInt>;
697 def X86cvttss2UIntSAE : SDNode<"X86ISD::CVTTS2UIS_SAE", SDTSFloatToInt>;
699 // Vector with rounding mode
// cvtt fp-to-int nodes
702 def X86cvttp2siSAE : SDNode<"X86ISD::CVTTP2SI_SAE", SDTFloatToInt>;
703 def X86cvttp2uiSAE : SDNode<"X86ISD::CVTTP2UI_SAE", SDTFloatToInt>;
705 def X86VSintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTVintToFPRound>;
706 def X86VUintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTVintToFPRound>;
708 def X86cvttp2sisSAE : SDNode<"X86ISD::CVTTP2SIS_SAE", SDTFloatToInt>;
709 def X86cvttp2uisSAE : SDNode<"X86ISD::CVTTP2UIS_SAE", SDTFloatToInt>;
710 def X86cvttp2sis : SDNode<"X86ISD::CVTTP2SIS", SDTFloatToInt>;
711 def X86cvttp2uis : SDNode<"X86ISD::CVTTP2UIS", SDTFloatToInt>;
// cvt fp-to-int nodes
714 def X86cvtp2IntRnd : SDNode<"X86ISD::CVTP2SI_RND", SDTFloatToIntRnd>;
715 def X86cvtp2UIntRnd : SDNode<"X86ISD::CVTP2UI_RND", SDTFloatToIntRnd>;
717 // Vector without rounding mode
// cvtt fp-to-int nodes
720 def X86cvttp2si : SDNode<"X86ISD::CVTTP2SI", SDTFloatToInt>;
721 def X86cvttp2ui : SDNode<"X86ISD::CVTTP2UI", SDTFloatToInt>;
722 def X86strict_cvttp2si : SDNode<"X86ISD::STRICT_CVTTP2SI", SDTFloatToInt, [SDNPHasChain]>;
723 def X86strict_cvttp2ui : SDNode<"X86ISD::STRICT_CVTTP2UI", SDTFloatToInt, [SDNPHasChain]>;
724 def X86any_cvttp2si : PatFrags<(ops node:$src),
725 [(X86strict_cvttp2si node:$src),
726 (X86cvttp2si node:$src)]>;
727 def X86any_cvttp2ui : PatFrags<(ops node:$src),
728 [(X86strict_cvttp2ui node:$src),
729 (X86cvttp2ui node:$src)]>;
731 def X86VSintToFP : SDNode<"X86ISD::CVTSI2P", SDTVintToFP>;
732 def X86VUintToFP : SDNode<"X86ISD::CVTUI2P", SDTVintToFP>;
733 def X86strict_VSintToFP : SDNode<"X86ISD::STRICT_CVTSI2P", SDTVintToFP, [SDNPHasChain]>;
734 def X86strict_VUintToFP : SDNode<"X86ISD::STRICT_CVTUI2P", SDTVintToFP, [SDNPHasChain]>;
735 def X86any_VSintToFP : PatFrags<(ops node:$src),
736 [(X86strict_VSintToFP node:$src),
737 (X86VSintToFP node:$src)]>;
738 def X86any_VUintToFP : PatFrags<(ops node:$src),
739 [(X86strict_VUintToFP node:$src),
740 (X86VUintToFP node:$src)]>;
// cvt fp-to-int nodes
744 def X86cvtp2Int : SDNode<"X86ISD::CVTP2SI", SDTFloatToInt>;
745 def X86cvtp2UInt : SDNode<"X86ISD::CVTP2UI", SDTFloatToInt>;
748 // Masked versions of above
749 def SDTMVintToFP: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
750 SDTCisFP<0>, SDTCisInt<1>,
752 SDTCVecEltisVT<3, i1>,
753 SDTCisSameNumEltsAs<1, 3>]>;
754 def SDTMFloatToInt: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
755 SDTCisInt<0>, SDTCisFP<1>,
756 SDTCisSameSizeAs<0, 1>,
758 SDTCVecEltisVT<3, i1>,
759 SDTCisSameNumEltsAs<1, 3>]>;
761 def X86VMSintToFP : SDNode<"X86ISD::MCVTSI2P", SDTMVintToFP>;
762 def X86VMUintToFP : SDNode<"X86ISD::MCVTUI2P", SDTMVintToFP>;
764 def X86mcvtp2Int : SDNode<"X86ISD::MCVTP2SI", SDTMFloatToInt>;
765 def X86mcvtp2UInt : SDNode<"X86ISD::MCVTP2UI", SDTMFloatToInt>;
766 def X86mcvttp2si : SDNode<"X86ISD::MCVTTP2SI", SDTMFloatToInt>;
767 def X86mcvttp2ui : SDNode<"X86ISD::MCVTTP2UI", SDTMFloatToInt>;
768 def X86mcvttp2sis : SDNode<"X86ISD::MCVTTP2SIS", SDTMFloatToInt>;
769 def X86mcvttp2uis : SDNode<"X86ISD::MCVTTP2UIS", SDTMFloatToInt>;
771 def SDTcvtph2ps : SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
772 SDTCVecEltisVT<1, i16>]>;
773 def X86cvtph2ps : SDNode<"X86ISD::CVTPH2PS", SDTcvtph2ps>;
774 def X86strict_cvtph2ps : SDNode<"X86ISD::STRICT_CVTPH2PS", SDTcvtph2ps,
776 def X86any_cvtph2ps : PatFrags<(ops node:$src),
777 [(X86strict_cvtph2ps node:$src),
778 (X86cvtph2ps node:$src)]>;
780 def X86cvtph2psSAE : SDNode<"X86ISD::CVTPH2PS_SAE", SDTcvtph2ps>;
782 def SDTcvtps2ph : SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
783 SDTCVecEltisVT<1, f32>,
785 def X86cvtps2ph : SDNode<"X86ISD::CVTPS2PH", SDTcvtps2ph>;
786 def X86strict_cvtps2ph : SDNode<"X86ISD::STRICT_CVTPS2PH", SDTcvtps2ph,
788 def X86any_cvtps2ph : PatFrags<(ops node:$src1, node:$src2),
789 [(X86strict_cvtps2ph node:$src1, node:$src2),
790 (X86cvtps2ph node:$src1, node:$src2)]>;
792 def X86cvtps2phSAE : SDNode<"X86ISD::CVTPS2PH_SAE", SDTcvtps2ph>;
794 def SDTmcvtps2ph : SDTypeProfile<1, 4, [SDTCVecEltisVT<0, i16>,
795 SDTCVecEltisVT<1, f32>,
798 SDTCVecEltisVT<4, i1>,
799 SDTCisSameNumEltsAs<1, 4>]>;
800 def X86mcvtps2ph : SDNode<"X86ISD::MCVTPS2PH", SDTmcvtps2ph>;
801 def X86mcvtps2phSAE : SDNode<"X86ISD::MCVTPS2PH_SAE", SDTmcvtps2ph>;
803 def X86vfpextSAE : SDNode<"X86ISD::VFPEXT_SAE",
804 SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
805 SDTCisFP<1>, SDTCisVec<1>,
806 SDTCisOpSmallerThanOp<1, 0>]>>;
807 def X86vfproundRnd: SDNode<"X86ISD::VFPROUND_RND",
808 SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
809 SDTCisFP<1>, SDTCisVec<1>,
810 SDTCisOpSmallerThanOp<0, 1>,
813 def X86vminmax : SDNode<"X86ISD::VMINMAX", SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
814 SDTCisSameAs<0,2>, SDTCisInt<3>]>>;
815 def X86vminmaxSae : SDNode<"X86ISD::VMINMAX_SAE", SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
816 SDTCisSameAs<0,2>, SDTCisInt<3>]>>;
818 def X86vminmaxs : SDNode<"X86ISD::VMINMAXS", SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
819 SDTCisSameAs<0,2>, SDTCisInt<3>]>>;
820 def X86vminmaxsSae : SDNode<"X86ISD::VMINMAXS_SAE", SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
821 SDTCisSameAs<0,2>, SDTCisInt<3>]>>;
823 // cvt fp to bfloat16
824 def X86mcvtneps2bf16 : SDNode<"X86ISD::MCVTNEPS2BF16",
825 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, bf16>,
826 SDTCVecEltisVT<1, f32>,
828 SDTCVecEltisVT<3, i1>,
829 SDTCisSameNumEltsAs<1, 3>]>>;
830 def X86cvtneps2bf16 : SDNode<"X86ISD::CVTNEPS2BF16",
831 SDTypeProfile<1, 1, [SDTCVecEltisVT<0, bf16>,
832 SDTCVecEltisVT<1, f32>]>>;
833 def X86dpbf16ps : SDNode<"X86ISD::DPBF16PS",
834 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
836 SDTCVecEltisVT<2, bf16>,
837 SDTCisSameAs<2,3>]>>;
838 def X86dpfp16ps : SDNode<"X86ISD::DPFP16PS",
839 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
841 SDTCVecEltisVT<2, f16>,
842 SDTCisSameAs<2,3>]>>;
// Galois field arithmetic
845 def X86GF2P8affineinvqb : SDNode<"X86ISD::GF2P8AFFINEINVQB", SDTBlend>;
846 def X86GF2P8affineqb : SDNode<"X86ISD::GF2P8AFFINEQB", SDTBlend>;
847 def X86GF2P8mulb : SDNode<"X86ISD::GF2P8MULB", SDTIntBinOp>;
849 def SDTX86MaskedStore: SDTypeProfile<0, 3, [ // masked store
850 SDTCisVec<0>, SDTCisPtrTy<1>, SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>
853 def X86vpdpbssd : SDNode<"X86ISD::VPDPBSSD", SDTVnni>;
854 def X86vpdpbssds : SDNode<"X86ISD::VPDPBSSDS", SDTVnni>;
855 def X86vpdpbsud : SDNode<"X86ISD::VPDPBSUD", SDTVnni>;
856 def X86vpdpbsuds : SDNode<"X86ISD::VPDPBSUDS", SDTVnni>;
857 def X86vpdpbuud : SDNode<"X86ISD::VPDPBUUD", SDTVnni>;
858 def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;
860 def X86vpdpwsud : SDNode<"X86ISD::VPDPWSUD", SDTVnni>;
861 def X86vpdpwsuds : SDNode<"X86ISD::VPDPWSUDS", SDTVnni>;
862 def X86vpdpwusd : SDNode<"X86ISD::VPDPWUSD", SDTVnni>;
863 def X86vpdpwusds : SDNode<"X86ISD::VPDPWUSDS", SDTVnni>;
864 def X86vpdpwuud : SDNode<"X86ISD::VPDPWUUD", SDTVnni>;
865 def X86vpdpwuuds : SDNode<"X86ISD::VPDPWUUDS", SDTVnni>;
867 def X86Vmpsadbw : SDNode<"X86ISD::MPSADBW", SDTX86PSADBW>;
869 // in place saturated cvt fp-to-int
870 def X86vcvtp2ibs : SDNode<"X86ISD::CVTP2IBS", SDTFloatToInt>;
871 def X86vcvtp2iubs : SDNode<"X86ISD::CVTP2IUBS", SDTFloatToInt>;
873 def X86vcvtp2ibsRnd : SDNode<"X86ISD::CVTP2IBS_RND", SDTFloatToIntRnd>;
874 def X86vcvtp2iubsRnd : SDNode<"X86ISD::CVTP2IUBS_RND", SDTFloatToIntRnd>;
// in place saturated cvtt fp-to-int
877 def X86vcvttp2ibs : SDNode<"X86ISD::CVTTP2IBS", SDTFloatToInt>;
878 def X86vcvttp2iubs : SDNode<"X86ISD::CVTTP2IUBS", SDTFloatToInt>;
880 def X86vcvttp2ibsSAE : SDNode<"X86ISD::CVTTP2IBS_SAE", SDTFloatToInt>;
881 def X86vcvttp2iubsSAE : SDNode<"X86ISD::CVTTP2IUBS_SAE", SDTFloatToInt>;
883 def SDTAVX10CONVERT_I82F16 : SDTypeProfile<1, 2, [
884 SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>, SDTCisSameAs<1, 2>
887 def SDTAVX10CONVERT_F16I8 : SDTypeProfile<1, 1, [
888 SDTCVecEltisVT<0, f16>, SDTCVecEltisVT<1, i8>
891 def SDTAVX10CONVERT_I8F16 : SDTypeProfile<1, 1, [
892 SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>
895 def SDTAVX10CONVERT_I8F16_MASK : SDTypeProfile<1, 3, [
896 SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, f16>,
897 SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
898 SDTCisSameNumEltsAs<1, 3>
901 def SDTAVX10CONVERT_2I8F16 : SDTypeProfile<1, 2, [
902 SDTCVecEltisVT<0, i8>, SDTCVecEltisVT<1, i8>, SDTCVecEltisVT<2, f16>
905 def SDTAVX10CONVERT_2I8F16_MASK : SDTypeProfile<1, 4, [
906 SDTCVecEltisVT<0, i8>, SDTCisSameAs<0, 1>,
907 SDTCVecEltisVT<2, f16>, SDTCisSameAs<0, 3>, SDTCVecEltisVT<4, i1>,
908 SDTCisSameNumEltsAs<2, 4>
911 def X86vfpround2Rnd : SDNode<"X86ISD::VFPROUND2_RND",
912 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f16>,
913 SDTCVecEltisVT<1, f32>,
917 def X86vcvtne2ph2bf8 : SDNode<"X86ISD::VCVTNE2PH2BF8",
918 SDTAVX10CONVERT_I82F16>;
919 def X86vcvtne2ph2bf8s : SDNode<"X86ISD::VCVTNE2PH2BF8S",
920 SDTAVX10CONVERT_I82F16>;
921 def X86vcvtne2ph2hf8 : SDNode<"X86ISD::VCVTNE2PH2HF8",
922 SDTAVX10CONVERT_I82F16>;
923 def X86vcvtne2ph2hf8s : SDNode<"X86ISD::VCVTNE2PH2HF8S",
924 SDTAVX10CONVERT_I82F16>;
926 def X86vcvthf82ph : SDNode<"X86ISD::VCVTHF82PH",
927 SDTAVX10CONVERT_F16I8>;
929 def X86vcvtbiasph2bf8 : SDNode<"X86ISD::VCVTBIASPH2BF8",
930 SDTAVX10CONVERT_2I8F16>;
931 def X86vcvtbiasph2bf8s : SDNode<"X86ISD::VCVTBIASPH2BF8S",
932 SDTAVX10CONVERT_2I8F16>;
933 def X86vcvtbiasph2hf8 : SDNode<"X86ISD::VCVTBIASPH2HF8",
934 SDTAVX10CONVERT_2I8F16>;
935 def X86vcvtbiasph2hf8s : SDNode<"X86ISD::VCVTBIASPH2HF8S",
936 SDTAVX10CONVERT_2I8F16>;
937 def X86vcvtneph2bf8 : SDNode<"X86ISD::VCVTNEPH2BF8",
938 SDTAVX10CONVERT_I8F16>;
939 def X86vcvtneph2bf8s : SDNode<"X86ISD::VCVTNEPH2BF8S",
940 SDTAVX10CONVERT_I8F16>;
941 def X86vcvtneph2hf8 : SDNode<"X86ISD::VCVTNEPH2HF8",
942 SDTAVX10CONVERT_I8F16>;
943 def X86vcvtneph2hf8s : SDNode<"X86ISD::VCVTNEPH2HF8S",
944 SDTAVX10CONVERT_I8F16>;
946 def X86vmcvtbiasph2bf8 : SDNode<"X86ISD::VMCVTBIASPH2BF8",
947 SDTAVX10CONVERT_2I8F16_MASK>;
948 def X86vmcvtbiasph2bf8s : SDNode<"X86ISD::VMCVTBIASPH2BF8S",
949 SDTAVX10CONVERT_2I8F16_MASK>;
950 def X86vmcvtbiasph2hf8 : SDNode<"X86ISD::VMCVTBIASPH2HF8",
951 SDTAVX10CONVERT_2I8F16_MASK>;
952 def X86vmcvtbiasph2hf8s : SDNode<"X86ISD::VMCVTBIASPH2HF8S",
953 SDTAVX10CONVERT_2I8F16_MASK>;
954 def X86vmcvtneph2bf8 : SDNode<"X86ISD::VMCVTNEPH2BF8",
955 SDTAVX10CONVERT_I8F16_MASK>;
956 def X86vmcvtneph2bf8s : SDNode<"X86ISD::VMCVTNEPH2BF8S",
957 SDTAVX10CONVERT_I8F16_MASK>;
958 def X86vmcvtneph2hf8 : SDNode<"X86ISD::VMCVTNEPH2HF8",
959 SDTAVX10CONVERT_I8F16_MASK>;
960 def X86vmcvtneph2hf8s : SDNode<"X86ISD::VMCVTNEPH2HF8S",
961 SDTAVX10CONVERT_I8F16_MASK>;
963 //===----------------------------------------------------------------------===//
964 // SSE pattern fragments
965 //===----------------------------------------------------------------------===//
967 // 128-bit load pattern fragments
968 def loadv8f16 : PatFrag<(ops node:$ptr), (v8f16 (load node:$ptr))>;
969 def loadv8bf16 : PatFrag<(ops node:$ptr), (v8bf16 (load node:$ptr))>;
970 def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
971 def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
972 def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;
973 def loadv4i32 : PatFrag<(ops node:$ptr), (v4i32 (load node:$ptr))>;
974 def loadv8i16 : PatFrag<(ops node:$ptr), (v8i16 (load node:$ptr))>;
975 def loadv16i8 : PatFrag<(ops node:$ptr), (v16i8 (load node:$ptr))>;
977 // 256-bit load pattern fragments
978 def loadv16f16 : PatFrag<(ops node:$ptr), (v16f16 (load node:$ptr))>;
979 def loadv16bf16 : PatFrag<(ops node:$ptr), (v16bf16 (load node:$ptr))>;
980 def loadv8f32 : PatFrag<(ops node:$ptr), (v8f32 (load node:$ptr))>;
981 def loadv4f64 : PatFrag<(ops node:$ptr), (v4f64 (load node:$ptr))>;
982 def loadv4i64 : PatFrag<(ops node:$ptr), (v4i64 (load node:$ptr))>;
983 def loadv8i32 : PatFrag<(ops node:$ptr), (v8i32 (load node:$ptr))>;
984 def loadv16i16 : PatFrag<(ops node:$ptr), (v16i16 (load node:$ptr))>;
985 def loadv32i8 : PatFrag<(ops node:$ptr), (v32i8 (load node:$ptr))>;
987 // 512-bit load pattern fragments
988 def loadv32f16 : PatFrag<(ops node:$ptr), (v32f16 (load node:$ptr))>;
989 def loadv32bf16 : PatFrag<(ops node:$ptr), (v32bf16 (load node:$ptr))>;
990 def loadv16f32 : PatFrag<(ops node:$ptr), (v16f32 (load node:$ptr))>;
991 def loadv8f64 : PatFrag<(ops node:$ptr), (v8f64 (load node:$ptr))>;
992 def loadv8i64 : PatFrag<(ops node:$ptr), (v8i64 (load node:$ptr))>;
993 def loadv16i32 : PatFrag<(ops node:$ptr), (v16i32 (load node:$ptr))>;
994 def loadv32i16 : PatFrag<(ops node:$ptr), (v32i16 (load node:$ptr))>;
995 def loadv64i8 : PatFrag<(ops node:$ptr), (v64i8 (load node:$ptr))>;
997 // 128-/256-/512-bit extload pattern fragments
998 def extloadv2f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
999 def extloadv4f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
1000 def extloadv8f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
1001 def extloadv2f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
1002 def extloadv4f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
1003 def extloadv8f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
1004 def extloadv16f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
1006 // Like 'store', but always requires vector size alignment.
1007 def alignedstore : PatFrag<(ops node:$val, node:$ptr),
1008 (store node:$val, node:$ptr), [{
1009 auto *St = cast<StoreSDNode>(N);
1010 return St->getAlign() >= St->getMemoryVT().getStoreSize();
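// Illustrative sketch (assumption, not part of this file): a full-vector
// aligned-store pattern elsewhere in the backend could use this fragment as
//   def : Pat<(alignedstore (v4f32 VR128:$src), addr:$dst),
//             (MOVAPSmr addr:$dst, VR128:$src)>;
// where MOVAPSmr, VR128 and addr are assumed to come from the other X86 .td
// files.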
1013 // Like 'load', but always requires vector size alignment.
1014 def alignedload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
1015 auto *Ld = cast<LoadSDNode>(N);
1016 return Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
1019 // 128-bit aligned load pattern fragments
1020 // NOTE: all 128-bit integer vector loads are promoted to v2i64
1021 def alignedloadv8f16 : PatFrag<(ops node:$ptr),
1022 (v8f16 (alignedload node:$ptr))>;
1023 def alignedloadv8bf16 : PatFrag<(ops node:$ptr),
1024 (v8bf16 (alignedload node:$ptr))>;
1025 def alignedloadv4f32 : PatFrag<(ops node:$ptr),
1026 (v4f32 (alignedload node:$ptr))>;
1027 def alignedloadv2f64 : PatFrag<(ops node:$ptr),
1028 (v2f64 (alignedload node:$ptr))>;
1029 def alignedloadv2i64 : PatFrag<(ops node:$ptr),
1030 (v2i64 (alignedload node:$ptr))>;
1031 def alignedloadv4i32 : PatFrag<(ops node:$ptr),
1032 (v4i32 (alignedload node:$ptr))>;
1033 def alignedloadv8i16 : PatFrag<(ops node:$ptr),
1034 (v8i16 (alignedload node:$ptr))>;
1035 def alignedloadv16i8 : PatFrag<(ops node:$ptr),
1036 (v16i8 (alignedload node:$ptr))>;
1038 // 256-bit aligned load pattern fragments
1039 // NOTE: all 256-bit integer vector loads are promoted to v4i64
1040 def alignedloadv16f16 : PatFrag<(ops node:$ptr),
1041 (v16f16 (alignedload node:$ptr))>;
1042 def alignedloadv16bf16 : PatFrag<(ops node:$ptr),
1043 (v16bf16 (alignedload node:$ptr))>;
1044 def alignedloadv8f32 : PatFrag<(ops node:$ptr),
1045 (v8f32 (alignedload node:$ptr))>;
1046 def alignedloadv4f64 : PatFrag<(ops node:$ptr),
1047 (v4f64 (alignedload node:$ptr))>;
1048 def alignedloadv4i64 : PatFrag<(ops node:$ptr),
1049 (v4i64 (alignedload node:$ptr))>;
1050 def alignedloadv8i32 : PatFrag<(ops node:$ptr),
1051 (v8i32 (alignedload node:$ptr))>;
1052 def alignedloadv16i16 : PatFrag<(ops node:$ptr),
1053 (v16i16 (alignedload node:$ptr))>;
1054 def alignedloadv32i8 : PatFrag<(ops node:$ptr),
1055 (v32i8 (alignedload node:$ptr))>;
1057 // 512-bit aligned load pattern fragments
1058 def alignedloadv32f16 : PatFrag<(ops node:$ptr),
1059 (v32f16 (alignedload node:$ptr))>;
1060 def alignedloadv32bf16 : PatFrag<(ops node:$ptr),
1061 (v32bf16 (alignedload node:$ptr))>;
1062 def alignedloadv16f32 : PatFrag<(ops node:$ptr),
1063 (v16f32 (alignedload node:$ptr))>;
1064 def alignedloadv8f64 : PatFrag<(ops node:$ptr),
1065 (v8f64 (alignedload node:$ptr))>;
1066 def alignedloadv8i64 : PatFrag<(ops node:$ptr),
1067 (v8i64 (alignedload node:$ptr))>;
1068 def alignedloadv16i32 : PatFrag<(ops node:$ptr),
1069 (v16i32 (alignedload node:$ptr))>;
1070 def alignedloadv32i16 : PatFrag<(ops node:$ptr),
1071 (v32i16 (alignedload node:$ptr))>;
1072 def alignedloadv64i8 : PatFrag<(ops node:$ptr),
1073 (v64i8 (alignedload node:$ptr))>;
1075 // Like 'load', but uses special alignment checks suitable for use in
1076 // memory operands in most SSE instructions, which are required to
1077 // be naturally aligned on some targets but not on others. If the subtarget
1078 // allows unaligned accesses, match any load, though this may require
1079 // setting a feature bit in the processor (on startup, for example).
1080 // Opteron 10h and later implement such a feature.
1081 def memop : PatFrag<(ops node:$ptr), (load node:$ptr), [{
1082 auto *Ld = cast<LoadSDNode>(N);
1083 return Subtarget->hasSSEUnalignedMem() ||
1084 Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
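// Illustrative sketch (assumption): a load-folding pattern elsewhere in the
// backend might use one of the memop fragments below, along the lines of
//   def : Pat<(v4f32 (fadd VR128:$src1, (memopv4f32 addr:$src2))),
//             (ADDPSrm VR128:$src1, addr:$src2)>;
// ADDPSrm, VR128 and addr are assumed to be defined in the other X86 .td files.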
1087 // 128-bit memop pattern fragments
1088 // NOTE: all 128-bit integer vector loads are promoted to v2i64
1089 def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
1090 def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
1091 def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;
1092 def memopv4i32 : PatFrag<(ops node:$ptr), (v4i32 (memop node:$ptr))>;
1093 def memopv8i16 : PatFrag<(ops node:$ptr), (v8i16 (memop node:$ptr))>;
1094 def memopv16i8 : PatFrag<(ops node:$ptr), (v16i8 (memop node:$ptr))>;
1096 // 128-bit bitconvert pattern fragments
1097 def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
1098 def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
1099 def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
1100 def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
1101 def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
1102 def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;
1104 // 256-bit bitconvert pattern fragments
1105 def bc_v32i8 : PatFrag<(ops node:$in), (v32i8 (bitconvert node:$in))>;
1106 def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
1107 def bc_v8i32 : PatFrag<(ops node:$in), (v8i32 (bitconvert node:$in))>;
1108 def bc_v4i64 : PatFrag<(ops node:$in), (v4i64 (bitconvert node:$in))>;
1109 def bc_v8f32 : PatFrag<(ops node:$in), (v8f32 (bitconvert node:$in))>;
1110 def bc_v4f64 : PatFrag<(ops node:$in), (v4f64 (bitconvert node:$in))>;
1112 // 512-bit bitconvert pattern fragments
1113 def bc_v64i8 : PatFrag<(ops node:$in), (v64i8 (bitconvert node:$in))>;
1114 def bc_v32i16 : PatFrag<(ops node:$in), (v32i16 (bitconvert node:$in))>;
1115 def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
1116 def bc_v8i64 : PatFrag<(ops node:$in), (v8i64 (bitconvert node:$in))>;
1117 def bc_v8f64 : PatFrag<(ops node:$in), (v8f64 (bitconvert node:$in))>;
1118 def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;
1120 def X86vzload16 : PatFrag<(ops node:$src),
1121 (X86vzld node:$src), [{
1122 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
1125 def X86vzload32 : PatFrag<(ops node:$src),
1126 (X86vzld node:$src), [{
1127 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
1130 def X86vzload64 : PatFrag<(ops node:$src),
1131 (X86vzld node:$src), [{
1132 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
1135 def X86vextractstore64 : PatFrag<(ops node:$val, node:$ptr),
1136 (X86vextractst node:$val, node:$ptr), [{
1137 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
1140 def X86VBroadcastld8 : PatFrag<(ops node:$src),
1141 (X86VBroadcastld node:$src), [{
1142 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 1;
1145 def X86VBroadcastld16 : PatFrag<(ops node:$src),
1146 (X86VBroadcastld node:$src), [{
1147 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
1150 def X86VBroadcastld32 : PatFrag<(ops node:$src),
1151 (X86VBroadcastld node:$src), [{
1152 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
1155 def X86VBroadcastld64 : PatFrag<(ops node:$src),
1156 (X86VBroadcastld node:$src), [{
1157 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
1160 def X86SubVBroadcastld128 : PatFrag<(ops node:$src),
1161 (X86SubVBroadcastld node:$src), [{
1162 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 16;
1165 def X86SubVBroadcastld256 : PatFrag<(ops node:$src),
1166 (X86SubVBroadcastld node:$src), [{
1167 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 32;
// Scalar SSE intrinsic fragments to match several different types of loads.
// Used by scalar SSE intrinsic instructions which have 128-bit types but
// only load a single element.
// FIXME: We should add more canonicalization in DAGCombine, particularly
// removing the simple_load case.
1175 def sse_load_bf16 : PatFrags<(ops node:$ptr),
1176 [(v8bf16 (simple_load node:$ptr)),
1177 (v8bf16 (X86vzload16 node:$ptr)),
1178 (v8bf16 (scalar_to_vector (loadf16 node:$ptr)))]>;
1179 def sse_load_f16 : PatFrags<(ops node:$ptr),
1180 [(v8f16 (simple_load node:$ptr)),
1181 (v8f16 (X86vzload16 node:$ptr)),
1182 (v8f16 (scalar_to_vector (loadf16 node:$ptr)))]>;
1183 def sse_load_f32 : PatFrags<(ops node:$ptr),
1184 [(v4f32 (simple_load node:$ptr)),
1185 (v4f32 (X86vzload32 node:$ptr)),
1186 (v4f32 (scalar_to_vector (loadf32 node:$ptr)))]>;
1187 def sse_load_f64 : PatFrags<(ops node:$ptr),
1188 [(v2f64 (simple_load node:$ptr)),
1189 (v2f64 (X86vzload64 node:$ptr)),
1190 (v2f64 (scalar_to_vector (loadf64 node:$ptr)))]>;
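// Illustrative sketch (assumption): a scalar intrinsic pattern can fold any of
// the load forms matched above, e.g. roughly
//   def : Pat<(v4f32 (X86fmins VR128:$src1, (sse_load_f32 addr:$src2))),
//             (MINSSrm_Int VR128:$src1, addr:$src2)>;
// where MINSSrm_Int is assumed to be the intrinsic form defined in
// X86InstrSSE.td.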
1192 def fp16imm0 : PatLeaf<(f16 fpimm), [{
1193 return N->isExactlyValue(+0.0);
1196 def fp32imm0 : PatLeaf<(f32 fpimm), [{
1197 return N->isExactlyValue(+0.0);
1200 def fp64imm0 : PatLeaf<(f64 fpimm), [{
1201 return N->isExactlyValue(+0.0);
1204 def fp128imm0 : PatLeaf<(f128 fpimm), [{
1205 return N->isExactlyValue(+0.0);
1208 // EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
1209 // to VEXTRACTF128/VEXTRACTI128 imm.
1210 def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
1211 return getExtractVEXTRACTImmediate(N, 128, SDLoc(N));
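// For example (assuming the helper scales the index by the element width):
// extracting a 128-bit subvector starting at element 4 of a v8f32 covers the
// upper half, so the returned immediate is 1.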
1214 // INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
1215 // VINSERTF128/VINSERTI128 imm.
1216 def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
1217 return getInsertVINSERTImmediate(N, 128, SDLoc(N));
1220 // INSERT_get_vperm2x128_imm xform function: convert insert_subvector index to
1221 // commuted VPERM2F128/VPERM2I128 imm.
1222 def INSERT_get_vperm2x128_commutedimm : SDNodeXForm<insert_subvector, [{
1223 return getPermuteVINSERTCommutedImmediate(N, 128, SDLoc(N));
1226 // EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
1227 // to VEXTRACTF64x4 imm.
1228 def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
1229 return getExtractVEXTRACTImmediate(N, 256, SDLoc(N));
1232 // INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
1233 // VINSERTF64x4 imm.
1234 def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
1235 return getInsertVINSERTImmediate(N, 256, SDLoc(N));
1238 def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
1239 (extract_subvector node:$bigvec,
1241 // Index 0 can be handled via extract_subreg.
1242 return !isNullConstant(N->getOperand(1));
1243 }], EXTRACT_get_vextract128_imm>;
1245 def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
1247 (insert_subvector node:$bigvec, node:$smallvec,
1249 INSERT_get_vinsert128_imm>;
1251 def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
1252 (extract_subvector node:$bigvec,
1254 // Index 0 can be handled via extract_subreg.
1255 return !isNullConstant(N->getOperand(1));
1256 }], EXTRACT_get_vextract256_imm>;
1258 def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
1260 (insert_subvector node:$bigvec, node:$smallvec,
1262 INSERT_get_vinsert256_imm>;
1264 def masked_load : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1265 (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
1266 return !cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
1267 cast<MaskedLoadSDNode>(N)->getExtensionType() == ISD::NON_EXTLOAD &&
1268 cast<MaskedLoadSDNode>(N)->isUnindexed();
1271 def masked_load_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1272 (masked_load node:$src1, node:$src2, node:$src3), [{
1273 // Use the node type to determine the size the alignment needs to match.
1274 // We can't use memory VT because type widening changes the node VT, but
1275 // not the memory VT.
1276 auto *Ld = cast<MaskedLoadSDNode>(N);
1277 return Ld->getAlign() >= Ld->getValueType(0).getStoreSize();
1280 def X86mExpandingLoad : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1281 (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
1282 return cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
1283 cast<MaskedLoadSDNode>(N)->isUnindexed();
1286 // Masked store fragments.
1287 // X86mstore can't be implemented in core DAG files because some targets
1288 // do not support vector types (llvm-tblgen will fail).
1289 def masked_store : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1290 (masked_st node:$src1, node:$src2, undef, node:$src3), [{
1291 return !cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
1292 !cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
1293 cast<MaskedStoreSDNode>(N)->isUnindexed();
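// Illustrative sketch (assumption): a masked-store pattern in X86InstrAVX512.td
// could use this fragment roughly as
//   def : Pat<(masked_store (v4f32 VR128X:$src), addr:$dst, VK4WM:$mask),
//             (VMOVUPSZ128mrk addr:$dst, VK4WM:$mask, VR128X:$src)>;
// Instruction and register-class names here are assumptions for illustration.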
1296 def masked_store_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1297 (masked_store node:$src1, node:$src2, node:$src3), [{
1298 // Use the node type to determine the size the alignment needs to match.
1299 // We can't use memory VT because type widening changes the node VT, but
1300 // not the memory VT.
1301 auto *St = cast<MaskedStoreSDNode>(N);
1302 return St->getAlign() >= St->getOperand(1).getValueType().getStoreSize();
1305 def X86mCompressingStore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1306 (masked_st node:$src1, node:$src2, undef, node:$src3), [{
1307 return cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
1308 cast<MaskedStoreSDNode>(N)->isUnindexed();
// Masked truncstore fragments.
// X86mtruncstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
1314 def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1315 (masked_st node:$src1, node:$src2, undef, node:$src3), [{
1316 return cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
1317 cast<MaskedStoreSDNode>(N)->isUnindexed();
1319 def masked_truncstorevi8 :
1320 PatFrag<(ops node:$src1, node:$src2, node:$src3),
1321 (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
1322 return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
1324 def masked_truncstorevi16 :
1325 PatFrag<(ops node:$src1, node:$src2, node:$src3),
1326 (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
1327 return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
1329 def masked_truncstorevi32 :
1330 PatFrag<(ops node:$src1, node:$src2, node:$src3),
1331 (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
1332 return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
1335 def X86TruncSStore : SDNode<"X86ISD::VTRUNCSTORES", SDTStore,
1336 [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
1338 def X86TruncUSStore : SDNode<"X86ISD::VTRUNCSTOREUS", SDTStore,
1339 [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
1341 def X86MTruncSStore : SDNode<"X86ISD::VMTRUNCSTORES", SDTX86MaskedStore,
1342 [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
1344 def X86MTruncUSStore : SDNode<"X86ISD::VMTRUNCSTOREUS", SDTX86MaskedStore,
1345 [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
1347 def truncstore_s_vi8 : PatFrag<(ops node:$val, node:$ptr),
1348 (X86TruncSStore node:$val, node:$ptr), [{
1349 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
1352 def truncstore_us_vi8 : PatFrag<(ops node:$val, node:$ptr),
1353 (X86TruncUSStore node:$val, node:$ptr), [{
1354 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
1357 def truncstore_s_vi16 : PatFrag<(ops node:$val, node:$ptr),
1358 (X86TruncSStore node:$val, node:$ptr), [{
1359 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
1362 def truncstore_us_vi16 : PatFrag<(ops node:$val, node:$ptr),
1363 (X86TruncUSStore node:$val, node:$ptr), [{
1364 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
1367 def truncstore_s_vi32 : PatFrag<(ops node:$val, node:$ptr),
1368 (X86TruncSStore node:$val, node:$ptr), [{
1369 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
1372 def truncstore_us_vi32 : PatFrag<(ops node:$val, node:$ptr),
1373 (X86TruncUSStore node:$val, node:$ptr), [{
1374 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
1377 def masked_truncstore_s_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1378 (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
1379 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
1382 def masked_truncstore_us_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1383 (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
1384 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
1387 def masked_truncstore_s_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1388 (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
1389 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
1392 def masked_truncstore_us_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1393 (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
1394 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
1397 def masked_truncstore_s_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1398 (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
1399 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
1402 def masked_truncstore_us_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
1403 (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
1404 return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
1407 def X86Vfpclasss_su : PatFrag<(ops node:$src1, node:$src2),
1408 (X86Vfpclasss node:$src1, node:$src2), [{
1409 return N->hasOneUse();
1412 def X86Vfpclass_su : PatFrag<(ops node:$src1, node:$src2),
1413 (X86Vfpclass node:$src1, node:$src2), [{
1414 return N->hasOneUse();
1417 // These nodes use 'vnot' instead of 'not' to support vectors.
1418 def vandn : PatFrag<(ops node:$i0, node:$i1), (and (vnot node:$i0), node:$i1)>;
1419 def vxnor : PatFrag<(ops node:$i0, node:$i1), (vnot (xor node:$i0, node:$i1))>;
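// Illustrative sketch (assumption): vandn lets integer-domain patterns express
// "and-not" directly, e.g. something like
//   def : Pat<(v2i64 (vandn VR128:$src1, VR128:$src2)),
//             (PANDNrr VR128:$src1, VR128:$src2)>;
// where PANDNrr is assumed to be defined in X86InstrSSE.td.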
// Used for matching masked operations. Ensures the operation part only has a
// single use.
1423 def vselect_mask : PatFrag<(ops node:$mask, node:$src1, node:$src2),
1424 (vselect node:$mask, node:$src1, node:$src2), [{
1425 return isProfitableToFormMaskedOp(N);
1428 def X86selects_mask : PatFrag<(ops node:$mask, node:$src1, node:$src2),
1429 (X86selects node:$mask, node:$src1, node:$src2), [{
1430 return isProfitableToFormMaskedOp(N);
1433 def X86cmpms_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1434 (X86cmpms node:$src1, node:$src2, node:$cc), [{
1435 return N->hasOneUse();
1437 def X86cmpmsSAE_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1438 (X86cmpmsSAE node:$src1, node:$src2, node:$cc), [{
1439 return N->hasOneUse();
// PatFrags that contain a select and a truncate op. They take operands in the
// same order as X86vmtrunc, X86vmtruncs, X86vmtruncus. This allows us to pass
// either to the multiclasses.
1445 def select_trunc : PatFrag<(ops node:$src, node:$src0, node:$mask),
1446 (vselect_mask node:$mask,
1447 (trunc node:$src), node:$src0)>;
1448 def select_truncs : PatFrag<(ops node:$src, node:$src0, node:$mask),
1449 (vselect_mask node:$mask,
1450 (X86vtruncs node:$src), node:$src0)>;
1451 def select_truncus : PatFrag<(ops node:$src, node:$src0, node:$mask),
1452 (vselect_mask node:$mask,
1453 (X86vtruncus node:$src), node:$src0)>;
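// For reference (derived from the definitions above), the fragment
//   (select_trunc (v8i32 VR256X:$src), (v8i16 VR128X:$src0), VK8WM:$mask)
// expands to
//   (vselect_mask VK8WM:$mask, (trunc VR256X:$src), (v8i16 VR128X:$src0))
// so the same multiclass operand order works for all three fragments.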
1455 def X86Vpshufbitqmb_su : PatFrag<(ops node:$src1, node:$src2),
1456 (X86Vpshufbitqmb node:$src1, node:$src2), [{
1457 return N->hasOneUse();
1460 def X86pcmpgtm : PatFrag<(ops node:$src1, node:$src2),
1461 (setcc node:$src1, node:$src2, SETGT)>;
1463 def X86pcmpm_imm : SDNodeXForm<setcc, [{
1464 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1465 uint8_t SSECC = X86::getVPCMPImmForCond(CC);
1466 return getI8Imm(SSECC, SDLoc(N));
1469 // Swapped operand version of the above.
1470 def X86pcmpm_imm_commute : SDNodeXForm<setcc, [{
1471 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1472 uint8_t SSECC = X86::getVPCMPImmForCond(CC);
1473 SSECC = X86::getSwappedVPCMPImm(SSECC);
1474 return getI8Imm(SSECC, SDLoc(N));
1477 def X86pcmpm : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1478 (setcc node:$src1, node:$src2, node:$cc), [{
1479 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1480 return !ISD::isUnsignedIntSetCC(CC);
1483 def X86pcmpm_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1484 (setcc node:$src1, node:$src2, node:$cc), [{
1485 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1486 return N->hasOneUse() && !ISD::isUnsignedIntSetCC(CC);
1489 def X86pcmpum : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1490 (setcc node:$src1, node:$src2, node:$cc), [{
1491 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1492 return ISD::isUnsignedIntSetCC(CC);
1495 def X86pcmpum_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1496 (setcc node:$src1, node:$src2, node:$cc), [{
1497 ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
1498 return N->hasOneUse() && ISD::isUnsignedIntSetCC(CC);
1501 def X86cmpm_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
1502 (X86cmpm node:$src1, node:$src2, node:$cc), [{
1503 return N->hasOneUse();
1506 def X86cmpm_imm_commute : SDNodeXForm<timm, [{
1507 uint8_t Imm = X86::getSwappedVCMPImm(N->getZExtValue() & 0x1f);
1508 return getI8Imm(Imm, SDLoc(N));
1511 def X86vpmaddwd_su : PatFrag<(ops node:$lhs, node:$rhs),
1512 (X86vpmaddwd node:$lhs, node:$rhs), [{
1513 return N->hasOneUse();