[X86] Preserve volatile ATOMIC_LOAD_OR nodes
[llvm-project.git] / llvm / lib / Target / X86 / X86LegalizerInfo.cpp
bloba4a247f85f3d727149006ff84bc67153b60c2a95
1 //===- X86LegalizerInfo.cpp --------------------------------------*- C++ -*-==//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 /// \file
9 /// This file implements the targeting of the Machinelegalizer class for X86.
10 /// \todo This should be generated by TableGen.
11 //===----------------------------------------------------------------------===//
13 #include "X86LegalizerInfo.h"
14 #include "X86Subtarget.h"
15 #include "X86TargetMachine.h"
16 #include "llvm/CodeGen/GlobalISel/LegalizerHelper.h"
17 #include "llvm/CodeGen/TargetOpcodes.h"
18 #include "llvm/CodeGen/ValueTypes.h"
19 #include "llvm/IR/DerivedTypes.h"
20 #include "llvm/IR/Type.h"
22 using namespace llvm;
23 using namespace TargetOpcode;
24 using namespace LegalizeActions;
25 using namespace LegalityPredicates;
// Constructs the GlobalISel legalization rule set for X86/X86-64.
// Every rule below is keyed on the subtarget feature flags captured at the
// top: wider vector types are only declared legal when the register class
// that holds them (XMM/YMM/ZMM) actually exists on this subtarget.
// NOTE(review): this snippet comes from a line-numbered web capture; several
// closing-brace lines of the original file were dropped by the scrape, so
// the text as shown is not directly compilable. Code is left byte-identical.
27 X86LegalizerInfo::X86LegalizerInfo(const X86Subtarget &STI,
28 const X86TargetMachine &TM)
29 : Subtarget(STI) {
// Cache the ISA feature predicates used throughout the rule definitions.
// DQI/BWI are only meaningful on AVX-512 targets, hence the conjunction.
31 bool Is64Bit = Subtarget.is64Bit();
32 bool HasSSE1 = Subtarget.hasSSE1();
33 bool HasSSE2 = Subtarget.hasSSE2();
34 bool HasSSE41 = Subtarget.hasSSE41();
35 bool HasAVX = Subtarget.hasAVX();
36 bool HasAVX2 = Subtarget.hasAVX2();
37 bool HasAVX512 = Subtarget.hasAVX512();
38 bool HasVLX = Subtarget.hasVLX();
39 bool HasDQI = Subtarget.hasAVX512() && Subtarget.hasDQI();
40 bool HasBWI = Subtarget.hasAVX512() && Subtarget.hasBWI();
// LLT shorthands for the scalar, pointer and fixed-vector types referenced
// below. sMaxScalar is the widest legal GPR scalar (s64 on x86-64, else s32).
42 const LLT p0 = LLT::pointer(0, TM.getPointerSizeInBits(0));
43 const LLT s1 = LLT::scalar(1);
44 const LLT s8 = LLT::scalar(8);
45 const LLT s16 = LLT::scalar(16);
46 const LLT s32 = LLT::scalar(32);
47 const LLT s64 = LLT::scalar(64);
48 const LLT s80 = LLT::scalar(80);
49 const LLT s128 = LLT::scalar(128);
50 const LLT sMaxScalar = Subtarget.is64Bit() ? s64 : s32;
51 const LLT v2s32 = LLT::fixed_vector(2, 32);
52 const LLT v4s8 = LLT::fixed_vector(4, 8);
// 128-bit (XMM) vector types.
55 const LLT v16s8 = LLT::fixed_vector(16, 8);
56 const LLT v8s16 = LLT::fixed_vector(8, 16);
57 const LLT v4s32 = LLT::fixed_vector(4, 32);
58 const LLT v2s64 = LLT::fixed_vector(2, 64);
59 const LLT v2p0 = LLT::fixed_vector(2, p0);
// 256-bit (YMM) vector types.
61 const LLT v32s8 = LLT::fixed_vector(32, 8);
62 const LLT v16s16 = LLT::fixed_vector(16, 16);
63 const LLT v8s32 = LLT::fixed_vector(8, 32);
64 const LLT v4s64 = LLT::fixed_vector(4, 64);
65 const LLT v4p0 = LLT::fixed_vector(4, p0);
// 512-bit (ZMM) vector types.
67 const LLT v64s8 = LLT::fixed_vector(64, 8);
68 const LLT v32s16 = LLT::fixed_vector(32, 16);
69 const LLT v16s32 = LLT::fixed_vector(16, 32);
70 const LLT v8s64 = LLT::fixed_vector(8, 64);
72 // todo: AVX512 bool vector predicate types
74 // implicit/constants
75 getActionDefinitionsBuilder(G_IMPLICIT_DEF)
76 .legalIf([=](const LegalityQuery &Query) -> bool {
77 // 32/64-bits needs support for s64/s128 to handle cases:
78 // s64 = EXTEND (G_IMPLICIT_DEF s32) -> s64 = G_IMPLICIT_DEF
79 // s128 = EXTEND (G_IMPLICIT_DEF s32/s64) -> s128 = G_IMPLICIT_DEF
80 return typeInSet(0, {p0, s1, s8, s16, s32, s64})(Query) ||
81 (Is64Bit && typeInSet(0, {s128})(Query));
82 });
// Constants are legal at GPR widths; s64 constants only on 64-bit targets.
// Odd widths are widened to the next power of two, then clamped into
// [s8, sMaxScalar].
84 getActionDefinitionsBuilder(G_CONSTANT)
85 .legalIf([=](const LegalityQuery &Query) -> bool {
86 return typeInSet(0, {p0, s8, s16, s32})(Query) ||
87 (Is64Bit && typeInSet(0, {s64})(Query));
89 .widenScalarToNextPow2(0, /*Min=*/8)
90 .clampScalar(0, s8, sMaxScalar)
92 // merge/unmerge
// The operand index of the wide vs. narrow type is swapped between the two
// opcodes: G_MERGE_VALUES defines the big type, G_UNMERGE_VALUES uses it.
93 for (unsigned Op : {G_MERGE_VALUES, G_UNMERGE_VALUES}) {
94 unsigned BigTyIdx = Op == G_MERGE_VALUES ? 0 : 1;
95 unsigned LitTyIdx = Op == G_MERGE_VALUES ? 1 : 0;
96 getActionDefinitionsBuilder(Op)
97 .widenScalarToNextPow2(LitTyIdx, /*Min=*/8)
98 .widenScalarToNextPow2(BigTyIdx, /*Min=*/16)
99 .minScalar(LitTyIdx, s8)
100 .minScalar(BigTyIdx, s32)
101 .legalIf([=](const LegalityQuery &Q) {
// Legal only for power-of-two big/little size pairs in these ranges.
102 switch (Q.Types[BigTyIdx].getSizeInBits()) {
103 case 16:
104 case 32:
105 case 64:
106 case 128:
107 case 256:
108 case 512:
109 break;
110 default:
111 return false;
113 switch (Q.Types[LitTyIdx].getSizeInBits()) {
114 case 8:
115 case 16:
116 case 32:
117 case 64:
118 case 128:
119 case 256:
120 return true;
121 default:
122 return false;
127 // integer addition/subtraction
// Scalars follow the GPR widths; vector legality tracks the widest
// available vector ISA (SSE2 -> 128-bit, AVX2 -> 256-bit, AVX-512 [+BWI
// for byte/word elements] -> 512-bit).
128 getActionDefinitionsBuilder({G_ADD, G_SUB})
129 .legalIf([=](const LegalityQuery &Query) -> bool {
130 if (typeInSet(0, {s8, s16, s32})(Query))
131 return true;
132 if (Is64Bit && typeInSet(0, {s64})(Query))
133 return true;
134 if (HasSSE2 && typeInSet(0, {v16s8, v8s16, v4s32, v2s64})(Query))
135 return true;
136 if (HasAVX2 && typeInSet(0, {v32s8, v16s16, v8s32, v4s64})(Query))
137 return true;
138 if (HasAVX512 && typeInSet(0, {v16s32, v8s64})(Query))
139 return true;
140 if (HasBWI && typeInSet(0, {v64s8, v32s16})(Query))
141 return true;
142 return false;
// Clamp vectors to [one XMM register, widest register this target has],
// i.e. 128 bits minimum and 512/256/128 bits maximum depending on features.
144 .clampMinNumElements(0, s8, 16)
145 .clampMinNumElements(0, s16, 8)
146 .clampMinNumElements(0, s32, 4)
147 .clampMinNumElements(0, s64, 2)
148 .clampMaxNumElements(0, s8, HasBWI ? 64 : (HasAVX2 ? 32 : 16))
149 .clampMaxNumElements(0, s16, HasBWI ? 32 : (HasAVX2 ? 16 : 8))
150 .clampMaxNumElements(0, s32, HasAVX512 ? 16 : (HasAVX2 ? 8 : 4))
151 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX2 ? 4 : 2))
152 .widenScalarToNextPow2(0, /*Min=*/32)
153 .clampScalar(0, s8, sMaxScalar)
154 .scalarize(0);
// Add/sub with carry: scalar only, carry flag modeled as s1.
156 getActionDefinitionsBuilder({G_UADDE, G_UADDO, G_USUBE, G_USUBO})
157 .legalIf([=](const LegalityQuery &Query) -> bool {
158 return typePairInSet(0, 1, {{s8, s1}, {s16, s1}, {s32, s1}})(Query) ||
159 (Is64Bit && typePairInSet(0, 1, {{s64, s1}})(Query));
161 .widenScalarToNextPow2(0, /*Min=*/32)
162 .clampScalar(0, s8, sMaxScalar)
163 .clampScalar(1, s1, s1)
164 .scalarize(0);
166 // integer multiply
// Vector multiply availability is finer-grained than add/sub: v8s16 needs
// SSE2, v4s32 needs SSE4.1, and 64-bit-element multiplies need AVX512DQ
// (plus VLX for the sub-512-bit forms).
167 getActionDefinitionsBuilder(G_MUL)
168 .legalIf([=](const LegalityQuery &Query) -> bool {
169 if (typeInSet(0, {s8, s16, s32})(Query))
170 return true;
171 if (Is64Bit && typeInSet(0, {s64})(Query))
172 return true;
173 if (HasSSE2 && typeInSet(0, {v8s16})(Query))
174 return true;
175 if (HasSSE41 && typeInSet(0, {v4s32})(Query))
176 return true;
177 if (HasAVX2 && typeInSet(0, {v16s16, v8s32})(Query))
178 return true;
179 if (HasAVX512 && typeInSet(0, {v16s32})(Query))
180 return true;
181 if (HasDQI && typeInSet(0, {v8s64})(Query))
182 return true;
183 if (HasDQI && HasVLX && typeInSet(0, {v2s64, v4s64})(Query))
184 return true;
185 if (HasBWI && typeInSet(0, {v32s16})(Query))
186 return true;
187 return false;
// Without VLX only the 512-bit (8 x s64) multiply exists, so widen s64
// vectors all the way to 8 elements in that case.
189 .clampMinNumElements(0, s16, 8)
190 .clampMinNumElements(0, s32, 4)
191 .clampMinNumElements(0, s64, HasVLX ? 2 : 8)
192 .clampMaxNumElements(0, s16, HasBWI ? 32 : (HasAVX2 ? 16 : 8))
193 .clampMaxNumElements(0, s32, HasAVX512 ? 16 : (HasAVX2 ? 8 : 4))
194 .clampMaxNumElements(0, s64, 8)
195 .widenScalarToNextPow2(0, /*Min=*/32)
196 .clampScalar(0, s8, sMaxScalar)
197 .scalarize(0);
// High-half multiply: scalar GPR widths only.
199 getActionDefinitionsBuilder({G_SMULH, G_UMULH})
200 .legalIf([=](const LegalityQuery &Query) -> bool {
201 return typeInSet(0, {s8, s16, s32})(Query) ||
202 (Is64Bit && typeInSet(0, {s64})(Query));
204 .widenScalarToNextPow2(0, /*Min=*/32)
205 .clampScalar(0, s8, sMaxScalar)
206 .scalarize(0);
208 // integer divisions
209 getActionDefinitionsBuilder({G_SDIV, G_SREM, G_UDIV, G_UREM})
210 .legalIf([=](const LegalityQuery &Query) -> bool {
211 return typeInSet(0, {s8, s16, s32})(Query) ||
212 (Is64Bit && typeInSet(0, {s64})(Query));
214 .clampScalar(0, s8, sMaxScalar);
216 // integer shifts
// Shift amount (operand 1) is always an s8, matching the CL register form.
217 getActionDefinitionsBuilder({G_SHL, G_LSHR, G_ASHR})
218 .legalIf([=](const LegalityQuery &Query) -> bool {
219 return typePairInSet(0, 1, {{s8, s8}, {s16, s8}, {s32, s8}})(Query) ||
220 (Is64Bit && typePairInSet(0, 1, {{s64, s8}})(Query));
222 .clampScalar(0, s8, sMaxScalar)
223 .clampScalar(1, s8, s8);
225 // integer logic
// Note: 256-bit logic ops only need AVX (not AVX2) since they are
// float-domain-capable; the clamps below mirror that.
226 getActionDefinitionsBuilder({G_AND, G_OR, G_XOR})
227 .legalIf([=](const LegalityQuery &Query) -> bool {
228 if (typeInSet(0, {s8, s16, s32})(Query))
229 return true;
230 if (Is64Bit && typeInSet(0, {s64})(Query))
231 return true;
232 if (HasSSE2 && typeInSet(0, {v16s8, v8s16, v4s32, v2s64})(Query))
233 return true;
234 if (HasAVX && typeInSet(0, {v32s8, v16s16, v8s32, v4s64})(Query))
235 return true;
236 if (HasAVX512 && typeInSet(0, {v64s8, v32s16, v16s32, v8s64})(Query))
237 return true;
238 return false;
240 .clampMinNumElements(0, s8, 16)
241 .clampMinNumElements(0, s16, 8)
242 .clampMinNumElements(0, s32, 4)
243 .clampMinNumElements(0, s64, 2)
244 .clampMaxNumElements(0, s8, HasAVX512 ? 64 : (HasAVX ? 32 : 16))
245 .clampMaxNumElements(0, s16, HasAVX512 ? 32 : (HasAVX ? 16 : 8))
246 .clampMaxNumElements(0, s32, HasAVX512 ? 16 : (HasAVX ? 8 : 4))
247 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX ? 4 : 2))
248 .widenScalarToNextPow2(0, /*Min=*/32)
249 .clampScalar(0, s8, sMaxScalar)
250 .scalarize(0);
252 // integer comparison
// Comparison result is always s8; the compared operand may be any GPR
// width or a pointer.
253 const std::initializer_list<LLT> IntTypes32 = {s8, s16, s32, p0};
254 const std::initializer_list<LLT> IntTypes64 = {s8, s16, s32, s64, p0};
256 getActionDefinitionsBuilder(G_ICMP)
257 .legalForCartesianProduct({s8}, Is64Bit ? IntTypes64 : IntTypes32)
258 .clampScalar(0, s8, s8);
260 // bswap
262 getActionDefinitionsBuilder(G_BSWAP)
262 .legalIf([=](const LegalityQuery &Query) {
263 return Query.Types[0] == s32 ||
264 (Subtarget.is64Bit() && Query.Types[0] == s64);
266 .widenScalarToNextPow2(0, /*Min=*/32)
267 .clampScalar(0, s32, sMaxScalar);
269 // popcount
// Only legal with the POPCNT feature; otherwise it is widened/clamped and
// eventually handled by generic lowering.
270 getActionDefinitionsBuilder(G_CTPOP)
271 .legalIf([=](const LegalityQuery &Query) -> bool {
272 return Subtarget.hasPOPCNT() &&
273 (typePairInSet(0, 1, {{s16, s16}, {s32, s32}})(Query) ||
274 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
276 .widenScalarToNextPow2(1, /*Min=*/16)
277 .clampScalar(1, s16, sMaxScalar)
278 .scalarSameSizeAs(0, 1);
280 // count leading zeros (LZCNT)
281 getActionDefinitionsBuilder(G_CTLZ)
282 .legalIf([=](const LegalityQuery &Query) -> bool {
283 return Subtarget.hasLZCNT() &&
284 (typePairInSet(0, 1, {{s16, s16}, {s32, s32}})(Query) ||
285 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
287 .widenScalarToNextPow2(1, /*Min=*/16)
288 .clampScalar(1, s16, sMaxScalar)
289 .scalarSameSizeAs(0, 1);
291 // count trailing zeros
// G_CTTZ_ZERO_UNDEF maps to plain BSF, so it is legal even without BMI;
// G_CTTZ (defined result for zero input) requires BMI's TZCNT.
292 getActionDefinitionsBuilder({G_CTTZ_ZERO_UNDEF, G_CTTZ})
293 .legalIf([=](const LegalityQuery &Query) -> bool {
294 return (Query.Opcode == G_CTTZ_ZERO_UNDEF || Subtarget.hasBMI()) &&
295 (typePairInSet(0, 1, {{s16, s16}, {s32, s32}})(Query) ||
296 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
298 .widenScalarToNextPow2(1, /*Min=*/16)
299 .clampScalar(1, s16, sMaxScalar)
300 .scalarSameSizeAs(0, 1);
302 // control flow
303 getActionDefinitionsBuilder(G_PHI)
304 .legalIf([=](const LegalityQuery &Query) -> bool {
305 return typeInSet(0, {s8, s16, s32, p0})(Query) ||
306 (Is64Bit && typeInSet(0, {s64})(Query)) ||
307 (HasSSE1 && typeInSet(0, {v16s8, v8s16, v4s32, v2s64})(Query)) ||
308 (HasAVX && typeInSet(0, {v32s8, v16s16, v8s32, v4s64})(Query)) ||
309 (HasAVX512 &&
310 typeInSet(0, {v64s8, v32s16, v16s32, v8s64})(Query));
312 .clampMinNumElements(0, s8, 16)
313 .clampMinNumElements(0, s16, 8)
314 .clampMinNumElements(0, s32, 4)
315 .clampMinNumElements(0, s64, 2)
316 .clampMaxNumElements(0, s8, HasAVX512 ? 64 : (HasAVX ? 32 : 16))
317 .clampMaxNumElements(0, s16, HasAVX512 ? 32 : (HasAVX ? 16 : 8))
318 .clampMaxNumElements(0, s32, HasAVX512 ? 16 : (HasAVX ? 8 : 4))
319 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX ? 4 : 2))
320 .widenScalarToNextPow2(0, /*Min=*/32)
321 .clampScalar(0, s8, sMaxScalar)
322 .scalarize(0);
324 getActionDefinitionsBuilder(G_BRCOND).legalFor({s1});
326 // pointer handling
327 const std::initializer_list<LLT> PtrTypes32 = {s1, s8, s16, s32};
328 const std::initializer_list<LLT> PtrTypes64 = {s1, s8, s16, s32, s64};
330 getActionDefinitionsBuilder(G_PTRTOINT)
331 .legalForCartesianProduct(Is64Bit ? PtrTypes64 : PtrTypes32, {p0})
332 .maxScalar(0, sMaxScalar)
333 .widenScalarToNextPow2(0, /*Min*/ 8);
335 getActionDefinitionsBuilder(G_INTTOPTR).legalFor({{p0, sMaxScalar}});
// G_PTR_ADD offsets are clamped to [s32, sMaxScalar] — x86 addressing
// never uses narrower displacements at this level.
337 getActionDefinitionsBuilder(G_PTR_ADD)
338 .legalIf([=](const LegalityQuery &Query) -> bool {
339 return typePairInSet(0, 1, {{p0, s32}})(Query) ||
340 (Is64Bit && typePairInSet(0, 1, {{p0, s64}})(Query));
342 .widenScalarToNextPow2(1, /*Min*/ 32)
343 .clampScalar(1, s32, sMaxScalar);
345 getActionDefinitionsBuilder({G_FRAME_INDEX, G_GLOBAL_VALUE}).legalFor({p0});
347 // load/store: add more corner cases
// Each row is {result/value type, pointer type, memory type, min align}.
// Narrow memory types with a wider register type model extending loads /
// truncating stores. Vector rows are gated on the ISA that provides the
// register class.
348 for (unsigned Op : {G_LOAD, G_STORE}) {
349 auto &Action = getActionDefinitionsBuilder(Op);
350 Action.legalForTypesWithMemDesc({{s8, p0, s1, 1},
351 {s8, p0, s8, 1},
352 {s16, p0, s8, 1},
353 {s16, p0, s16, 1},
354 {s32, p0, s8, 1},
355 {s32, p0, s16, 1},
356 {s32, p0, s32, 1},
357 {s80, p0, s80, 1},
358 {p0, p0, p0, 1},
359 {v4s8, p0, v4s8, 1}});
360 if (Is64Bit)
361 Action.legalForTypesWithMemDesc({{s64, p0, s8, 1},
362 {s64, p0, s16, 1},
363 {s64, p0, s32, 1},
364 {s64, p0, s64, 1},
365 {v2s32, p0, v2s32, 1}});
366 if (HasSSE1)
367 Action.legalForTypesWithMemDesc({{v16s8, p0, v16s8, 1},
368 {v8s16, p0, v8s16, 1},
369 {v4s32, p0, v4s32, 1},
370 {v2s64, p0, v2s64, 1},
371 {v2p0, p0, v2p0, 1}});
372 if (HasAVX)
373 Action.legalForTypesWithMemDesc({{v32s8, p0, v32s8, 1},
374 {v16s16, p0, v16s16, 1},
375 {v8s32, p0, v8s32, 1},
376 {v4s64, p0, v4s64, 1},
377 {v4p0, p0, v4p0, 1}});
378 if (HasAVX512)
379 Action.legalForTypesWithMemDesc({{v64s8, p0, v64s8, 1},
380 {v32s16, p0, v32s16, 1},
381 {v16s32, p0, v16s32, 1},
382 {v8s64, p0, v8s64, 1}});
383 Action.widenScalarToNextPow2(0, /*Min=*/8).clampScalar(0, s8, sMaxScalar);
// Explicit sign/zero-extending loads; scalar GPR destinations only so far.
386 for (unsigned Op : {G_SEXTLOAD, G_ZEXTLOAD}) {
387 auto &Action = getActionDefinitionsBuilder(Op);
388 Action.legalForTypesWithMemDesc({{s16, p0, s8, 1},
389 {s32, p0, s8, 1},
390 {s32, p0, s16, 1}});
391 if (Is64Bit)
392 Action.legalForTypesWithMemDesc({{s64, p0, s8, 1},
393 {s64, p0, s16, 1},
394 {s64, p0, s32, 1}});
395 // TODO - SSE41/AVX2/AVX512F/AVX512BW vector extensions
398 // sext, zext, and anyext
// s128 is allowed for G_ANYEXT only (feeds MERGE/UNMERGE-style expansion);
// see the G_IMPLICIT_DEF comment above for the related s64/s128 cases.
399 getActionDefinitionsBuilder({G_SEXT, G_ZEXT, G_ANYEXT})
400 .legalIf([=](const LegalityQuery &Query) {
401 return typeInSet(0, {s8, s16, s32})(Query) ||
402 (Query.Opcode == G_ANYEXT && Query.Types[0] == s128) ||
403 (Is64Bit && Query.Types[0] == s64);
405 .widenScalarToNextPow2(0, /*Min=*/8)
406 .clampScalar(0, s8, sMaxScalar)
407 .widenScalarToNextPow2(1, /*Min=*/8)
408 .clampScalar(1, s8, sMaxScalar);
410 getActionDefinitionsBuilder(G_SEXT_INREG).lower();
412 // fp constants
// FP constants require SSE register support at the matching width.
413 getActionDefinitionsBuilder(G_FCONSTANT)
414 .legalIf([=](const LegalityQuery &Query) -> bool {
415 return (HasSSE1 && typeInSet(0, {s32})(Query)) ||
416 (HasSSE2 && typeInSet(0, {s64})(Query));
419 // fp arithmetic
420 getActionDefinitionsBuilder({G_FADD, G_FSUB, G_FMUL, G_FDIV})
421 .legalIf([=](const LegalityQuery &Query) {
422 return (HasSSE1 && typeInSet(0, {s32, v4s32})(Query)) ||
423 (HasSSE2 && typeInSet(0, {s64, v2s64})(Query)) ||
424 (HasAVX && typeInSet(0, {v8s32, v4s64})(Query)) ||
425 (HasAVX512 && typeInSet(0, {v16s32, v8s64})(Query));
428 // fp comparison
// Result is s8 (like G_ICMP); operand width limited by SSE level.
429 getActionDefinitionsBuilder(G_FCMP)
430 .legalIf([=](const LegalityQuery &Query) {
431 return (HasSSE1 && typePairInSet(0, 1, {{s8, s32}})(Query)) ||
432 (HasSSE2 && typePairInSet(0, 1, {{s8, s64}})(Query));
434 .clampScalar(0, s8, s8)
435 .clampScalar(1, s32, HasSSE2 ? s64 : s32)
436 .widenScalarToNextPow2(1);
438 // fp conversions
439 getActionDefinitionsBuilder(G_FPEXT).legalIf([=](const LegalityQuery &Query) {
440 return (HasSSE2 && typePairInSet(0, 1, {{s64, s32}})(Query)) ||
441 (HasAVX && typePairInSet(0, 1, {{v4s64, v4s32}})(Query)) ||
442 (HasAVX512 && typePairInSet(0, 1, {{v8s64, v8s32}})(Query));
445 getActionDefinitionsBuilder(G_FPTRUNC).legalIf(
446 [=](const LegalityQuery &Query) {
447 return (HasSSE2 && typePairInSet(0, 1, {{s32, s64}})(Query)) ||
448 (HasAVX && typePairInSet(0, 1, {{v4s32, v4s64}})(Query)) ||
449 (HasAVX512 && typePairInSet(0, 1, {{v8s32, v8s64}})(Query));
// int -> fp: source width follows GPRs (s64 source only on 64-bit),
// destination width follows SSE level (s64 result needs SSE2).
452 getActionDefinitionsBuilder(G_SITOFP)
453 .legalIf([=](const LegalityQuery &Query) {
454 return (HasSSE1 &&
455 (typePairInSet(0, 1, {{s32, s32}})(Query) ||
456 (Is64Bit && typePairInSet(0, 1, {{s32, s64}})(Query)))) ||
457 (HasSSE2 &&
458 (typePairInSet(0, 1, {{s64, s32}})(Query) ||
459 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query))));
461 .clampScalar(1, s32, sMaxScalar)
462 .widenScalarToNextPow2(1)
463 .clampScalar(0, s32, HasSSE2 ? s64 : s32)
464 .widenScalarToNextPow2(0);
// fp -> int: mirror of G_SITOFP with operand roles swapped.
466 getActionDefinitionsBuilder(G_FPTOSI)
467 .legalIf([=](const LegalityQuery &Query) {
468 return (HasSSE1 &&
469 (typePairInSet(0, 1, {{s32, s32}})(Query) ||
470 (Is64Bit && typePairInSet(0, 1, {{s64, s32}})(Query)))) ||
471 (HasSSE2 &&
472 (typePairInSet(0, 1, {{s32, s64}})(Query) ||
473 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query))));
475 .clampScalar(1, s32, HasSSE2 ? s64 : s32)
476 .widenScalarToNextPow2(0)
477 .clampScalar(0, s32, sMaxScalar)
478 .widenScalarToNextPow2(1);
480 // vector ops
// Subvector extract/insert: the sub/full operand index depends on the
// opcode, analogous to the merge/unmerge loop above.
481 getActionDefinitionsBuilder({G_EXTRACT, G_INSERT})
482 .legalIf([=](const LegalityQuery &Query) {
483 unsigned SubIdx = Query.Opcode == G_EXTRACT ? 0 : 1;
484 unsigned FullIdx = Query.Opcode == G_EXTRACT ? 1 : 0;
485 return (HasAVX && typePairInSet(SubIdx, FullIdx,
486 {{v16s8, v32s8},
487 {v8s16, v16s16},
488 {v4s32, v8s32},
489 {v2s64, v4s64}})(Query)) ||
490 (HasAVX512 && typePairInSet(SubIdx, FullIdx,
491 {{v16s8, v64s8},
492 {v32s8, v64s8},
493 {v8s16, v32s16},
494 {v16s16, v32s16},
495 {v4s32, v16s32},
496 {v8s32, v16s32},
497 {v2s64, v8s64},
498 {v4s64, v8s64}})(Query));
501 // todo: only permit dst types up to max legal vector register size?
502 getActionDefinitionsBuilder(G_CONCAT_VECTORS)
503 .legalIf([=](const LegalityQuery &Query) {
504 return (HasSSE1 && typePairInSet(1, 0,
505 {{v16s8, v32s8},
506 {v8s16, v16s16},
507 {v4s32, v8s32},
508 {v2s64, v4s64}})(Query)) ||
509 (HasAVX && typePairInSet(1, 0,
510 {{v16s8, v64s8},
511 {v32s8, v64s8},
512 {v8s16, v32s16},
513 {v16s16, v32s16},
514 {v4s32, v16s32},
515 {v8s32, v16s32},
516 {v2s64, v8s64},
517 {v4s64, v8s64}})(Query));
520 // todo: vectors and address spaces
// Condition operand (index 1) is pinned to s32.
521 getActionDefinitionsBuilder(G_SELECT)
522 .legalFor({{s8, s32}, {s16, s32}, {s32, s32}, {s64, s32},
523 {p0, s32}})
524 .widenScalarToNextPow2(0, /*Min=*/8)
525 .clampScalar(0, s8, sMaxScalar)
526 .clampScalar(1, s32, s32);
528 // memory intrinsics
529 getActionDefinitionsBuilder({G_MEMCPY, G_MEMMOVE, G_MEMSET}).libcall();
531 // fp intrinsics
532 getActionDefinitionsBuilder(G_INTRINSIC_ROUNDEVEN)
533 .scalarize(0)
534 .minScalar(0, LLT::scalar(32))
535 .libcall();
537 getActionDefinitionsBuilder({G_FREEZE, G_CONSTANT_FOLD_BARRIER})
538 .legalFor({s8, s16, s32, s64, p0})
539 .widenScalarToNextPow2(0, /*Min=*/8)
540 .clampScalar(0, s8, sMaxScalar);
// Finalize: build the legacy rule tables and (in asserts builds) verify
// rule-set consistency against the instruction definitions.
542 getLegacyLegalizerInfo().computeTables();
543 verify(*STI.getInstrInfo());
// Target hook for custom intrinsic legalization. Currently a no-op that
// unconditionally reports success — no X86 intrinsics need custom handling
// here yet. (The function's closing brace is outside this capture.)
546 bool X86LegalizerInfo::legalizeIntrinsic(LegalizerHelper &Helper,
547 MachineInstr &MI) const {
548 return true;