//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class.  Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*.  To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"
#include <optional>

using namespace llvm;

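// The intrinsics listed below are lowered to calls into the Objective-C
// runtime, so report that they may lower to a real function call.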
bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}

//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///

iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}

iterator_range<location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  return getWrappedLocation().location_ops();
}

Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  return getWrappedLocation().getVariableLocationOp(OpIdx);
}

Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}

static ValueAsMetadata *getAsMetadata(Value *V) {
  return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
                                       cast<MetadataAsValue>(V)->getMetadata())
                                 : ValueAsMetadata::get(V);
}

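// Replace uses of OldValue among this intrinsic's location operands (and in
// the address operand of a dbg.assign) with NewValue, rebuilding the DIArgList
// when the intrinsic carries more than one location operand.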
void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue,
                                                     bool AllowEmpty) {
  // If OldValue is used as the address part of a dbg.assign intrinsic replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced;

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  if (OldIt == Locations.end()) {
    if (AllowEmpty || DbgAssignAddrReplaced)
      return;
    assert(DbgAssignAddrReplaced &&
           "OldValue must be dbg.assign addr if unused in DIArgList");
    return;
  }

  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}

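// dbg.assign address handling: a "killed" address is represented by an empty
// MDNode rather than a ValueAsMetadata, in which case getAddress() returns
// nullptr.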
Value *DbgAssignIntrinsic::getAddress() const {
  auto *MD = getRawAddress();
  if (auto *V = dyn_cast<ValueAsMetadata>(MD))
    return V->getValue();

  // When the value goes to null, it gets replaced by an empty MDNode.
  assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  return nullptr;
}

void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
}

void DbgAssignIntrinsic::setAddress(Value *V) {
  setOperand(OpAddress,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

void DbgAssignIntrinsic::setKillAddress() {
  if (isKillAddress())
    return;
  setAddress(PoisonValue::get(getAddress()->getType()));
}

bool DbgAssignIntrinsic::isKillAddress() const {
  Value *Addr = getAddress();
  return !Addr || isa<UndefValue>(Addr);
}

void DbgAssignIntrinsic::setValue(Value *V) {
  setOperand(OpValue,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}

ConstantInt *InstrProfCntrInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}

void InstrProfCntrInstBase::setIndex(uint32_t Idx) {
  assert(isa<InstrProfCntrInstBase>(this));
  setArgOperand(3, ConstantInt::get(Type::getInt32Ty(getContext()), Idx));
}

Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}

Value *InstrProfCallsite::getCallee() const {
  if (isa<InstrProfCallsite>(this))
    return getArgOperand(4);
  return nullptr;
}

void InstrProfCallsite::setCallee(Value *Callee) {
  assert(isa<InstrProfCallsite>(this));
  setArgOperand(4, Callee);
}

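// Constrained FP intrinsics encode their rounding mode and exception behavior
// as MDString arguments; the helpers below translate those strings back into
// the corresponding enums.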
std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}

std::optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}

bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  if (Except) {
    if (*Except != fp::ebIgnore)
      return false;
  }

  std::optional<RoundingMode> Rounding = getRoundingMode();
  if (Rounding) {
    if (*Rounding != RoundingMode::NearestTiesToEven)
      return false;
  }

  return true;
}

static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}

FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}

unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
  // All constrained fp intrinsics have "fpexcept" metadata.
  unsigned NumArgs = arg_size() - 1;

  // Some intrinsics have "round" metadata.
  if (Intrinsic::hasConstrainedFPRoundingModeOperand(getIntrinsicID()))
    NumArgs -= 1;

  // Compare intrinsics take their predicate as metadata.
  if (isa<ConstrainedFPCmpIntrinsic>(this))
    NumArgs -= 1;

  return NumArgs;
}

bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  return Intrinsic::isConstrainedFPIntrinsic(I->getIntrinsicID());
}

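// VP (vector-predicated) intrinsic helpers. Operand positions and the mapping
// to functional opcodes/intrinsics are generated from llvm/IR/VPIntrinsics.def.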
ElementCount VPIntrinsic::getStaticVectorLength() const {
  auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
    const auto *VT = cast<VectorType>(T);
    auto ElemCount = VT->getElementCount();
    return ElemCount;
  };

  Value *VPMask = getMaskParam();
  if (!VPMask) {
    assert((getIntrinsicID() == Intrinsic::vp_merge ||
            getIntrinsicID() == Intrinsic::vp_select) &&
           "Unexpected VP intrinsic without mask operand");
    return GetVectorLengthOfType(getType());
  }
  return GetVectorLengthOfType(VPMask->getType());
}

Value *VPIntrinsic::getMaskParam() const {
  if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
    return getArgOperand(*MaskPos);
  return nullptr;
}

void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  setArgOperand(*MaskPos, NewMask);
}

Value *VPIntrinsic::getVectorLengthParam() const {
  if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
    return getArgOperand(*EVLPos);
  return nullptr;
}

void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  setArgOperand(*EVLPos, NewEVL);
}

std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

/// \return the alignment of the pointer used by this load/store/gather or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}

/// \return The pointer operand of this load, store, gather or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(*PtrParamOpt);
  return nullptr;
}

std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
  case Intrinsic::vp_store:
  case Intrinsic::vp_scatter:
  case Intrinsic::experimental_vp_strided_store:
    return 1;
  case Intrinsic::vp_load:
  case Intrinsic::vp_gather:
  case Intrinsic::experimental_vp_strided_load:
    return 0;
  }
  return std::nullopt;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(*DataParamOpt);
}

std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
  case Intrinsic::vp_store:
  case Intrinsic::vp_scatter:
  case Intrinsic::experimental_vp_strided_store:
    return 0;
  }
  return std::nullopt;
}

constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  return ::isVPIntrinsic(ID);
}

// Equivalent non-predicated opcode
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  return ::getFunctionalOpcodeForVP(ID);
}

// Equivalent non-predicated intrinsic ID
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<Intrinsic::ID>
VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  return ::getFunctionalIntrinsicIDForVP(ID);
}

constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...)                                 \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) ||           \
                getFunctionalOpcodeForVP(Intrinsic::VPID) ||                   \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"

// Equivalent non-predicated constrained intrinsic
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

constexpr static Intrinsic::ID getForIntrinsic(Intrinsic::ID Id) {
  if (::isVPIntrinsic(Id))
    return Id;

  switch (Id) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) case Intrinsic::INTRIN:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

Intrinsic::ID VPIntrinsic::getForIntrinsic(Intrinsic::ID Id) {
  return ::getForIntrinsic(Id);
}

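// Returns true when the explicit vector length operand provably covers every
// lane of the operation, i.e. masking by the EVL cannot disable any lane that
// the static vector type allows.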
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_Mul(m_VScale(), m_ConstantInt(VScaleFactor))))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}

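// Builds (or reuses) the declaration of a VP intrinsic from the types of the
// values that will be passed to it, choosing the overload types each intrinsic
// expects.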
Function *VPIntrinsic::getOrInsertDeclarationForParams(
    Module *M, Intrinsic::ID VPID, Type *ReturnType, ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getOrInsertDeclaration(M, VPID, OverloadTy);
    break;
  }
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
  case Intrinsic::vp_lrint:
  case Intrinsic::vp_llrint:
  case Intrinsic::vp_cttz_elts:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_is_fpclass:
    VPFunc = Intrinsic::getOrInsertDeclaration(M, VPID, {Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getOrInsertDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_splat:
    VPFunc = Intrinsic::getOrInsertDeclaration(M, VPID, ReturnType);
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}

bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  case Intrinsic::vp_reduce_add:
  case Intrinsic::vp_reduce_mul:
  case Intrinsic::vp_reduce_and:
  case Intrinsic::vp_reduce_or:
  case Intrinsic::vp_reduce_xor:
  case Intrinsic::vp_reduce_smax:
  case Intrinsic::vp_reduce_smin:
  case Intrinsic::vp_reduce_umax:
  case Intrinsic::vp_reduce_umin:
  case Intrinsic::vp_reduce_fmax:
  case Intrinsic::vp_reduce_fmin:
  case Intrinsic::vp_reduce_fmaximum:
  case Intrinsic::vp_reduce_fminimum:
  case Intrinsic::vp_reduce_fadd:
  case Intrinsic::vp_reduce_fmul:
    return true;
  default:
    return false;
  }
}

bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  // All of the vp.casts correspond to instructions
  if (std::optional<unsigned> Opc = getFunctionalOpcodeForVP(ID))
    return Instruction::isCast(*Opc);
  return false;
}

bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    return false;
  case Intrinsic::vp_fcmp:
  case Intrinsic::vp_icmp:
    return true;
  }
}

bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}

CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  assert(isVPCmp(getIntrinsicID()));
  return getIntrinsicID() == Intrinsic::vp_fcmp
             ? getFPPredicateFromMD(getArgOperand(2))
             : getIntPredicateFromMD(getArgOperand(2));
}

unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  if (isVPReduction(ID))
    return 1;
  return std::nullopt;
}

std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  if (isVPReduction(ID))
    return 0;
  return std::nullopt;
}

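// Overflow- and saturating-arithmetic intrinsics (*.with.overflow, *_sat):
// map each intrinsic to its underlying binary opcode and signedness.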
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}

bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}

unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  if (isSigned())
    return OverflowingBinaryOperator::NoSignedWrap;
  else
    return OverflowingBinaryOperator::NoUnsignedWrap;
}

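// GC statepoint/relocate helpers: a relocate's first operand is either the
// statepoint token itself or, on the exceptional path of an invoke, the
// landingpad whose unique predecessor terminates in the statepoint.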
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Token))
    return UndefValue::get(Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
      cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}

Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}

Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}