//===- TypeMetadataUtils.cpp - Utilities related to type metadata --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
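// For illustration only (hypothetical IR names): given a loaded function
// pointer %fptr, a user such as
//   call void %fptr(ptr %obj)
// is recorded at the current vtable offset; bitcasts are followed, and any
// other user sets *HasNonCallUses when the caller requested that flag.
// Users not dominated by the type intrinsic are skipped.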
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a fallback
    // indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
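// For illustration only (hypothetical IR names): starting from the vtable
// pointer %vtable, a chain such as
//   %slot = getelementptr i8, ptr %vtable, i64 8   ; folds 8 into Offset
//   %fptr = load ptr, ptr %slot
// hands %fptr to findCallsAtConstantOffset with the accumulated offset, so
// calls through %fptr are recorded against the right vtable slot.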
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}

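// For illustration only, the overall pattern searched for (hypothetical IR):
//   %p = call i1 @llvm.type.test(ptr %vtable, metadata !"A")
//   call void @llvm.assume(i1 %p)
//   %fptr = load ptr, ptr %vtable
//   call void %fptr(ptr %obj)
// Each llvm.assume user of the type test lands in Assumes, and each guarded
// indirect call lands in DevirtCalls.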
void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::public_type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}

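// For illustration only: llvm.type.checked.load returns a {ptr, i1} pair, so
// a typical use looks like (hypothetical IR):
//   %pair = call {ptr, i1} @llvm.type.checked.load(ptr %vtable, i32 8,
//                                                  metadata !"A")
//   %fptr = extractvalue {ptr, i1} %pair, 0   ; collected in LoadedPtrs
//   %ok   = extractvalue {ptr, i1} %pair, 1   ; collected in Preds
// Any other user of the pair forces HasNonCallUses.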
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load_relative);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}

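// Search recursively through a vtable initializer, descending into nested
// structs and arrays, for the pointer stored at the given byte offset.
// Returns nullptr if the offset does not land on a recoverable pointer. The
// ConstantInt/ConstantExpr cases below decode (Swift-style) relative
// pointers of the form "trunc (sub (ptrtoint @f, ptrtoint @vtable))".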
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M,
                                   Constant *TopLevelGlobal) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M,
                              TopLevelGlobal);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M, TopLevelGlobal);
  }

  // (Swift-specific) relative-pointer support starts here.
  if (auto *CI = dyn_cast<ConstantInt>(I)) {
    if (Offset == 0 && CI->isZero()) {
      return I;
    }
  }
  if (auto *C = dyn_cast<ConstantExpr>(I)) {
    switch (C->getOpcode()) {
    case Instruction::Trunc:
    case Instruction::PtrToInt:
      return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
                                TopLevelGlobal);
    case Instruction::Sub: {
      auto *Operand0 = cast<Constant>(C->getOperand(0));
      auto *Operand1 = cast<Constant>(C->getOperand(1));

      auto StripGEP = [](Constant *C) {
        auto *CE = dyn_cast<ConstantExpr>(C);
        if (!CE)
          return C;
        if (CE->getOpcode() != Instruction::GetElementPtr)
          return C;
        return CE->getOperand(0);
      };
      auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));

      // Check that in the "sub (@a, @b)" expression, @b points back to the top
      // level global (or a GEP thereof) that we're processing. Otherwise bail.
      if (Operand1TargetGlobal != TopLevelGlobal)
        return nullptr;

      return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
    }
    default:
      return nullptr;
    }
  }
  return nullptr;
}

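// A relative vtable slot encodes a function as an offset from the vtable,
// e.g. (hypothetical IR):
//   i32 trunc (i64 sub (i64 ptrtoint (ptr @f to i64),
//                       i64 ptrtoint (ptr @vt to i64)) to i32)
// Replacing the sub expression with zero clears every such slot that refers
// to F.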
void llvm::replaceRelativePointerUsersWithZero(Function *F) {
  for (auto *U : F->users()) {
    auto *PtrExpr = dyn_cast<ConstantExpr>(U);
    if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
      continue;

    for (auto *PtrToIntUser : PtrExpr->users()) {
      auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
      if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
        continue;

      SubExpr->replaceNonMetadataUsesWith(
          ConstantInt::get(SubExpr->getType(), 0));
    }
  }
}