//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a
    // fallback indirect call.
    if (CI->getFunction() != User->getFunction())
      continue;
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}
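
// Illustrative IR handled above (sketch; names are made up): given
//   %fptr = load ptr, ptr %vslot
//   call void %fptr(ptr %obj)
// the call is recorded as a DevirtCallSite at the accumulated Offset, while
// any non-call use of %fptr sets *HasNonCallUses instead.
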
// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    } else if (auto *Call = dyn_cast<CallInst>(User)) {
      if (Call->getIntrinsicID() == llvm::Intrinsic::load_relative) {
        if (auto *LoadOffset = dyn_cast<ConstantInt>(Call->getOperand(1))) {
          findCallsAtConstantOffset(DevirtCalls, nullptr, User,
                                    Offset + LoadOffset->getSExtValue(), CI,
                                    DT);
        }
      }
    }
  }
}
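
// Illustrative relative-vtable load handled above (sketch; names made up):
//   %f = call ptr @llvm.load.relative.i32(ptr %vtable, i32 8)
// The constant operand 8 is folded into the running Offset, and %f's uses
// are then searched for virtual calls at Offset + 8.
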
void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::public_type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
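
// The pattern this searches for typically looks like (illustrative):
//   %vtable = load ptr, ptr %obj
//   %p = call i1 @llvm.type.test(ptr %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   ...
//   %fptr = load ptr, ptr %vtable        ; at some constant offset
//   call void %fptr(ptr %obj)            ; reported as a DevirtCallSite
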
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load_relative);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}
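
// Illustrative pattern (sketch):
//   %pair = call {ptr, i1} @llvm.type.checked.load(ptr %vtable, i32 0,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {ptr, i1} %pair, 0   ; collected in LoadedPtrs
//   %ok   = extractvalue {ptr, i1} %pair, 1   ; collected in Preds
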
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M,
                                   Constant *TopLevelGlobal) {
  // TODO: Ideally it would be the caller who knows if it's appropriate to
  // strip the DSOLocalEquivalent. More generally, it would feel more
  // appropriate to have two functions that handle absolute and relative
  // pointers separately.
  if (auto *Equiv = dyn_cast<DSOLocalEquivalent>(I))
    I = Equiv->getGlobalValue();

  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M,
                              TopLevelGlobal);
  }

  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M, TopLevelGlobal);
  }

  // Relative-pointer support starts here.
  if (auto *CI = dyn_cast<ConstantInt>(I)) {
    if (Offset == 0 && CI->isZero()) {
      return I;
    }
  }

  if (auto *C = dyn_cast<ConstantExpr>(I)) {
    switch (C->getOpcode()) {
    case Instruction::Trunc:
    case Instruction::PtrToInt:
      return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
                                TopLevelGlobal);
    case Instruction::Sub: {
      auto *Operand0 = cast<Constant>(C->getOperand(0));
      auto *Operand1 = cast<Constant>(C->getOperand(1));

      auto StripGEP = [](Constant *C) {
        auto *CE = dyn_cast<ConstantExpr>(C);
        if (!CE)
          return C;
        if (CE->getOpcode() != Instruction::GetElementPtr)
          return C;
        return CE->getOperand(0);
      };
      auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));

      // Check that in the "sub (@a, @b)" expression, @b points back to the top
      // level global (or a GEP thereof) that we're processing. Otherwise bail.
      if (Operand1TargetGlobal != TopLevelGlobal)
        return nullptr;

      return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
    }
    default:
      return nullptr;
    }
  }
  return nullptr;
}
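
// Illustrative relative-vtable entry resolved by the Sub case above (sketch):
//   i32 trunc (i64 sub (i64 ptrtoint (ptr @vfunc to i64),
//                       i64 ptrtoint (ptr @vtable to i64)) to i32)
// Trunc and PtrToInt are looked through until @vfunc itself is reached.
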
std::pair<Function *, Constant *>
llvm::getFunctionAtVTableOffset(GlobalVariable *GV, uint64_t Offset,
                                Module &M) {
  Constant *Ptr = getPointerAtOffset(GV->getInitializer(), Offset, M, GV);
  if (!Ptr)
    return std::pair<Function *, Constant *>(nullptr, nullptr);

  auto C = Ptr->stripPointerCasts();
  // Make sure this is a function or alias to a function.
  auto Fn = dyn_cast<Function>(C);
  auto A = dyn_cast<GlobalAlias>(C);
  if (!Fn && A)
    Fn = dyn_cast<Function>(A->getAliasee());

  if (!Fn)
    return std::pair<Function *, Constant *>(nullptr, nullptr);

  return std::pair<Function *, Constant *>(Fn, C);
}
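
// Usage sketch (hypothetical caller): resolve the function stored in the
// first slot of a vtable global:
//   auto [Fn, C] = getFunctionAtVTableOffset(VTableGV, /*Offset=*/0, M);
//   if (Fn) { /* Fn is the virtual function at that slot. */ }
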
static void replaceRelativePointerUserWithZero(User *U) {
  auto *PtrExpr = dyn_cast<ConstantExpr>(U);
  if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
    return;

  for (auto *PtrToIntUser : PtrExpr->users()) {
    auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
    if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
      return;

    SubExpr->replaceNonMetadataUsesWith(
        ConstantInt::get(SubExpr->getType(), 0));
  }
}

void llvm::replaceRelativePointerUsersWithZero(Constant *C) {
  for (auto *U : C->users()) {
    if (auto *Equiv = dyn_cast<DSOLocalEquivalent>(U))
      replaceRelativePointerUsersWithZero(Equiv);
    else
      replaceRelativePointerUserWithZero(U);
  }
}
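
// Usage sketch (illustrative): when a virtual function is proven dead, its
// relative vtable references can be zeroed out before it is deleted:
//   replaceRelativePointerUsersWithZero(DeadVirtualFn);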