//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a
    // fallback indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}
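
// Illustrative sketch (value names are invented, not taken from any test
// case): with typed pointers, the loaded function pointer is usually bitcast
// to the right function type before being called, e.g.
//
//   %fptr = load i8*, i8** %slot
//   %fptr.cast = bitcast i8* %fptr to void (i8*)*
//   call void %fptr.cast(i8* %obj)
//
// findCallsAtConstantOffset above follows the bitcast recursively (carrying
// the same vtable offset) and records the final call as a DevirtCallSite.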

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}
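
// Illustrative sketch of the kind of IR the function below is meant to
// analyze (value names are made up for the example):
//
//   %vtable = load i8*, i8** %vtableptr
//   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   %slot = getelementptr i8, i8* %vtable, i32 8
//   %slot.cast = bitcast i8* %slot to void (i8*)**
//   %fptr = load void (i8*)*, void (i8*)** %slot.cast
//   call void %fptr(i8* %obj)
//
// Starting from the type test's pointer argument, the GEP contributes a
// constant offset of 8, so the final call is recorded as a DevirtCallSite at
// vtable offset 8.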

void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
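
// Illustrative sketch (value names invented) of the IR shape handled by the
// function below:
//
//   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {i8*, i1} %pair, 0
//   %ok   = extractvalue {i8*, i1} %pair, 1
//   br i1 %ok, label %cont, label %trap
//
// The index-0 extractvalue goes into LoadedPtrs, the index-1 extractvalue into
// Preds, and any call made through %fptr (possibly via a bitcast) is collected
// into DevirtCalls at the constant offset 8 taken from the second argument.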

void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}
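
// Illustrative sketch of how getPointerAtOffset (below) walks a vtable
// initializer; the global and function names are invented. Given, with 8-byte
// pointers,
//
//   @vt = constant { [3 x i8*] } { [3 x i8*]
//           [i8* null, i8* null, i8* bitcast (void (i8*)* @vf to i8*)] }
//
// a query at Offset 16 first selects the sole struct field (the offset within
// it remains 16), then array element 16 / 8 == 2, and returns the bitcast
// constant pointing at @vf. Offsets past the end of the constant, or offsets
// that land on a non-pointer leaf, yield nullptr.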

Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M);
  }
  return nullptr;
}