//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
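//
// A minimal sketch of a use chain this walk handles (illustrative IR, not
// taken from any particular test): FPtr is a function pointer loaded from a
// vtable, e.g.
//
//   %f = bitcast i8* %fptr to void (i8*)*
//   call void %f(i8* %obj)
//
// Bitcasts are looked through recursively; each call or invoke reached is
// recorded in DevirtCalls together with the accumulated vtable Offset.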
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a
    // fallback indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, CI});
    } else if (auto II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
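//
// VPtr is a (possibly gep'd) vtable pointer. For example, a constant-index
// gep such as (hypothetical IR)
//
//   %slot = getelementptr i8, i8* %vtable, i64 16
//
// adds 16 to the running Offset, and a load of %slot hands the loaded
// function pointer to findCallsAtConstantOffset.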
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}
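
// An illustrative sketch (names and the !"_ZTS1A" type identifier are
// hypothetical) of the pattern findDevirtualizableCallsForTypeTest looks
// for; clang typically emits this shape for virtual calls under
// -fwhole-program-vtables:
//
//   %vtable = load i8*, i8** %obj
//   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//
// The assumes are collected, and the vtable pointer %vtable is then walked
// to find the guarded virtual calls.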
void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses()) {
    if (auto *AssumeCI = dyn_cast<CallInst>(CIU.getUser())) {
      Function *F = AssumeCI->getCalledFunction();
      if (F && F->getIntrinsicID() == Intrinsic::assume)
        Assumes.push_back(AssumeCI);
    }
  }

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
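
// An illustrative sketch of what findDevirtualizableCallsForTypeCheckedLoad
// consumes (hypothetical IR; llvm.type.checked.load returns {i8*, i1}):
//
//   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {i8*, i1} %pair, 0
//   %ok = extractvalue {i8*, i1} %pair, 1
//
// Index 0 yields the loaded function pointer and index 1 the type test
// result used to guard the call.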
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}