//===- bolt/Passes/JTFootprintReduction.cpp -------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JTFootprintReduction class.
//
//===----------------------------------------------------------------------===//

#include "bolt/Passes/JTFootprintReduction.h"
#include "bolt/Core/BinaryFunctionCallGraph.h"
#include "bolt/Passes/DataflowInfoManager.h"
#include "llvm/Support/CommandLine.h"

#define DEBUG_TYPE "JT"

using namespace llvm;
using namespace bolt;

namespace opts {

extern cl::OptionCategory BoltOptCategory;

extern cl::opt<unsigned> Verbosity;

extern cl::opt<JumpTableSupportLevel> JumpTables;

static cl::opt<bool> JTFootprintOnlyPIC(
    "jt-footprint-optimize-for-icache",
    cl::desc("with jt-footprint-reduction, only process PIC jumptables and turn"
             " off other transformations that increase code size"),
    cl::init(false), cl::ZeroOrMore, cl::cat(BoltOptCategory));

} // namespace opts

namespace llvm {
namespace bolt {
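
// Scan \p Function for indirect jumps that dispatch through jump tables and
// decide, per table, whether the footprint-reduction rewrite is safe. Tables
// whose jump pattern is unsupported, or whose non-PIC rewrite would need a
// scratch register that cannot be scavenged, are added to BlacklistedJTs.
// Execution counts are accumulated per table for the statistics printed later.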
void JTFootprintReduction::checkOpportunities(BinaryFunction &Function,
                                              DataflowInfoManager &Info) {
  BinaryContext &BC = Function.getBinaryContext();
  std::map<JumpTable *, uint64_t> AllJTs;

  for (BinaryBasicBlock &BB : Function) {
    for (MCInst &Inst : BB) {
      JumpTable *JumpTable = Function.getJumpTable(Inst);
      if (!JumpTable)
        continue;

      AllJTs[JumpTable] += BB.getKnownExecutionCount();
      ++IndJmps;

      if (BlacklistedJTs.count(JumpTable)) {
        ++IndJmpsDenied;
        continue;
      }

      uint64_t Scale;
      // Try a standard indirect jump matcher
      std::unique_ptr<MCPlusBuilder::MCInstMatcher> IndJmpMatcher =
          BC.MIB->matchIndJmp(BC.MIB->matchAnyOperand(),
                              BC.MIB->matchImm(Scale), BC.MIB->matchReg(),
                              BC.MIB->matchAnyOperand());
      if (!opts::JTFootprintOnlyPIC &&
          IndJmpMatcher->match(*BC.MRI, *BC.MIB,
                               MutableArrayRef<MCInst>(&*BB.begin(), &Inst + 1),
                               -1) &&
          Scale == 8) {
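        // The non-PIC rewrite loads a 32-bit entry into a scratch register
        // before the jump. If no register can be scavenged after this point,
        // the table cannot be shrunk safely, so blacklist it.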
        if (Info.getLivenessAnalysis().scavengeRegAfter(&Inst))
          continue;
        BlacklistedJTs.insert(JumpTable);
        ++IndJmpsDenied;
        ++NumJTsNoReg;
        continue;
      }

      // Try a PIC matcher. The pattern we are looking for is a PIC JT ind jmp:
      //    addq    %rdx, %rsi
      //    addq    %rdx, %rdi
      //    leaq    DATAat0x402450(%rip), %r11
      //    movslq  (%r11,%rdx,4), %rcx
      //    addq    %r11, %rcx
      //    jmpq    *%rcx # JUMPTABLE @0x402450
      MCPhysReg BaseReg1;
      MCPhysReg BaseReg2;
      uint64_t Offset;
      std::unique_ptr<MCPlusBuilder::MCInstMatcher> PICIndJmpMatcher =
          BC.MIB->matchIndJmp(BC.MIB->matchAdd(
              BC.MIB->matchReg(BaseReg1),
              BC.MIB->matchLoad(BC.MIB->matchReg(BaseReg2),
                                BC.MIB->matchImm(Scale), BC.MIB->matchReg(),
                                BC.MIB->matchImm(Offset))));
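      // A separate matcher checks that the table base address is materialized
      // from a load-address of a symbol, so the jump table being indexed can
      // actually be identified.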
      std::unique_ptr<MCPlusBuilder::MCInstMatcher> PICBaseAddrMatcher =
          BC.MIB->matchIndJmp(
              BC.MIB->matchAdd(BC.MIB->matchLoadAddr(BC.MIB->matchSymbol()),
                               BC.MIB->matchAnyOperand()));
      if (!PICIndJmpMatcher->match(
              *BC.MRI, *BC.MIB,
              MutableArrayRef<MCInst>(&*BB.begin(), &Inst + 1), -1) ||
          Scale != 4 || BaseReg1 != BaseReg2 || Offset != 0 ||
          !PICBaseAddrMatcher->match(
              *BC.MRI, *BC.MIB,
              MutableArrayRef<MCInst>(&*BB.begin(), &Inst + 1), -1)) {
        BlacklistedJTs.insert(JumpTable);
        ++IndJmpsDenied;
        ++NumJTsBadMatch;
        continue;
      }
    }
  }

  // Statistics only
  for (const auto &JTFreq : AllJTs) {
    JumpTable *JT = JTFreq.first;
    uint64_t CurScore = JTFreq.second;
    TotalJTScore += CurScore;
    if (!BlacklistedJTs.count(JT)) {
      OptimizedScore += CurScore;
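      // Entries shrink from 8 to 4 bytes, so half of the table size is saved.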
      if (JT->EntrySize == 8)
        BytesSaved += JT->getSize() >> 1;
    }
  }
  TotalJTs += AllJTs.size();
  TotalJTsDenied += BlacklistedJTs.size();
}
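
// Rewrite a non-PIC jump-table dispatch so that it indexes 4-byte entries
// instead of 8-byte ones: the 32-bit entry is loaded into a scavenged register
// and the jump goes through that register (the exact instruction sequence is
// produced by createIJmp32Frag). The matched instructions are annotated
// "DeleteMe" for later removal. Returns false if the pattern does not match or
// the pass runs in PIC-only mode.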
bool JTFootprintReduction::tryOptimizeNonPIC(
    BinaryContext &BC, BinaryBasicBlock &BB, BinaryBasicBlock::iterator Inst,
    uint64_t JTAddr, JumpTable *JumpTable, DataflowInfoManager &Info) {
  if (opts::JTFootprintOnlyPIC)
    return false;

  MCOperand Base;
  uint64_t Scale;
  MCPhysReg Index;
  MCOperand Offset;
  std::unique_ptr<MCPlusBuilder::MCInstMatcher> IndJmpMatcher =
      BC.MIB->matchIndJmp(BC.MIB->matchAnyOperand(Base),
                          BC.MIB->matchImm(Scale), BC.MIB->matchReg(Index),
                          BC.MIB->matchAnyOperand(Offset));
  if (!IndJmpMatcher->match(*BC.MRI, *BC.MIB,
                            MutableArrayRef<MCInst>(&*BB.begin(), &*Inst + 1),
                            -1))
    return false;

  assert(Scale == 8 && "Wrong scale");

  Scale = 4;
  IndJmpMatcher->annotate(*BC.MIB, "DeleteMe");

  LivenessAnalysis &LA = Info.getLivenessAnalysis();
  MCPhysReg Reg = LA.scavengeRegAfter(&*Inst);
  assert(Reg != 0 && "Register scavenger failed!");
  MCOperand RegOp = MCOperand::createReg(Reg);
  SmallVector<MCInst, 4> NewFrag;

  BC.MIB->createIJmp32Frag(NewFrag, Base, MCOperand::createImm(Scale),
                           MCOperand::createReg(Index), Offset, RegOp);
  BC.MIB->setJumpTable(NewFrag.back(), JTAddr, Index);

  JumpTable->OutputEntrySize = 4;

  BB.replaceInstruction(Inst, NewFrag.begin(), NewFrag.end());
  return true;
}
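
// Replace a PIC jump-table dispatch sequence with a more compact 32-bit
// indirect-jump fragment. The matched instructions are annotated "DeleteMe",
// the table base register is reused as the scratch register, and the table is
// converted to a regular (JTT_NORMAL) table with 4-byte entries. Returns false
// if the PIC pattern does not match.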
bool JTFootprintReduction::tryOptimizePIC(BinaryContext &BC,
                                          BinaryBasicBlock &BB,
                                          BinaryBasicBlock::iterator Inst,
                                          uint64_t JTAddr, JumpTable *JumpTable,
                                          DataflowInfoManager &Info) {
  MCPhysReg BaseReg;
  uint64_t Scale;
  MCPhysReg Index;
  MCOperand Offset;
  MCOperand JumpTableRef;
  std::unique_ptr<MCPlusBuilder::MCInstMatcher> PICIndJmpMatcher =
      BC.MIB->matchIndJmp(BC.MIB->matchAdd(
          BC.MIB->matchLoadAddr(BC.MIB->matchAnyOperand(JumpTableRef)),
          BC.MIB->matchLoad(BC.MIB->matchReg(BaseReg), BC.MIB->matchImm(Scale),
                            BC.MIB->matchReg(Index),
                            BC.MIB->matchAnyOperand())));
  if (!PICIndJmpMatcher->match(
          *BC.MRI, *BC.MIB, MutableArrayRef<MCInst>(&*BB.begin(), &*Inst + 1),
          -1))
    return false;

  assert(Scale == 4 && "Wrong scale");

  PICIndJmpMatcher->annotate(*BC.MIB, "DeleteMe");

  MCOperand RegOp = MCOperand::createReg(BaseReg);
  SmallVector<MCInst, 4> NewFrag;

  BC.MIB->createIJmp32Frag(NewFrag, MCOperand::createReg(0),
                           MCOperand::createImm(Scale),
                           MCOperand::createReg(Index), JumpTableRef, RegOp);
  BC.MIB->setJumpTable(NewFrag.back(), JTAddr, Index);

  JumpTable->OutputEntrySize = 4;
  // DePICify
  JumpTable->Type = JumpTable::JTT_NORMAL;

  BB.replaceInstruction(Inst, NewFrag.begin(), NewFrag.end());
  return true;
}
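
// Rewrite every non-blacklisted jump-table dispatch in \p Function, then erase
// the original instructions that the matchers marked "DeleteMe".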
void JTFootprintReduction::optimizeFunction(BinaryFunction &Function,
                                            DataflowInfoManager &Info) {
  BinaryContext &BC = Function.getBinaryContext();
  for (BinaryBasicBlock &BB : Function) {
    if (!BB.getNumNonPseudos())
      continue;

    auto IndJmpRI = BB.getLastNonPseudo();
    auto IndJmp = std::prev(IndJmpRI.base());
    const uint64_t JTAddr = BC.MIB->getJumpTable(*IndJmp);

    if (!JTAddr)
      continue;

    JumpTable *JumpTable = Function.getJumpTable(*IndJmp);
    if (BlacklistedJTs.count(JumpTable))
      continue;

    if (tryOptimizeNonPIC(BC, BB, IndJmp, JTAddr, JumpTable, Info) ||
        tryOptimizePIC(BC, BB, IndJmp, JTAddr, JumpTable, Info)) {
      Modified.insert(&Function);
      continue;
    }

    llvm_unreachable("Should either optimize PIC or NonPIC successfully");
  }

  if (!Modified.count(&Function))
    return;
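
  // The new fragments are already in place; drop the instructions that the
  // matchers annotated "DeleteMe".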
  for (BinaryBasicBlock &BB : Function)
    for (auto I = BB.begin(); I != BB.end();)
      if (BC.MIB->hasAnnotation(*I, "DeleteMe"))
        I = BB.eraseInstruction(I);
      else
        ++I;
}

Error JTFootprintReduction::runOnFunctions(BinaryContext &BC) {
  if (opts::JumpTables == JTS_BASIC && BC.HasRelocations)
    return Error::success();
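
  // The non-PIC rewrite needs to scavenge a scratch register, so build a call
  // graph and register analysis to feed the liveness computation. In PIC-only
  // mode this is not needed.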
  std::unique_ptr<RegAnalysis> RA;
  std::unique_ptr<BinaryFunctionCallGraph> CG;
  if (!opts::JTFootprintOnlyPIC) {
    CG.reset(new BinaryFunctionCallGraph(buildCallGraph(BC)));
    RA.reset(new RegAnalysis(BC, &BC.getBinaryFunctions(), &*CG));
  }
  for (auto &BFIt : BC.getBinaryFunctions()) {
    BinaryFunction &Function = BFIt.second;

    if (!Function.isSimple() || Function.isIgnored())
      continue;

    if (Function.getKnownExecutionCount() == 0)
      continue;

    DataflowInfoManager Info(Function, RA.get(), nullptr);
    BlacklistedJTs.clear();
    checkOpportunities(Function, Info);
    optimizeFunction(Function, Info);
  }

  if (TotalJTs == TotalJTsDenied) {
    BC.outs() << "BOLT-INFO: JT Footprint reduction: no changes were made.\n";
    return Error::success();
  }

  BC.outs() << "BOLT-INFO: JT Footprint reduction stats (simple funcs only):\n";
  if (OptimizedScore)
    BC.outs() << format("\t %.2lf%%", (OptimizedScore * 100.0 / TotalJTScore))
              << " of dynamic JT entries were reduced.\n";
  BC.outs() << "\t " << TotalJTs - TotalJTsDenied << " of " << TotalJTs
            << " jump tables affected.\n";
  BC.outs() << "\t " << IndJmps - IndJmpsDenied << " of " << IndJmps
            << " indirect jumps to JTs affected.\n";
  BC.outs() << "\t " << NumJTsBadMatch
            << " JTs discarded due to unsupported jump pattern.\n";
  BC.outs() << "\t " << NumJTsNoReg
            << " JTs discarded due to register unavailability.\n";
  BC.outs() << "\t " << BytesSaved << " bytes saved.\n";
  return Error::success();
}

} // namespace bolt
} // namespace llvm