Merge branch 'master' into msp430
[llvm/msp430.git] / lib/Transforms/Scalar/Reassociate.cpp
blob: 293cf9248b7e53873ed3fd3366600438870dae7f
1 //===- Reassociate.cpp - Reassociate binary expressions -------------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This pass reassociates commutative expressions in an order that is designed
11 // to promote better constant propagation, GCSE, LICM, PRE...
13 // For example: 4 + (x + 5) -> x + (4 + 5)
15 // In the implementation of this algorithm, constants are assigned rank = 0,
16 // function arguments are rank = 1, and other values are assigned ranks
17 // corresponding to the reverse post-order traversal of the current function
18 // (starting at 2), which effectively gives values in deep loops higher rank
19 // than values not in loops.
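//
// As an illustrative sketch (hypothetical IR, not taken from this file), the
// example above corresponds to input such as:
//   %t0 = add i32 %x, 5
//   %t1 = add i32 4, %t0
// Reassociation brings the two constants together so they fold, leaving:
//   %t1 = add i32 %x, 9
// The names %x, %t0 and %t1 are made up for illustration only.
//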
21 //===----------------------------------------------------------------------===//
23 #define DEBUG_TYPE "reassociate"
24 #include "llvm/Transforms/Scalar.h"
25 #include "llvm/Constants.h"
26 #include "llvm/DerivedTypes.h"
27 #include "llvm/Function.h"
28 #include "llvm/Instructions.h"
29 #include "llvm/IntrinsicInst.h"
30 #include "llvm/Pass.h"
31 #include "llvm/Assembly/Writer.h"
32 #include "llvm/Support/CFG.h"
33 #include "llvm/Support/Compiler.h"
34 #include "llvm/Support/Debug.h"
35 #include "llvm/Support/ValueHandle.h"
36 #include "llvm/ADT/PostOrderIterator.h"
37 #include "llvm/ADT/Statistic.h"
38 #include <algorithm>
39 #include <map>
40 using namespace llvm;
42 STATISTIC(NumLinear , "Number of insts linearized");
43 STATISTIC(NumChanged, "Number of insts reassociated");
44 STATISTIC(NumAnnihil, "Number of expr trees annihilated");
45 STATISTIC(NumFactor , "Number of multiplies factored");
47 namespace {
48 struct VISIBILITY_HIDDEN ValueEntry {
49 unsigned Rank;
50 Value *Op;
51 ValueEntry(unsigned R, Value *O) : Rank(R), Op(O) {}
53 inline bool operator<(const ValueEntry &LHS, const ValueEntry &RHS) {
54 return LHS.Rank > RHS.Rank; // Sort so that highest rank goes to start.
58 #ifndef NDEBUG
59 /// PrintOps - Print out the expression identified in the Ops list.
60 ///
61 static void PrintOps(Instruction *I, const std::vector<ValueEntry> &Ops) {
62 Module *M = I->getParent()->getParent()->getParent();
63 cerr << Instruction::getOpcodeName(I->getOpcode()) << " "
64 << *Ops[0].Op->getType();
65 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
66 WriteAsOperand(*cerr.stream() << " ", Ops[i].Op, false, M);
67 cerr << "," << Ops[i].Rank;
70 #endif
72 namespace {
73 class VISIBILITY_HIDDEN Reassociate : public FunctionPass {
74 std::map<BasicBlock*, unsigned> RankMap;
75 std::map<AssertingVH<>, unsigned> ValueRankMap;
76 bool MadeChange;
77 public:
78 static char ID; // Pass identification, replacement for typeid
79 Reassociate() : FunctionPass(&ID) {}
81 bool runOnFunction(Function &F);
83 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
84 AU.setPreservesCFG();
86 private:
87 void BuildRankMap(Function &F);
88 unsigned getRank(Value *V);
89 void ReassociateExpression(BinaryOperator *I);
90 void RewriteExprTree(BinaryOperator *I, std::vector<ValueEntry> &Ops,
91 unsigned Idx = 0);
92 Value *OptimizeExpression(BinaryOperator *I, std::vector<ValueEntry> &Ops);
93 void LinearizeExprTree(BinaryOperator *I, std::vector<ValueEntry> &Ops);
94 void LinearizeExpr(BinaryOperator *I);
95 Value *RemoveFactorFromExpression(Value *V, Value *Factor);
96 void ReassociateBB(BasicBlock *BB);
98 void RemoveDeadBinaryOp(Value *V);
102 char Reassociate::ID = 0;
103 static RegisterPass<Reassociate> X("reassociate", "Reassociate expressions");
105 // Public interface to the Reassociate pass
106 FunctionPass *llvm::createReassociatePass() { return new Reassociate(); }
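// Usage sketch (an assumption about typical client code, not part of this
// file): the pass is normally scheduled through the legacy pass manager,
// e.g.
//   PassManager PM;
//   PM.add(createReassociatePass());
//   PM.run(M);   // M is some llvm::Module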
108 void Reassociate::RemoveDeadBinaryOp(Value *V) {
109 Instruction *Op = dyn_cast<Instruction>(V);
110 if (!Op || (!isa<BinaryOperator>(Op) && !isa<CmpInst>(Op)) || !Op->use_empty())
111 return;
113 Value *LHS = Op->getOperand(0), *RHS = Op->getOperand(1);
114 RemoveDeadBinaryOp(LHS);
115 RemoveDeadBinaryOp(RHS);
119 static bool isUnmovableInstruction(Instruction *I) {
120 if (I->getOpcode() == Instruction::PHI ||
121 I->getOpcode() == Instruction::Alloca ||
122 I->getOpcode() == Instruction::Load ||
123 I->getOpcode() == Instruction::Malloc ||
124 I->getOpcode() == Instruction::Invoke ||
125 (I->getOpcode() == Instruction::Call &&
126 !isa<DbgInfoIntrinsic>(I)) ||
127 I->getOpcode() == Instruction::UDiv ||
128 I->getOpcode() == Instruction::SDiv ||
129 I->getOpcode() == Instruction::FDiv ||
130 I->getOpcode() == Instruction::URem ||
131 I->getOpcode() == Instruction::SRem ||
132 I->getOpcode() == Instruction::FRem)
133 return true;
134 return false;
137 void Reassociate::BuildRankMap(Function &F) {
138 unsigned i = 2;
140 // Assign distinct ranks to function arguments
141 for (Function::arg_iterator I = F.arg_begin(), E = F.arg_end(); I != E; ++I)
142 ValueRankMap[&*I] = ++i;
144 ReversePostOrderTraversal<Function*> RPOT(&F);
145 for (ReversePostOrderTraversal<Function*>::rpo_iterator I = RPOT.begin(),
146 E = RPOT.end(); I != E; ++I) {
147 BasicBlock *BB = *I;
148 unsigned BBRank = RankMap[BB] = ++i << 16;
150 // Walk the basic block, adding precomputed ranks for any instructions that
151 // we cannot move. This ensures that the ranks for these instructions are
152 // all different in the block.
153 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I)
154 if (isUnmovableInstruction(I))
155 ValueRankMap[&*I] = ++BBRank;
159 unsigned Reassociate::getRank(Value *V) {
160 if (isa<Argument>(V)) return ValueRankMap[V]; // Function argument...
162 Instruction *I = dyn_cast<Instruction>(V);
163 if (I == 0) return 0; // Otherwise it's a global or constant, rank 0.
165 unsigned &CachedRank = ValueRankMap[I];
166 if (CachedRank) return CachedRank; // Rank already known?
168 // If this is an expression, return the 1+MAX(rank(LHS), rank(RHS)) so that
169 // we can reassociate expressions for code motion! Since we do not recurse
170 // for PHI nodes, we cannot have infinite recursion here, because there
171 // cannot be loops in the value graph that do not go through PHI nodes.
172 unsigned Rank = 0, MaxRank = RankMap[I->getParent()];
173 for (unsigned i = 0, e = I->getNumOperands();
174 i != e && Rank != MaxRank; ++i)
175 Rank = std::max(Rank, getRank(I->getOperand(i)));
177 // If this is a not or neg instruction, do not count it for rank. This
178 // assures us that X and ~X will have the same rank.
179 if (!I->getType()->isInteger() ||
180 (!BinaryOperator::isNot(I) && !BinaryOperator::isNeg(I)))
181 ++Rank;
183 //DOUT << "Calculated Rank[" << V->getName() << "] = "
184 // << Rank << "\n";
186 return CachedRank = Rank;
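// Worked example of the ranking rules (hypothetical function, following
// BuildRankMap above): with two arguments %a and %b, BuildRankMap assigns
// them ranks 3 and 4 (i starts at 2 and is pre-incremented).  Then for
//   %t = add i32 %a, %b      ->  rank = max(3, 4) + 1 = 5
//   %u = mul i32 %t, %a      ->  rank = max(5, 3) + 1 = 6
// so a value outranks everything it is computed from, as long as the block's
// MaxRank cap is not reached.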
189 /// isReassociableOp - Return V as a BinaryOperator if it is an instruction of
190 /// the specified opcode and has at most one use; otherwise return null.
191 static BinaryOperator *isReassociableOp(Value *V, unsigned Opcode) {
192 if ((V->hasOneUse() || V->use_empty()) && isa<Instruction>(V) &&
193 cast<Instruction>(V)->getOpcode() == Opcode)
194 return cast<BinaryOperator>(V);
195 return 0;
198 /// LowerNegateToMultiply - Replace 0-X with X*-1.
200 static Instruction *LowerNegateToMultiply(Instruction *Neg,
201 std::map<AssertingVH<>, unsigned> &ValueRankMap) {
202 Constant *Cst = ConstantInt::getAllOnesValue(Neg->getType());
204 Instruction *Res = BinaryOperator::CreateMul(Neg->getOperand(1), Cst, "",Neg);
205 ValueRankMap.erase(Neg);
206 Res->takeName(Neg);
207 Neg->replaceAllUsesWith(Res);
208 Neg->eraseFromParent();
209 return Res;
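// Illustrative effect (hypothetical IR): a negation such as
//   %n = sub i32 0, %x
// is rewritten to
//   %n = mul i32 %x, -1
// so that it can take part in reassociation of a surrounding multiply tree.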
212 // Given an expression of the form '(A+B)+(D+C)', turn it into '(((A+B)+C)+D)'.
213 // Note that if D is also part of the expression tree, we recurse to
214 // linearize it as well. Besides that case, this does not recurse into A, B, or
215 // C.
216 void Reassociate::LinearizeExpr(BinaryOperator *I) {
217 BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0));
218 BinaryOperator *RHS = cast<BinaryOperator>(I->getOperand(1));
219 assert(isReassociableOp(LHS, I->getOpcode()) &&
220 isReassociableOp(RHS, I->getOpcode()) &&
221 "Not an expression that needs linearization?");
223 DOUT << "Linear" << *LHS << *RHS << *I;
225 // Move the RHS instruction to live immediately before I, avoiding breaking
226 // dominator properties.
227 RHS->moveBefore(I);
229 // Move operands around to do the linearization.
230 I->setOperand(1, RHS->getOperand(0));
231 RHS->setOperand(0, LHS);
232 I->setOperand(0, RHS);
234 ++NumLinear;
235 MadeChange = true;
236 DOUT << "Linearized: " << *I;
238 // If D is part of this expression tree, tail recurse.
239 if (isReassociableOp(I->getOperand(1), I->getOpcode()))
240 LinearizeExpr(I);
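// Step-by-step sketch of the operand moves above (hypothetical values): with
// I = (A+B)+(D+C), LHS = A+B and RHS = D+C, the three setOperand calls first
// make I = (A+B)+D, then RHS = (A+B)+C, and finally I = ((A+B)+C)+D, which is
// the left-linear form described in the comment before this function.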
244 /// LinearizeExprTree - Given an associative binary expression tree, traverse
245 /// all of the uses, putting it into canonical form. This forces a left-linear
246 /// form of the expression (((a+b)+c)+d), and collects information about the
247 /// rank of the non-tree operands.
249 /// NOTE: This intentionally destroys the expression tree operands (turning
250 /// them into undef values) to reduce #uses of the values. This means that the
251 /// caller MUST use something like RewriteExprTree to put the values back in.
253 void Reassociate::LinearizeExprTree(BinaryOperator *I,
254 std::vector<ValueEntry> &Ops) {
255 Value *LHS = I->getOperand(0), *RHS = I->getOperand(1);
256 unsigned Opcode = I->getOpcode();
258 // First step, linearize the expression if it is in ((A+B)+(C+D)) form.
259 BinaryOperator *LHSBO = isReassociableOp(LHS, Opcode);
260 BinaryOperator *RHSBO = isReassociableOp(RHS, Opcode);
262 // If this is a multiply expression tree and it contains internal negations,
263 // transform them into multiplies by -1 so they can be reassociated.
264 if (I->getOpcode() == Instruction::Mul) {
265 if (!LHSBO && LHS->hasOneUse() && BinaryOperator::isNeg(LHS)) {
266 LHS = LowerNegateToMultiply(cast<Instruction>(LHS), ValueRankMap);
267 LHSBO = isReassociableOp(LHS, Opcode);
269 if (!RHSBO && RHS->hasOneUse() && BinaryOperator::isNeg(RHS)) {
270 RHS = LowerNegateToMultiply(cast<Instruction>(RHS), ValueRankMap);
271 RHSBO = isReassociableOp(RHS, Opcode);
275 if (!LHSBO) {
276 if (!RHSBO) {
277 // Neither the LHS nor the RHS is part of the tree, so this is a leaf. As
278 // such, just remember these operands and their rank.
279 Ops.push_back(ValueEntry(getRank(LHS), LHS));
280 Ops.push_back(ValueEntry(getRank(RHS), RHS));
282 // Clear the leaves out.
283 I->setOperand(0, UndefValue::get(I->getType()));
284 I->setOperand(1, UndefValue::get(I->getType()));
285 return;
286 } else {
287 // Turn X+(Y+Z) -> (Y+Z)+X
288 std::swap(LHSBO, RHSBO);
289 std::swap(LHS, RHS);
290 bool Success = !I->swapOperands();
291 assert(Success && "swapOperands failed");
292 (void)Success; // Silence an unused-variable warning when asserts are disabled.
293 MadeChange = true;
295 } else if (RHSBO) {
296 // Turn (A+B)+(C+D) -> (((A+B)+C)+D). This guarantees that the RHS is not
297 // part of the expression tree.
298 LinearizeExpr(I);
299 LHS = LHSBO = cast<BinaryOperator>(I->getOperand(0));
300 RHS = I->getOperand(1);
301 RHSBO = 0;
304 // Okay, now we know that the LHS is a nested expression and that the RHS is
305 // not. Perform reassociation.
306 assert(!isReassociableOp(RHS, Opcode) && "LinearizeExpr failed!");
308 // Move LHS right before I to make sure that the tree expression dominates all
309 // values.
310 LHSBO->moveBefore(I);
312 // Linearize the expression tree on the LHS.
313 LinearizeExprTree(LHSBO, Ops);
315 // Remember the RHS operand and its rank.
316 Ops.push_back(ValueEntry(getRank(RHS), RHS));
318 // Clear the RHS leaf out.
319 I->setOperand(1, UndefValue::get(I->getType()));
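// Resulting state, sketched for a hypothetical left-linear tree (((a+b)+c)+d):
// Ops ends up holding the leaves a, b, c and d (each paired with its rank),
// while the interior add instructions are left with undef operands until
// RewriteExprTree installs the optimized operand list again.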
322 // RewriteExprTree - Now that the operands for this expression tree are
323 // linearized and optimized, emit them in-order. This function is written to be
324 // tail recursive.
325 void Reassociate::RewriteExprTree(BinaryOperator *I,
326 std::vector<ValueEntry> &Ops,
327 unsigned i) {
328 if (i+2 == Ops.size()) {
329 if (I->getOperand(0) != Ops[i].Op ||
330 I->getOperand(1) != Ops[i+1].Op) {
331 Value *OldLHS = I->getOperand(0);
332 DOUT << "RA: " << *I;
333 I->setOperand(0, Ops[i].Op);
334 I->setOperand(1, Ops[i+1].Op);
335 DOUT << "TO: " << *I;
336 MadeChange = true;
337 ++NumChanged;
339 // If we reassociated a tree to fewer operands (e.g. (1+a+2) -> (a+3)),
340 // delete the extra, now dead, nodes.
341 RemoveDeadBinaryOp(OldLHS);
343 return;
345 assert(i+2 < Ops.size() && "Ops index out of range!");
347 if (I->getOperand(1) != Ops[i].Op) {
348 DOUT << "RA: " << *I;
349 I->setOperand(1, Ops[i].Op);
350 DOUT << "TO: " << *I;
351 MadeChange = true;
352 ++NumChanged;
355 BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0));
356 assert(LHS->getOpcode() == I->getOpcode() &&
357 "Improper expression tree!");
359 // Compactify the tree instructions together with each other to guarantee
360 // that the expression tree is dominated by all of Ops.
361 LHS->moveBefore(I);
362 RewriteExprTree(LHS, Ops, i+1);
367 // NegateValue - Insert instructions before the instruction pointed to by BI
368 // that compute the negated version of the specified value. The negated
369 // value is returned, and BI is left pointing at the instruction
370 // that should be processed next by the reassociation pass.
372 static Value *NegateValue(Value *V, Instruction *BI) {
373 // We are trying to expose opportunity for reassociation. One of the things
374 // that we want to do to achieve this is to push a negation as deep into an
375 // expression chain as possible, to expose the add instructions. In practice,
376 // this means that we turn this:
377 // X = -(A+12+C+D) into X = -A + -12 + -C + -D = -12 + -A + -C + -D
378 // so that a later expression such as Y = 12+X can be reassociated with the -12
379 // the constants. We assume that instcombine will clean up the mess later if
380 // we introduce tons of unnecessary negation instructions...
382 if (Instruction *I = dyn_cast<Instruction>(V))
383 if (I->getOpcode() == Instruction::Add && I->hasOneUse()) {
384 // Push the negates through the add.
385 I->setOperand(0, NegateValue(I->getOperand(0), BI));
386 I->setOperand(1, NegateValue(I->getOperand(1), BI));
388 // We must move the add instruction here, because the neg instructions do
389 // not dominate the old add instruction in general. By moving it, we are
390 // assured that the neg instructions we just inserted dominate the
391 // instruction we are about to insert after them.
393 I->moveBefore(BI);
394 I->setName(I->getName()+".neg");
395 return I;
398 // Insert a 'neg' instruction that subtracts the value from zero to get the
399 // negation.
401 return BinaryOperator::CreateNeg(V, V->getName() + ".neg", BI);
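// Sketch of the two cases above (hypothetical IR): for a single-use add such
// as "%t = add i32 %a, %b", the add itself is reused: both operands are
// negated first and %t is renamed %t.neg.  For any other value %v, a fresh
// "%v.neg = sub i32 0, %v" is inserted before BI and returned.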
404 /// ShouldBreakUpSubtract - Return true if we should break up this subtract of
405 /// X-Y into (X + -Y).
406 static bool ShouldBreakUpSubtract(Instruction *Sub) {
407 // If this is a negation, we can't split it up!
408 if (BinaryOperator::isNeg(Sub))
409 return false;
411 // Don't bother to break this up unless either operand is a reassociable add or
412 // subtract, or this is only used by a reassociable add or subtract.
413 if (isReassociableOp(Sub->getOperand(0), Instruction::Add) ||
414 isReassociableOp(Sub->getOperand(0), Instruction::Sub))
415 return true;
416 if (isReassociableOp(Sub->getOperand(1), Instruction::Add) ||
417 isReassociableOp(Sub->getOperand(1), Instruction::Sub))
418 return true;
419 if (Sub->hasOneUse() &&
420 (isReassociableOp(Sub->use_back(), Instruction::Add) ||
421 isReassociableOp(Sub->use_back(), Instruction::Sub)))
422 return true;
424 return false;
427 /// BreakUpSubtract - If we have (X-Y), and if either X is an add, or if this is
428 /// only used by an add, transform this into (X+(0-Y)) to promote better
429 /// reassociation.
430 static Instruction *BreakUpSubtract(Instruction *Sub,
431 std::map<AssertingVH<>, unsigned> &ValueRankMap) {
432 // Convert a subtract into an add and a neg instruction... so that sub
433 // instructions can be commuted with other add instructions...
435 // Calculate the negative value of Operand 1 of the sub instruction...
436 // and set it as the RHS of the add instruction we just made...
438 Value *NegVal = NegateValue(Sub->getOperand(1), Sub);
439 Instruction *New =
440 BinaryOperator::CreateAdd(Sub->getOperand(0), NegVal, "", Sub);
441 New->takeName(Sub);
443 // Everyone now refers to the add instruction.
444 ValueRankMap.erase(Sub);
445 Sub->replaceAllUsesWith(New);
446 Sub->eraseFromParent();
448 DOUT << "Negated: " << *New;
449 return New;
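// Illustrative rewrite (hypothetical IR):
//   %d = sub i32 %x, %y
// becomes
//   %y.neg = sub i32 0, %y
//   %d = add i32 %x, %y.neg
// so the resulting add can be commuted and merged with neighbouring add trees.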
452 /// ConvertShiftToMul - If this is a shift of a reassociable multiply or is used
453 /// by one, change this into a multiply by a constant to assist with further
454 /// reassociation.
455 static Instruction *ConvertShiftToMul(Instruction *Shl,
456 std::map<AssertingVH<>, unsigned> &ValueRankMap) {
457 // If an operand of this shift is a reassociable multiply, or if the shift
458 // is used by a reassociable multiply or add, turn into a multiply.
459 if (isReassociableOp(Shl->getOperand(0), Instruction::Mul) ||
460 (Shl->hasOneUse() &&
461 (isReassociableOp(Shl->use_back(), Instruction::Mul) ||
462 isReassociableOp(Shl->use_back(), Instruction::Add)))) {
463 Constant *MulCst = ConstantInt::get(Shl->getType(), 1);
464 MulCst = ConstantExpr::getShl(MulCst, cast<Constant>(Shl->getOperand(1)));
466 Instruction *Mul = BinaryOperator::CreateMul(Shl->getOperand(0), MulCst,
467 "", Shl);
468 ValueRankMap.erase(Shl);
469 Mul->takeName(Shl);
470 Shl->replaceAllUsesWith(Mul);
471 Shl->eraseFromParent();
472 return Mul;
474 return 0;
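// Illustrative rewrite (hypothetical IR): when the shift feeds a reassociable
// multiply or add,
//   %s = shl i32 %x, 3
// becomes
//   %s = mul i32 %x, 8
// because MulCst is computed as 1 << 3.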
477 // Scan backwards and forwards among values with the same rank as element i to
478 // see if X exists. If X does not exist, return i.
479 static unsigned FindInOperandList(std::vector<ValueEntry> &Ops, unsigned i,
480 Value *X) {
481 unsigned XRank = Ops[i].Rank;
482 unsigned e = Ops.size();
483 for (unsigned j = i+1; j != e && Ops[j].Rank == XRank; ++j)
484 if (Ops[j].Op == X)
485 return j;
486 // Scan backwards
487 for (unsigned j = i-1; j != ~0U && Ops[j].Rank == XRank; --j)
488 if (Ops[j].Op == X)
489 return j;
490 return i;
493 /// EmitAddTreeOfValues - Emit a tree of add instructions, summing Ops together
494 /// and returning the result. Insert the tree before I.
495 static Value *EmitAddTreeOfValues(Instruction *I, std::vector<Value*> &Ops) {
496 if (Ops.size() == 1) return Ops.back();
498 Value *V1 = Ops.back();
499 Ops.pop_back();
500 Value *V2 = EmitAddTreeOfValues(I, Ops);
501 return BinaryOperator::CreateAdd(V2, V1, "tmp", I);
504 /// RemoveFactorFromExpression - If V is an expression tree that is a
505 /// multiplication sequence, and if this sequence contains a multiply by Factor,
506 /// remove Factor from the tree and return the new tree.
507 Value *Reassociate::RemoveFactorFromExpression(Value *V, Value *Factor) {
508 BinaryOperator *BO = isReassociableOp(V, Instruction::Mul);
509 if (!BO) return 0;
511 std::vector<ValueEntry> Factors;
512 LinearizeExprTree(BO, Factors);
514 bool FoundFactor = false;
515 for (unsigned i = 0, e = Factors.size(); i != e; ++i)
516 if (Factors[i].Op == Factor) {
517 FoundFactor = true;
518 Factors.erase(Factors.begin()+i);
519 break;
521 if (!FoundFactor) {
522 // Make sure to restore the operands to the expression tree.
523 RewriteExprTree(BO, Factors);
524 return 0;
527 if (Factors.size() == 1) return Factors[0].Op;
529 RewriteExprTree(BO, Factors);
530 return BO;
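// Sketch (hypothetical values): if V is a single-use multiply tree computing
// a*b*c and Factor is b, the tree is linearized into the factors {a, b, c},
// the b entry is erased, and the remaining tree is rewritten to compute a*c,
// which is returned.  If Factor does not occur at all, the operands are
// restored via RewriteExprTree and null is returned.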
533 /// FindSingleUseMultiplyFactors - If V is a single-use multiply, recursively
534 /// add its operands as factors, otherwise add V to the list of factors.
535 static void FindSingleUseMultiplyFactors(Value *V,
536 std::vector<Value*> &Factors) {
537 BinaryOperator *BO;
538 if ((!V->hasOneUse() && !V->use_empty()) ||
539 !(BO = dyn_cast<BinaryOperator>(V)) ||
540 BO->getOpcode() != Instruction::Mul) {
541 Factors.push_back(V);
542 return;
545 // Otherwise, add the LHS and RHS to the list of factors.
546 FindSingleUseMultiplyFactors(BO->getOperand(1), Factors);
547 FindSingleUseMultiplyFactors(BO->getOperand(0), Factors);
552 Value *Reassociate::OptimizeExpression(BinaryOperator *I,
553 std::vector<ValueEntry> &Ops) {
554 // Now that we have the linearized expression tree, try to optimize it.
555 // Start by folding any constants that we found.
556 bool IterateOptimization = false;
557 if (Ops.size() == 1) return Ops[0].Op;
559 unsigned Opcode = I->getOpcode();
561 if (Constant *V1 = dyn_cast<Constant>(Ops[Ops.size()-2].Op))
562 if (Constant *V2 = dyn_cast<Constant>(Ops.back().Op)) {
563 Ops.pop_back();
564 Ops.back().Op = ConstantExpr::get(Opcode, V1, V2);
565 return OptimizeExpression(I, Ops);
568 // Check for destructive annihilation due to a constant being used.
569 if (ConstantInt *CstVal = dyn_cast<ConstantInt>(Ops.back().Op))
570 switch (Opcode) {
571 default: break;
572 case Instruction::And:
573 if (CstVal->isZero()) { // ... & 0 -> 0
574 ++NumAnnihil;
575 return CstVal;
576 } else if (CstVal->isAllOnesValue()) { // ... & -1 -> ...
577 Ops.pop_back();
579 break;
580 case Instruction::Mul:
581 if (CstVal->isZero()) { // ... * 0 -> 0
582 ++NumAnnihil;
583 return CstVal;
584 } else if (CstVal->isOne()) {
585 Ops.pop_back(); // ... * 1 -> ...
587 break;
588 case Instruction::Or:
589 if (CstVal->isAllOnesValue()) { // ... | -1 -> -1
590 ++NumAnnihil;
591 return CstVal;
593 // FALLTHROUGH!
594 case Instruction::Add:
595 case Instruction::Xor:
596 if (CstVal->isZero()) // ... [|^+] 0 -> ...
597 Ops.pop_back();
598 break;
600 if (Ops.size() == 1) return Ops[0].Op;
602 // Handle destructive annihilation due to identities between elements in the
603 // argument list here.
604 switch (Opcode) {
605 default: break;
606 case Instruction::And:
607 case Instruction::Or:
608 case Instruction::Xor:
609 // Scan the operand lists looking for X and ~X pairs, along with X,X pairs.
610 // If we find any, we can simplify the expression. X&~X == 0, X|~X == -1.
611 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
612 // First, check for X and ~X in the operand list.
613 assert(i < Ops.size());
614 if (BinaryOperator::isNot(Ops[i].Op)) { // Cannot occur for ^.
615 Value *X = BinaryOperator::getNotArgument(Ops[i].Op);
616 unsigned FoundX = FindInOperandList(Ops, i, X);
617 if (FoundX != i) {
618 if (Opcode == Instruction::And) { // ...&X&~X = 0
619 ++NumAnnihil;
620 return Constant::getNullValue(X->getType());
621 } else if (Opcode == Instruction::Or) { // ...|X|~X = -1
622 ++NumAnnihil;
623 return ConstantInt::getAllOnesValue(X->getType());
628 // Next, check for duplicate pairs of values, which we assume are next to
629 // each other, due to our sorting criteria.
630 assert(i < Ops.size());
631 if (i+1 != Ops.size() && Ops[i+1].Op == Ops[i].Op) {
632 if (Opcode == Instruction::And || Opcode == Instruction::Or) {
633 // Drop duplicate values.
634 Ops.erase(Ops.begin()+i);
635 --i; --e;
636 IterateOptimization = true;
637 ++NumAnnihil;
638 } else {
639 assert(Opcode == Instruction::Xor);
640 if (e == 2) {
641 ++NumAnnihil;
642 return Constant::getNullValue(Ops[0].Op->getType());
644 // ... X^X -> ...
645 Ops.erase(Ops.begin()+i, Ops.begin()+i+2);
646 i -= 1; e -= 2;
647 IterateOptimization = true;
648 ++NumAnnihil;
652 break;
654 case Instruction::Add:
655 // Scan the operand lists looking for X and -X pairs. If we find any, we
656 // can simplify the expression. X+-X == 0.
657 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
658 assert(i < Ops.size());
659 // Check for X and -X in the operand list.
660 if (BinaryOperator::isNeg(Ops[i].Op)) {
661 Value *X = BinaryOperator::getNegArgument(Ops[i].Op);
662 unsigned FoundX = FindInOperandList(Ops, i, X);
663 if (FoundX != i) {
664 // Remove X and -X from the operand list.
665 if (Ops.size() == 2) {
666 ++NumAnnihil;
667 return Constant::getNullValue(X->getType());
668 } else {
669 Ops.erase(Ops.begin()+i);
670 if (i < FoundX)
671 --FoundX;
672 else
673 --i; // Need to back up an extra one.
674 Ops.erase(Ops.begin()+FoundX);
675 IterateOptimization = true;
676 ++NumAnnihil;
677 --i; // Revisit element.
678 e -= 2; // Removed two elements.
685 // Scan the operand list, checking to see if there are any common factors
686 // between operands. Consider something like A*A+A*B*C+D. We would like to
687 // reassociate this to A*(A+B*C)+D, which reduces the number of multiplies.
688 // To efficiently find this, we count the number of times a factor occurs
689 // for any ADD operands that are MULs.
690 std::map<Value*, unsigned> FactorOccurrences;
691 unsigned MaxOcc = 0;
692 Value *MaxOccVal = 0;
693 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
694 if (BinaryOperator *BOp = dyn_cast<BinaryOperator>(Ops[i].Op)) {
695 if (BOp->getOpcode() == Instruction::Mul && BOp->use_empty()) {
696 // Compute all of the factors of this added value.
697 std::vector<Value*> Factors;
698 FindSingleUseMultiplyFactors(BOp, Factors);
699 assert(Factors.size() > 1 && "Bad linearize!");
701 // Add one to FactorOccurrences for each unique factor in this op.
702 if (Factors.size() == 2) {
703 unsigned Occ = ++FactorOccurrences[Factors[0]];
704 if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[0]; }
705 if (Factors[0] != Factors[1]) { // Don't double count A*A.
706 Occ = ++FactorOccurrences[Factors[1]];
707 if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[1]; }
709 } else {
710 std::set<Value*> Duplicates;
711 for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
712 if (Duplicates.insert(Factors[i]).second) {
713 unsigned Occ = ++FactorOccurrences[Factors[i]];
714 if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[i]; }
722 // If any factor occurred more than one time, we can pull it out.
723 if (MaxOcc > 1) {
724 DOUT << "\nFACTORING [" << MaxOcc << "]: " << *MaxOccVal << "\n";
726 // Create a new instruction that uses the MaxOccVal twice. If we don't do
727 // this, we could otherwise run into situations where removing a factor
728 // from an expression will drop a use of maxocc, and this can cause
729 // RemoveFactorFromExpression on successive values to behave differently.
730 Instruction *DummyInst = BinaryOperator::CreateAdd(MaxOccVal, MaxOccVal);
731 std::vector<Value*> NewMulOps;
732 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
733 if (Value *V = RemoveFactorFromExpression(Ops[i].Op, MaxOccVal)) {
734 NewMulOps.push_back(V);
735 Ops.erase(Ops.begin()+i);
736 --i; --e;
740 // No need for extra uses anymore.
741 delete DummyInst;
743 unsigned NumAddedValues = NewMulOps.size();
744 Value *V = EmitAddTreeOfValues(I, NewMulOps);
745 Value *V2 = BinaryOperator::CreateMul(V, MaxOccVal, "tmp", I);
747 // Now that we have inserted V and its sole use, optimize it. This allows
748 // us to handle cases that require multiple factoring steps, such as this:
749 // A*A*B + A*A*C --> A*(A*B+A*C) --> A*(A*(B+C))
750 if (NumAddedValues > 1)
751 ReassociateExpression(cast<BinaryOperator>(V));
753 ++NumFactor;
755 if (Ops.empty())
756 return V2;
758 // Add the new value to the list of things being added.
759 Ops.insert(Ops.begin(), ValueEntry(getRank(V2), V2));
761 // Rewrite the tree so that there is now a use of V.
762 RewriteExprTree(I, Ops);
763 return OptimizeExpression(I, Ops);
765 break;
766 //case Instruction::Mul:
769 if (IterateOptimization)
770 return OptimizeExpression(I, Ops);
771 return 0;
775 /// ReassociateBB - Inspect all of the instructions in this basic block,
776 /// reassociating them as we go.
777 void Reassociate::ReassociateBB(BasicBlock *BB) {
778 for (BasicBlock::iterator BBI = BB->begin(); BBI != BB->end(); ) {
779 Instruction *BI = BBI++;
780 if (BI->getOpcode() == Instruction::Shl &&
781 isa<ConstantInt>(BI->getOperand(1)))
782 if (Instruction *NI = ConvertShiftToMul(BI, ValueRankMap)) {
783 MadeChange = true;
784 BI = NI;
787 // Reject cases where it is pointless to do this.
788 if (!isa<BinaryOperator>(BI) || BI->getType()->isFloatingPoint() ||
789 isa<VectorType>(BI->getType()))
790 continue; // Floating point ops are not associative.
792 // If this is a subtract instruction which is not already in negate form,
793 // see if we can convert it to X+-Y.
794 if (BI->getOpcode() == Instruction::Sub) {
795 if (ShouldBreakUpSubtract(BI)) {
796 BI = BreakUpSubtract(BI, ValueRankMap);
797 MadeChange = true;
798 } else if (BinaryOperator::isNeg(BI)) {
799 // Otherwise, this is a negation. See if the operand is a multiply tree
800 // and if this is not an inner node of a multiply tree.
801 if (isReassociableOp(BI->getOperand(1), Instruction::Mul) &&
802 (!BI->hasOneUse() ||
803 !isReassociableOp(BI->use_back(), Instruction::Mul))) {
804 BI = LowerNegateToMultiply(BI, ValueRankMap);
805 MadeChange = true;
810 // If this instruction is an associative binary operator, process it.
811 if (!BI->isAssociative()) continue;
812 BinaryOperator *I = cast<BinaryOperator>(BI);
814 // If this is an interior node of a reassociable tree, ignore it until we
815 // get to the root of the tree, to avoid N^2 analysis.
816 if (I->hasOneUse() && isReassociableOp(I->use_back(), I->getOpcode()))
817 continue;
819 // If this is an add tree that is used by a sub instruction, ignore it
820 // until we process the subtract.
821 if (I->hasOneUse() && I->getOpcode() == Instruction::Add &&
822 cast<Instruction>(I->use_back())->getOpcode() == Instruction::Sub)
823 continue;
825 ReassociateExpression(I);
829 void Reassociate::ReassociateExpression(BinaryOperator *I) {
831 // First, walk the expression tree, linearizing it and collecting the operands and their ranks.
832 std::vector<ValueEntry> Ops;
833 LinearizeExprTree(I, Ops);
835 DOUT << "RAIn:\t"; DEBUG(PrintOps(I, Ops)); DOUT << "\n";
837 // Now that we have linearized the tree to a list and have gathered all of
838 // the operands and their ranks, sort the operands by their rank. Use a
839 // stable_sort so that values with equal ranks will have their relative
840 // positions maintained (and so the compiler is deterministic). Note that
841 // this sorts so that the highest ranking values end up at the beginning of
842 // the vector.
843 std::stable_sort(Ops.begin(), Ops.end());
845 // OptimizeExpression - Now that we have the expression tree in a convenient
846 // sorted form, optimize it globally if possible.
847 if (Value *V = OptimizeExpression(I, Ops)) {
848 // This expression tree simplified to something that isn't a tree,
849 // eliminate it.
850 DOUT << "Reassoc to scalar: " << *V << "\n";
851 I->replaceAllUsesWith(V);
852 RemoveDeadBinaryOp(I);
853 return;
856 // We want to sink immediates as deeply as possible except in the case where
857 // this is a multiply tree used only by an add, and the immediate is a -1.
858 // In this case we reassociate to put the negation on the outside so that we
859 // can fold the negation into the add: (-X)*Y + Z -> Z-X*Y
860 if (I->getOpcode() == Instruction::Mul && I->hasOneUse() &&
861 cast<Instruction>(I->use_back())->getOpcode() == Instruction::Add &&
862 isa<ConstantInt>(Ops.back().Op) &&
863 cast<ConstantInt>(Ops.back().Op)->isAllOnesValue()) {
864 Ops.insert(Ops.begin(), Ops.back());
865 Ops.pop_back();
868 DOUT << "RAOut:\t"; DEBUG(PrintOps(I, Ops)); DOUT << "\n";
870 if (Ops.size() == 1) {
871 // This expression tree simplified to something that isn't a tree,
872 // eliminate it.
873 I->replaceAllUsesWith(Ops[0].Op);
874 RemoveDeadBinaryOp(I);
875 } else {
876 // Now that we ordered and optimized the expressions, splat them back into
877 // the expression tree, removing any unneeded nodes.
878 RewriteExprTree(I, Ops);
883 bool Reassociate::runOnFunction(Function &F) {
884 // Recalculate the rank map for F
885 BuildRankMap(F);
887 MadeChange = false;
888 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI)
889 ReassociateBB(FI);
891 // We are done with the rank map...
892 RankMap.clear();
893 ValueRankMap.clear();
894 return MadeChange;
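// Invocation sketch (an assumption about typical use, not part of this file):
// since the pass is registered above under the name "reassociate", it can be
// exercised directly with the opt tool, e.g.
//   opt -reassociate -stats input.bc -o output.bc
// where -stats prints the STATISTIC counters declared at the top of this file.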