1 //===- Reassociate.cpp - Reassociate binary expressions -------------------===//
3 // This pass reassociates commutative expressions in an order that is designed
4 // to promote better constant propagation, GCSE, LICM, PRE...
6 // For example: 4 + (x + 5) -> x + (4 + 5)
8 // Note that this pass works best if left shifts have been promoted to explicit
9 // multiplies before this pass executes.
11 // In the implementation of this algorithm, constants are assigned rank = 0,
12 // function arguments are rank = 1, and other values are assigned ranks
13 // corresponding to the reverse post order traversal of current function
14 // (starting at 2), which effectively gives values in deep loops higher rank
15 // than values not in loops.
17 //===----------------------------------------------------------------------===//
19 #include "llvm/Transforms/Scalar.h"
20 #include "llvm/Function.h"
21 #include "llvm/BasicBlock.h"
22 #include "llvm/iOperators.h"
23 #include "llvm/Type.h"
24 #include "llvm/Pass.h"
25 #include "llvm/Constant.h"
26 #include "llvm/Support/CFG.h"
27 #include "Support/PostOrderIterator.h"
28 #include "Support/StatisticReporter.h"
30 //#define DEBUG_REASSOC(x) std::cerr << x
31 #define DEBUG_REASSOC(x)
33 static Statistic<> NumLinear ("reassociate\t- Number of insts linearized");
34 static Statistic<> NumChanged("reassociate\t- Number of insts reassociated");
35 static Statistic<> NumSwapped("reassociate\t- Number of insts with operands swapped");
// Reassociate - FunctionPass that reorders commutative, associative
// expressions so that higher-ranked operands end up on the LHS, exposing
// opportunities for constant folding and redundancy elimination.
38 class Reassociate : public FunctionPass {
  // RankMap - Rank assigned to each basic block by BuildRankMap (its
  // position in a reverse post order walk of the function, starting at 2).
39 map<BasicBlock*, unsigned> RankMap;
41 const char *getPassName() const {
42 return "Expression Reassociation";
  // runOnFunction - Entry point: builds the rank map for F, then
  // reassociates every basic block.  Returns true if F was modified.
45 bool runOnFunction(Function *F);
47 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
  // BuildRankMap - Populate RankMap for all basic blocks of F.
51 void BuildRankMap(Function *F);
  // getRank - Compute the rank of an arbitrary value (see file header for
  // the ranking scheme: constants 0, arguments 1, instructions >= 2).
52 unsigned getRank(Value *V);
  // ReassociateExpr - Canonicalize one commutative binary operator so the
  // greater-ranked operand is on the LHS; returns true on change.
53 bool ReassociateExpr(BinaryOperator *I);
  // ReassociateBB - Process every instruction in BB; returns true on change.
54 bool ReassociateBB(BasicBlock *BB);
// createReassociatePass - Public factory for the reassociation pass.
58 Pass *createReassociatePass() { return new Reassociate(); }
// BuildRankMap - Walk the CFG of F in reverse post order, assigning each
// basic block a monotonically increasing rank.  Blocks inside deep loops
// are visited later and therefore receive higher ranks than blocks that
// are not in loops.
60 void Reassociate::BuildRankMap(Function *F) {
62 ReversePostOrderTraversal<Function*> RPOT(F);
  // Visit blocks in RPO order; each visited block is entered into RankMap.
63 for (ReversePostOrderTraversal<Function*>::rpo_iterator I = RPOT.begin(),
64 E = RPOT.end(); I != E; ++I)
// getRank - Return the rank of the specified value.  Function arguments are
// rank 1; globals and constants are rank 0; instructions that cannot be
// moved (PHI nodes, allocas, mallocs, terminators) take the rank of their
// parent block; all other instructions take the maximum rank of their
// operands, capped at the parent block's rank.
68 unsigned Reassociate::getRank(Value *V) {
69 if (isa<Argument>(V)) return 1; // Function argument...
70 if (Instruction *I = dyn_cast<Instruction>(V)) {
71 // If this is an expression, return the MAX(rank(LHS), rank(RHS)) so that we
72 // can reassociate expressions for code motion! Since we do not recurse for
73 // PHI nodes, we cannot have infinite recursion here, because there cannot
74 // be loops in the value graph (except for PHI nodes).
  // Non-movable instructions are pinned to their block: use the block rank.
76 if (I->getOpcode() == Instruction::PHINode ||
77 I->getOpcode() == Instruction::Alloca ||
78 I->getOpcode() == Instruction::Malloc || isa<TerminatorInst>(I) ||
80 return RankMap[I->getParent()];
  // MaxRank caps the expression rank at the parent block's rank, letting
  // the operand scan terminate early once the cap is reached.
82 unsigned Rank = 0, MaxRank = RankMap[I->getParent()];
83 for (unsigned i = 0, e = I->getNumOperands();
84 i != e && Rank != MaxRank; ++i)
85 Rank = std::max(Rank, getRank(I->getOperand(i)));
90 // Otherwise it's a global or constant, rank 0.
95 // isCommutativeOperator - Return the instruction cast to BinaryOperator if it
96 // is commutative and associative (null otherwise). If the instruction is not
97 // commutative and associative, we cannot reorder its operands!
99 static inline BinaryOperator *isCommutativeOperator(Instruction *I) {
100 // Floating point operations do not commute!
101 if (I->getType()->isFloatingPoint()) return 0;
  // Integer add/mul and the bitwise logic ops are both commutative and
  // associative, so their operands may be freely reordered.
103 if (I->getOpcode() == Instruction::Add ||
104 I->getOpcode() == Instruction::Mul ||
105 I->getOpcode() == Instruction::And ||
106 I->getOpcode() == Instruction::Or ||
107 I->getOpcode() == Instruction::Xor)
108 return cast<BinaryOperator>(I);
// ReassociateExpr - Canonicalize a single commutative binary operator: swap
// operands so the greater-ranked value is on the LHS, then rotate the
// expression tree (e.g. ((a + 12) + 10) -> (a + (12 + 10))) when that moves
// a lower-ranked RHS deeper, recursing to recheck the modified subtree.
// Returns true if the instruction was changed.
113 bool Reassociate::ReassociateExpr(BinaryOperator *I) {
114 Value *LHS = I->getOperand(0);
115 Value *RHS = I->getOperand(1);
116 unsigned LHSRank = getRank(LHS);
117 unsigned RHSRank = getRank(RHS);
119 bool Changed = false;
121 // Make sure the LHS of the operand always has the greater rank...
122 if (LHSRank < RHSRank) {
125 std::swap(LHSRank, RHSRank);
128 DEBUG_REASSOC("Transposed: " << I << " Result BB: " << I->getParent());
131 // If the LHS is the same operator as the current one is, and if we are the
132 // only expression using it...
  // use_size() == 1 guarantees I is the sole user of LHSI, so mutating
  // LHSI's operands cannot break any other expression.
134 if (BinaryOperator *LHSI = dyn_cast<BinaryOperator>(LHS))
135 if (LHSI->getOpcode() == I->getOpcode() && LHSI->use_size() == 1) {
136 // If the rank of our current RHS is less than the rank of the LHS's LHS,
137 // then we reassociate the two instructions...
138 if (RHSRank < getRank(LHSI->getOperand(0))) {
  // If LHSI's operand 0 is itself the same operator, take operand 1
  // instead so the non-tree portion is hoisted out.
140 if (BinaryOperator *IOp = dyn_cast<BinaryOperator>(LHSI->getOperand(0)))
141 if (IOp->getOpcode() == LHSI->getOpcode())
142 TakeOp = 1; // Hoist out non-tree portion
144 // Convert ((a + 12) + 10) into (a + (12 + 10))
145 I->setOperand(0, LHSI->getOperand(TakeOp));
146 LHSI->setOperand(TakeOp, RHS);
147 I->setOperand(1, LHSI);
150 DEBUG_REASSOC("Reassociated: " << I << " Result BB: " <<I->getParent());
152 // Since we modified the RHS instruction, make sure that we recheck it.
153 ReassociateExpr(LHSI);
162 // NegateValue - Insert instructions before the instruction pointed to by BI,
163 // that computes the negative version of the value specified. The negative
164 // version of the value is returned, and BI is left pointing at the instruction
165 // that should be processed next by the reassociation pass.
167 static Value *NegateValue(Value *V, BasicBlock *BB, BasicBlock::iterator &BI) {
168 // We are trying to expose opportunity for reassociation. One of the things
169 // that we want to do to achieve this is to push a negation as deep into an
170 // expression chain as possible, to expose the add instructions. In practice,
171 // this means that we turn this:
172 // X = -(A+12+C+D) into X = -A + -12 + -C + -D = -12 + -A + -C + -D
173 // so that later, a: Y = 12+X could get reassociated with the -12 to eliminate
174 // the constants. We assume that instcombine will clean up the mess later if
175 // we introduce tons of unnecessary negation instructions...
  // Push the negation through a single-use add: -(a+b) == (-a) + (-b).
  // The use_size() == 1 check ensures no other user sees the rewritten add.
177 if (Instruction *I = dyn_cast<Instruction>(V))
178 if (I->getOpcode() == Instruction::Add && I->use_size() == 1) {
179 Value *RHS = NegateValue(I->getOperand(1), BB, BI);
180 Value *LHS = NegateValue(I->getOperand(0), BB, BI);
182 // We must actually insert a new add instruction here, because the neg
183 // instructions do not dominate the old add instruction in general. By
184 // adding it now, we are assured that the neg instructions we just
185 // inserted dominate the instruction we are about to insert after them.
187 BasicBlock::iterator NBI = BI;
189 // Scan through the inserted instructions, looking for RHS, which must be
190 // after LHS in the instruction list.
191 while (*NBI != RHS) ++NBI;
  // Build the replacement add of the two negated halves, placed right
  // after RHS so both negations dominate it.
194 BinaryOperator::create(Instruction::Add, LHS, RHS, I->getName()+".neg");
195 BB->getInstList().insert(NBI+1, Add); // Add to the basic block...
199 // Insert a 'neg' instruction that subtracts the value from zero to get the
  // Base case: materialize the negation as (0 - V).
203 BinaryOperator::create(Instruction::Sub,
204 Constant::getNullValue(V->getType()), V,
205 V->getName()+".neg");
206 BI = BB->getInstList().insert(BI, Neg); // Add to the basic block...
// ReassociateBB - Scan every instruction in BB.  Commutative operators are
// linearized (tree -> left-leaning chain) and rank-canonicalized via
// ReassociateExpr; subtracts (other than negations) are rewritten as
// add-of-negation so they can commute with other adds.  Returns true if any
// instruction was changed.
211 bool Reassociate::ReassociateBB(BasicBlock *BB) {
212 bool Changed = false;
213 for (BasicBlock::iterator BI = BB->begin(); BI != BB->end(); ++BI) {
214 Instruction *Inst = *BI;
216 // If this instruction is a commutative binary operator, and the ranks of
217 // the two operands are sorted incorrectly, fix it now.
219 if (BinaryOperator *I = isCommutativeOperator(Inst)) {
220 if (!I->use_empty()) {
221 // Make sure that we don't have a tree-shaped computation. If we do,
222 // linearize it. Convert (A+B)+(C+D) into ((A+B)+C)+D
224 Instruction *LHSI = dyn_cast<Instruction>(I->getOperand(0));
225 Instruction *RHSI = dyn_cast<Instruction>(I->getOperand(1));
  // RHSI->use_size() == 1 means only I uses (C+D), so it is safe to
  // cannibalize RHSI's operands when linearizing.
226 if (LHSI && (int)LHSI->getOpcode() == I->getOpcode() &&
227 RHSI && (int)RHSI->getOpcode() == I->getOpcode() &&
228 RHSI->use_size() == 1) {
229 // Insert a new temporary instruction... (A+B)+C
230 BinaryOperator *Tmp = BinaryOperator::create(I->getOpcode(), LHSI,
232 RHSI->getName()+".ra");
233 BI = BB->getInstList().insert(BI, Tmp); // Add to the basic block...
234 I->setOperand(0, Tmp);
235 I->setOperand(1, RHSI->getOperand(1));
237 // Process the temporary instruction for reassociation now.
241 DEBUG_REASSOC("Linearized: " << I << " Result BB: " << BB);
244 // Make sure that this expression is correctly reassociated with respect
245 // to its used values...
247 Changed |= ReassociateExpr(I);
  // Skip subtracts that are already negations (0 - X): rewriting those
  // would loop forever re-creating the same neg.
250 } else if (Inst->getOpcode() == Instruction::Sub &&
251 Inst->getOperand(0) != Constant::getNullValue(Inst->getType())) {
252 // Convert a subtract into an add and a neg instruction... so that sub
253 // instructions can be commuted with other add instructions...
255 Instruction *New = BinaryOperator::create(Instruction::Add,
259 Value *NegatedValue = Inst->getOperand(1);
261 // Everyone now refers to the add instruction...
262 Inst->replaceAllUsesWith(New);
264 // Put the new add in the place of the subtract... deleting the subtract
265 delete BB->getInstList().replaceWith(BI, New);
267 // Calculate the negative value of Operand 1 of the sub instruction...
268 // and set it as the RHS of the add instruction we just made...
269 New->setOperand(1, NegateValue(NegatedValue, BB, BI));
272 DEBUG_REASSOC("Negated: " << New << " Result BB: " << BB);
// runOnFunction - Top-level driver: rebuild the per-function rank map, then
// reassociate every basic block in F.  Returns true if F was modified.
280 bool Reassociate::runOnFunction(Function *F) {
281 // Recalculate the rank map for F
284 bool Changed = false;
285 for (Function::iterator FI = F->begin(), FE = F->end(); FI != FE; ++FI)
286 Changed |= ReassociateBB(*FI);
288 // We are done with the rank map...