//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//
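
// As a simple example of what this pass does, consider (hypothetical IR):
//
//   store i32 1, i32* %P
//   store i32 2, i32* %P
//
// The first store is dead: the second store overwrites the same location
// before any intervening read, so the pass deletes it.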
#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");
namespace {
  struct DSE : public FunctionPass {
    TargetData *TD;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID) {}

    virtual bool runOnFunction(Function &F) {
      bool Changed = false;

      DominatorTree &DT = getAnalysis<DominatorTree>();

      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT.isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);

      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool handleFreeWithNonTrivialDependency(const CallInst *F,
                                            MemDepResult Dep);
    bool handleEndBlock(BasicBlock &BB);
    bool RemoveUndeadPointers(Value *Ptr, unsigned killPointerSize,
                              BasicBlock::iterator &BBI,
                              SmallPtrSet<Value*, 64> &deadPointers);
    void DeleteDeadInstruction(Instruction *I,
                               SmallPtrSet<Value*, 64> *deadPointers = 0);

    // getAnalysisUsage - We require a dominator tree, alias analysis, and
    // memory dependence information; the CFG itself is never modified.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }

    unsigned getPointerSize(Value *V) const;
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }
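
// Note: the pass is registered under the name "dse" above, so it can be run
// in isolation with something like `opt -dse -S input.ll` (a usage sketch;
// the exact driver invocation depends on the LLVM version and build).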
/// doesClobberMemory - Does this instruction clobber (write without reading)
/// some memory?
static bool doesClobberMemory(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}
/// isElidable - If the value of this instruction and the memory it writes to
/// are unused, may we delete this instruction?
static bool isElidable(Instruction *I) {
  assert(doesClobberMemory(I));
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
    return II->getIntrinsicID() != Intrinsic::lifetime_end;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();
  return true;
}
/// getPointerOperand - Return the pointer that is being clobbered.
static Value *getPointerOperand(Instruction *I) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getArgOperand(0);

  // The remaining clobbering intrinsics keep their pointer at different
  // argument positions.
  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: assert(false && "Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return II->getArgOperand(0);
  case Intrinsic::lifetime_end:
    return II->getArgOperand(1);
  }
}
/// getStoreSize - Return the length in bytes of the write by the clobbering
/// instruction. If variable or unknown, returns -1.
static unsigned getStoreSize(Instruction *I, const TargetData *TD) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!TD) return -1u;
    return TD->getTypeStoreSize(SI->getOperand(0)->getType());
  }

  Value *Len;
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
    Len = MI->getLength();
  } else {
    IntrinsicInst *II = cast<IntrinsicInst>(I);
    switch (II->getIntrinsicID()) {
    default: assert(false && "Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return -1u;
    case Intrinsic::lifetime_end:
      Len = II->getArgOperand(0);
      break;
    }
  }
  // A length of all-ones means "unknown" for the lifetime intrinsics.
  if (ConstantInt *LenCI = dyn_cast<ConstantInt>(Len))
    if (!LenCI->isAllOnesValue())
      return LenCI->getZExtValue();
  return -1u;
}
/// isStoreAtLeastAsWideAs - Return true if the size of the store in I1 is
/// greater than or equal to the size of the store in I2.  This returns false
/// if we don't know.
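///
/// For example (hypothetical IR), an i64 store to the same address is at
/// least as wide as an earlier i32 store, so the narrower store can be
/// killed:
///
///   store i32 0, i32* %P
///   %Q = bitcast i32* %P to i64*
///   store i64 -1, i64* %Q   ; covers all four bytes written above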
static bool isStoreAtLeastAsWideAs(Instruction *I1, Instruction *I2,
                                   const TargetData *TD) {
  const Type *I1Ty = getPointerOperand(I1)->getType();
  const Type *I2Ty = getPointerOperand(I2)->getType();

  // Exactly the same type, must have exactly the same size.
  if (I1Ty == I2Ty) return true;

  int I1Size = getStoreSize(I1, TD);
  int I2Size = getStoreSize(I2, TD);

  return I1Size != -1 && I2Size != -1 && I1Size >= I2Size;
}
bool DSE::runOnBasicBlock(BasicBlock &BB) {
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
  TD = getAnalysisIfAvailable<TargetData>();

  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = BBI++;

    // If we find a store or a free, get its memory dependence.
    if (!doesClobberMemory(Inst) && !isFreeCall(Inst))
      continue;

    MemDepResult InstDep = MD.getDependency(Inst);

    // Ignore non-local stores.
    // FIXME: cross-block DSE would be fun. :)
    if (InstDep.isNonLocal()) continue;

    // Handle frees whose dependencies are non-trivial.
    if (const CallInst *F = isFreeCall(Inst)) {
      MadeChange |= handleFreeWithNonTrivialDependency(F, InstDep);
      continue;
    }

    // If not a definite must-alias dependency, ignore it.
    if (!InstDep.isDef())
      continue;

    // If this is a store-store dependence, then the previous store is dead so
    // long as this store is at least as big as it.
    if (doesClobberMemory(InstDep.getInst())) {
      Instruction *DepStore = InstDep.getInst();
      if (isStoreAtLeastAsWideAs(Inst, DepStore, TD) &&
          isElidable(DepStore)) {
        // Delete the store and now-dead instructions that feed it.
        DeleteDeadInstruction(DepStore);
        NumFastStores++;
        MadeChange = true;

        // DeleteDeadInstruction can delete the current instruction in loop
        // cases; reset BBI.
        BBI = Inst;
        if (BBI != BB.begin())
          --BBI;
        continue;
      }
    }

    if (!isElidable(Inst))
      continue;

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
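    // For example (hypothetical IR):
    //
    //   %v = load i32* %P
    //   store i32 %v, i32* %P   ; stores back the value just loaded: a no-op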
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            SI->getOperand(0) == DepLoad) {
          // DeleteDeadInstruction can delete the current instruction.  Save
          // BBI in case we need it.
          WeakVH NextInst(BBI);

          DeleteDeadInstruction(SI);

          if (NextInst == 0)  // Next instruction deleted.
            BBI = BB.begin();
          else if (BBI != BB.begin())  // Revisit this instruction if possible.
            --BBI;
          NumFastStores++;
          MadeChange = true;
          continue;
        }
      }
    }

    // If this store's dependency is a lifetime end marker, the stored-to
    // object is already dead, so we can throw away the store.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(InstDep.getInst())) {
      if (II->getIntrinsicID() == Intrinsic::lifetime_end) {
        // Delete the store and now-dead instructions that feed it.
        // DeleteDeadInstruction can delete the current instruction.  Save
        // BBI in case we need it.
        WeakVH NextInst(BBI);

        DeleteDeadInstruction(Inst);

        if (NextInst == 0)  // Next instruction deleted.
          BBI = BB.begin();
        else if (BBI != BB.begin())  // Revisit this instruction if possible.
          --BBI;
        NumFastStores++;
        MadeChange = true;
        continue;
      }
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}
/// handleFreeWithNonTrivialDependency - Handle frees of entire structures
/// whose dependency is a store to a field of that structure.
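///
/// For example (hypothetical IR), the store to the field is dead because the
/// enclosing object is freed immediately afterwards without being read:
///
///   %F = getelementptr inbounds %struct.S* %S, i32 0, i32 1
///   store i32 0, i32* %F
///   %P = bitcast %struct.S* %S to i8*
///   call void @free(i8* %P)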
bool DSE::handleFreeWithNonTrivialDependency(const CallInst *F,
                                             MemDepResult Dep) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  Instruction *Dependency = Dep.getInst();
  if (!Dependency || !doesClobberMemory(Dependency) || !isElidable(Dependency))
    return false;

  Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();

  // Check for aliasing.
  if (AA.alias(F->getArgOperand(0), 1, DepPointer, 1) !=
      AliasAnalysis::MustAlias)
    return false;

  // DCE instructions only used to calculate that store.
  DeleteDeadInstruction(Dependency);
  NumFastStores++;
  return true;
}
/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
///
///   %A = alloca i32
///   ...
///   store i32 1, i32* %A
///   ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Pointers alloca'd in this function are dead in the end block.
  SmallPtrSet<Value*, 64> deadPointers;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
    if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
      deadPointers.insert(AI);

  // Treat byval arguments the same way: stores to them are dead at the end
  // of the function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      deadPointers.insert(AI);
  // Scan the basic block backwards.
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store whose pointer is dead...
    if (doesClobberMemory(BBI)) {
      if (isElidable(BBI)) {
        // See through pointer-to-pointer bitcasts.
        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();

        // Alloca'd pointers or byval arguments (which are functionally like
        // alloca's) are valid candidates for removal.
        if (deadPointers.count(pointerOperand)) {
          // DCE instructions only used to calculate that store.
          Instruction *Dead = BBI;
          ++BBI;
          DeleteDeadInstruction(Dead, &deadPointers);
          NumFastStores++;
          MadeChange = true;
          continue;
        }
      }

      // Because a memcpy or memmove is also a load, we can't skip it if we
      // didn't remove it.
      if (!isa<MemTransferInst>(BBI))
        continue;
    }
    Value *killPointer = 0;
    unsigned killPointerSize = AliasAnalysis::UnknownSize;

    // If we encounter a use of the pointer, it is no longer considered dead.
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      // However, if this load is unused and not volatile, we can go ahead and
      // remove it, and not have to worry about it making our pointer undead!
      if (L->use_empty() && !L->isVolatile()) {
        ++BBI;
        DeleteDeadInstruction(L, &deadPointers);
        NumFastOther++;
        MadeChange = true;
        continue;
      }

      killPointer = L->getPointerOperand();
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      killPointer = V->getOperand(0);
    } else if (isa<MemTransferInst>(BBI) &&
               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
      killPointer = cast<MemTransferInst>(BBI)->getSource();
      killPointerSize = cast<ConstantInt>(
                         cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
      deadPointers.erase(A);

      // Dead alloca's can be DCE'd when we reach them.
      if (A->use_empty()) {
        ++BBI;
        DeleteDeadInstruction(A, &deadPointers);
        NumFastOther++;
        MadeChange = true;
      }

      continue;
    } else if (CallSite CS = cast<Value>(BBI)) {
      // If this call does not access memory, it can't be undeadifying any
      // of our pointers.
      if (AA.doesNotAccessMemory(CS))
        continue;

      unsigned modRef = 0;
      unsigned other = 0;

      // Remove any pointers made undead by the call from the dead set.
      std::vector<Value*> dead;
      for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
           E = deadPointers.end(); I != E; ++I) {
        // HACK: if we detect that our AA is imprecise, it's not
        // worth it to scan the rest of the deadPointers set.  Just
        // assume that the AA will return ModRef for everything, and
        // go ahead and bail.
        if (modRef >= 16 && other == 0) {
          deadPointers.clear();
          return MadeChange;
        }

        // See if the call site touches it.
        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I,
                                                         getPointerSize(*I));

        if (A == AliasAnalysis::ModRef)
          ++modRef;
        else
          ++other;

        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          dead.push_back(*I);
      }

      for (std::vector<Value*>::iterator I = dead.begin(), E = dead.end();
           I != E; ++I)
        deadPointers.erase(*I);

      continue;
    } else if (isInstructionTriviallyDead(BBI)) {
      // For any non-memory-affecting non-terminators, DCE them as we reach
      // them.
      Instruction *Inst = BBI;
      ++BBI;
      DeleteDeadInstruction(Inst, &deadPointers);
      NumFastOther++;
      MadeChange = true;
      continue;
    }

    if (!killPointer)
      continue;

    killPointer = killPointer->getUnderlyingObject();

    // Deal with undead pointers.
    MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
                                       deadPointers);
  }

  return MadeChange;
}
/// RemoveUndeadPointers - Check for uses of a pointer that make it "undead"
/// (i.e. still live) when scanning for dead stores to alloca's.
bool DSE::RemoveUndeadPointers(Value *killPointer, unsigned killPointerSize,
                               BasicBlock::iterator &BBI,
                               SmallPtrSet<Value*, 64> &deadPointers) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  // If the kill pointer can be easily reduced to an alloca,
  // don't bother doing extraneous AA queries.
  if (deadPointers.count(killPointer)) {
    deadPointers.erase(killPointer);
    return false;
  }

  // A global can't be in the dead pointer set.
  if (isa<GlobalValue>(killPointer))
    return false;

  bool MadeChange = false;

  SmallVector<Value*, 16> undead;

  for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
       E = deadPointers.end(); I != E; ++I) {
    // See if this pointer could alias it.
    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I),
                                            killPointer, killPointerSize);

    // If it must-alias and is a store, we can delete it.
    if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
      StoreInst *S = cast<StoreInst>(BBI);

      // Remove it!
      ++BBI;
      DeleteDeadInstruction(S, &deadPointers);
      NumFastStores++;
      MadeChange = true;

      continue;
    } else if (A != AliasAnalysis::NoAlias) {
      // Otherwise, it is undead.
      undead.push_back(*I);
    }
  }

  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
       I != E; ++I)
    deadPointers.erase(*I);

  return MadeChange;
}
/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
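///
/// For example (hypothetical IR), deleting the store below also deletes the
/// add, because the store was the add's only use:
///
///   %t = add i32 %x, 1
///   store i32 %t, i32* %P   ; deleting this makes %t trivially dead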
void DSE::DeleteDeadInstruction(Instruction *I,
                                SmallPtrSet<Value*, 64> *ValueSet) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;  // Offset the ++ below; I itself is counted by our callers.

  // Before we touch this instruction, remove it from memdep!
  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MDA.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->erase(DeadInst);
  } while (!NowDeadInsts.empty());
}
unsigned DSE::getPointerSize(Value *V) const {
  if (TD) {
    if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
      // Get size information for the alloca.
      if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
        return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
    } else {
      assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
      const PointerType *PT = cast<PointerType>(V->getType());
      return TD->getTypeAllocSize(PT->getElementType());
    }
  }
  return AliasAnalysis::UnknownSize;
}