//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License.  See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
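//
// For example (illustrative IR), the first store below is dead because the
// second fully overwrites it before any read:
//
//   store i32 0, i32* %p
//   store i32 1, i32* %p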
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");

namespace {
  struct DSE : public FunctionPass {
    TargetData *TD;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID) {
      initializeDSEPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnFunction(Function &F) {
      bool Changed = false;

      DominatorTree &DT = getAnalysis<DominatorTree>();

      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks.  Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT.isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);
      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool handleFreeWithNonTrivialDependency(const CallInst *F,
                                            Instruction *Inst,
                                            MemDepResult Dep);
    bool handleEndBlock(BasicBlock &BB);
    bool RemoveUndeadPointers(Value *Ptr, uint64_t killPointerSize,
                              BasicBlock::iterator &BBI,
                              SmallPtrSet<Value*, 64> &deadPointers);
    void DeleteDeadInstruction(Instruction *I,
                               SmallPtrSet<Value*, 64> *deadPointers = 0);

    // getAnalysisUsage - Declare the analyses this pass requires and
    // preserves; the CFG itself is never modified.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }

    uint64_t getPointerSize(Value *V) const;
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)

FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }

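// Illustrative usage note: the pass is registered under the name "dse", so it
// can be run standalone with, e.g.:
//   opt -dse -S input.ll
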
/// doesClobberMemory - Does this instruction clobber (write without reading)
/// some memory?
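/// For example (illustrative): plain stores and memset/memcpy/memmove writes
/// qualify; a load does not, and neither does a call that may read memory.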
static bool doesClobberMemory(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}

/// isElidable - If the value of this instruction and the memory it writes to
/// are unused, may we delete this instruction?
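/// For example (illustrative): an ordinary "store i32 0, i32* %p" is
/// elidable when unobserved, but a volatile store or a lifetime.end marker
/// is not.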
static bool isElidable(Instruction *I) {
  assert(doesClobberMemory(I));
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
    return II->getIntrinsicID() != Intrinsic::lifetime_end;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();
  return true;
}

/// getPointerOperand - Return the pointer that is being clobbered.
static Value *getPointerOperand(Instruction *I) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getArgOperand(0);

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: assert(false && "Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return II->getArgOperand(0);
  case Intrinsic::lifetime_end:
    return II->getArgOperand(1);
  }
}

/// getStoreSize - Return the length in bytes of the write by the clobbering
/// instruction.  If variable or unknown, returns AliasAnalysis::UnknownSize.
static uint64_t getStoreSize(Instruction *I, const TargetData *TD) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!TD) return AliasAnalysis::UnknownSize;
    return TD->getTypeStoreSize(SI->getOperand(0)->getType());
  }

  Value *Len;
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
    Len = MI->getLength();
  } else {
    IntrinsicInst *II = cast<IntrinsicInst>(I);
    switch (II->getIntrinsicID()) {
    default: assert(false && "Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return AliasAnalysis::UnknownSize;
    case Intrinsic::lifetime_end:
      Len = II->getArgOperand(0);
      break;
    }
  }
  if (ConstantInt *LenCI = dyn_cast<ConstantInt>(Len))
    if (!LenCI->isAllOnesValue())
      return LenCI->getZExtValue();
  return AliasAnalysis::UnknownSize;
}

/// isStoreAtLeastAsWideAs - Return true if the size of the store in I1 is
/// greater than or equal to the store in I2.  This returns false if we don't
/// know.
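///
/// For example (illustrative): an i32 store is at least as wide as an i8
/// store to the same address, so it fully overwrites it; the reverse does
/// not hold.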
static bool isStoreAtLeastAsWideAs(Instruction *I1, Instruction *I2,
                                   const TargetData *TD) {
  const Type *I1Ty = getPointerOperand(I1)->getType();
  const Type *I2Ty = getPointerOperand(I2)->getType();

  // Exactly the same type, must have exactly the same size.
  if (I1Ty == I2Ty) return true;

  uint64_t I1Size = getStoreSize(I1, TD);
  uint64_t I2Size = getStoreSize(I2, TD);

  return I1Size != AliasAnalysis::UnknownSize &&
         I2Size != AliasAnalysis::UnknownSize &&
         I1Size >= I2Size;
}

bool DSE::runOnBasicBlock(BasicBlock &BB) {
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
  TD = getAnalysisIfAvailable<TargetData>();

  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = BBI++;

    // If we find a store or a free, get its memory dependence.
    if (!doesClobberMemory(Inst) && !isFreeCall(Inst))
      continue;

    MemDepResult InstDep = MD.getDependency(Inst);

    // Ignore non-local stores.
    // FIXME: cross-block DSE would be fun. :)
    if (InstDep.isNonLocal()) continue;

    // Handle frees whose dependencies are non-trivial.
    if (const CallInst *F = isFreeCall(Inst)) {
      MadeChange |= handleFreeWithNonTrivialDependency(F, Inst, InstDep);
      continue;
    }

    // If not a definite must-alias dependency, ignore it.
    if (!InstDep.isDef())
      continue;

    // If this is a store-store dependence, then the previous store is dead so
    // long as this store is at least as big as it.
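    // E.g. (illustrative IR, %q being %p bitcast to i32*): the i8 store is
    // dead because the later store is at least as wide and must-aliases it:
    //   store i8 0, i8* %p
    //   store i32 1, i32* %q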
    if (doesClobberMemory(InstDep.getInst())) {
      Instruction *DepStore = InstDep.getInst();
      if (isStoreAtLeastAsWideAs(Inst, DepStore, TD) &&
          isElidable(DepStore)) {
        // Delete the store and now-dead instructions that feed it.
        DeleteDeadInstruction(DepStore);
        ++NumFastStores;
        MadeChange = true;

        // DeleteDeadInstruction can delete the current instruction in loop
        // cases, reset BBI.
        BBI = Inst;
        if (BBI != BB.begin())
          --BBI;
        continue;
      }
    }

    if (!isElidable(Inst))
      continue;

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
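    // E.g. (illustrative IR):
    //   %v = load i32* %p
    //   store i32 %v, i32* %p   ; removable: stores back the value just loaded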
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            SI->getOperand(0) == DepLoad) {
          // DeleteDeadInstruction can delete the current instruction.  Save
          // BBI in case we need it.
          WeakVH NextInst(BBI);

          DeleteDeadInstruction(SI);

          if (NextInst == 0)  // Next instruction deleted.
            BBI = BB.begin();
          else if (BBI != BB.begin())  // Revisit this instruction if possible.
            --BBI;
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }
    }

    // If this is a lifetime end marker, we can throw away the store.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(InstDep.getInst())) {
      if (II->getIntrinsicID() == Intrinsic::lifetime_end) {
        // Delete the store and now-dead instructions that feed it.
        // DeleteDeadInstruction can delete the current instruction.  Save
        // BBI in case we need it.
        WeakVH NextInst(BBI);

        DeleteDeadInstruction(Inst);

        if (NextInst == 0)  // Next instruction deleted.
          BBI = BB.begin();
        else if (BBI != BB.begin())  // Revisit this instruction if possible.
          --BBI;
        ++NumFastStores;
        MadeChange = true;
        continue;
      }
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}

/// handleFreeWithNonTrivialDependency - Handle frees of entire structures
/// whose dependency is a store to a field of that structure.
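/// For example (illustrative C), the store is dead because the object is
/// freed immediately afterwards:
///   s->field = 0;
///   free(s);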
bool DSE::handleFreeWithNonTrivialDependency(const CallInst *F,
                                             Instruction *Inst,
                                             MemDepResult Dep) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();

  bool MadeChange = false;
  do {
    Instruction *Dependency = Dep.getInst();
    if (!Dependency || !doesClobberMemory(Dependency) ||
        !isElidable(Dependency))
      return MadeChange;

    Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();

    // Check for aliasing.
    if (AA.alias(F->getArgOperand(0), 1, DepPointer, 1) !=
          AliasAnalysis::MustAlias)
      return MadeChange;

    // DCE instructions only used to calculate that store.
    DeleteDeadInstruction(Dependency);
    ++NumFastStores;
    MadeChange = true;

    // Inst's old Dependency is now deleted.  Compute the next dependency,
    // which may also be dead, as in
    //    s[0] = 0;
    //    s[1] = 0;  // This has just been deleted.
    //    free(s);
    Dep = MD.getDependency(Inst);
  } while (!Dep.isNonLocal());
  return MadeChange;
}

/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block.  Ex:
/// %A = alloca i32
/// ...
/// store i32 1, i32* %A
/// ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Pointers alloca'd in this function are dead in the end block.
  SmallPtrSet<Value*, 64> deadPointers;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
    if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
      deadPointers.insert(AI);

  // Treat byval arguments the same; stores to them are dead at the end of
  // the function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      deadPointers.insert(AI);

  // Scan the basic block backwards.
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store whose pointer is dead.
    if (doesClobberMemory(BBI)) {
      if (isElidable(BBI)) {
        // See through pointer-to-pointer bitcasts.
        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();

        // Alloca'd pointers or byval arguments (which are functionally like
        // alloca's) are valid candidates for removal.
        if (deadPointers.count(pointerOperand)) {
          // DCE instructions only used to calculate that store.
          Instruction *Dead = BBI;
          ++BBI;
          DeleteDeadInstruction(Dead, &deadPointers);
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }

      // Because a memcpy or memmove is also a load, we can't skip it if we
      // didn't remove it.
      if (!isa<MemTransferInst>(BBI))
        continue;
    }

    Value *killPointer = 0;
    uint64_t killPointerSize = AliasAnalysis::UnknownSize;

    // If we encounter a use of the pointer, it is no longer considered dead.
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      // However, if this load is unused and not volatile, we can go ahead and
      // remove it, and not have to worry about it making our pointer undead!
      if (L->use_empty() && !L->isVolatile()) {
        ++BBI;
        DeleteDeadInstruction(L, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
        continue;
      }

      killPointer = L->getPointerOperand();
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      killPointer = V->getOperand(0);
    } else if (isa<MemTransferInst>(BBI) &&
               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
      killPointer = cast<MemTransferInst>(BBI)->getSource();
      killPointerSize = cast<ConstantInt>(
                         cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
      deadPointers.erase(A);

      // Dead alloca's can be DCE'd when we reach them.
      if (A->use_empty()) {
        ++BBI;
        DeleteDeadInstruction(A, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
      }

      continue;
    } else if (CallSite CS = cast<Value>(BBI)) {
      // If this call does not access memory, it can't
      // be undeadifying any of our pointers.
      if (AA.doesNotAccessMemory(CS))
        continue;

      unsigned modRef = 0;
      unsigned other = 0;

      // Remove any pointers made undead by the call from the dead set.
      std::vector<Value*> dead;
      for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
           E = deadPointers.end(); I != E; ++I) {
        // HACK: if we detect that our AA is imprecise, it's not
        // worth it to scan the rest of the deadPointers set.  Just
        // assume that the AA will return ModRef for everything, and
        // go ahead and bail.
        if (modRef >= 16 && other == 0) {
          deadPointers.clear();
          return MadeChange;
        }

        // See if the call site touches the pointer.
        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I,
                                                         getPointerSize(*I));

        if (A == AliasAnalysis::ModRef)
          ++modRef;
        else
          ++other;

        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          dead.push_back(*I);
      }

      for (std::vector<Value*>::iterator I = dead.begin(), E = dead.end();
           I != E; ++I)
        deadPointers.erase(*I);

      continue;
    } else if (isInstructionTriviallyDead(BBI)) {
      // For any non-memory-affecting non-terminators, DCE them as we reach
      // them.
      Instruction *Inst = BBI;
      ++BBI;
      DeleteDeadInstruction(Inst, &deadPointers);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (!killPointer)
      continue;

    killPointer = killPointer->getUnderlyingObject();

    // Deal with undead pointers.
    MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
                                       deadPointers);
  }

  return MadeChange;
}

/// RemoveUndeadPointers - Check for uses of a pointer that make it
/// undead when scanning for dead stores to alloca's.
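/// For example (illustrative): encountering "%v = load i32* %p" during the
/// backwards scan makes %p undead, since a store to %p earlier in the block
/// is now observable and must not be removed.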
bool DSE::RemoveUndeadPointers(Value *killPointer, uint64_t killPointerSize,
                               BasicBlock::iterator &BBI,
                               SmallPtrSet<Value*, 64> &deadPointers) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  // If the kill pointer can be easily reduced to an alloca,
  // don't bother doing extraneous AA queries.
  if (deadPointers.count(killPointer)) {
    deadPointers.erase(killPointer);
    return false;
  }

  // A global can't be in the dead pointer set.
  if (isa<GlobalValue>(killPointer))
    return false;

  bool MadeChange = false;

  SmallVector<Value*, 16> undead;

  for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
       E = deadPointers.end(); I != E; ++I) {
    // See if this pointer could alias it.
    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I),
                                            killPointer, killPointerSize);

    // If it must-alias and is a store, we can delete it.
    if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
      StoreInst *S = cast<StoreInst>(BBI);

      // Remove it!
      ++BBI;
      DeleteDeadInstruction(S, &deadPointers);
      ++NumFastStores;
      MadeChange = true;

      continue;

      // Otherwise, it is undead.
    } else if (A != AliasAnalysis::NoAlias)
      undead.push_back(*I);
  }

  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
       I != E; ++I)
    deadPointers.erase(*I);

  return MadeChange;
}

/// DeleteDeadInstruction - Delete this instruction.  Before we do, go through
/// and zero out all the operands of this instruction.  If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
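///
/// For example (illustrative): if "store i32 %v, i32* %p" is deleted and
/// "%v = add i32 %a, %b" has no other users, the add is deleted too.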
void DSE::DeleteDeadInstruction(Instruction *I,
                                SmallPtrSet<Value*, 64> *ValueSet) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MDA.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->erase(DeadInst);
  } while (!NowDeadInsts.empty());
}

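/// getPointerSize - Return the allocated size in bytes of the object that V
/// points to (an alloca or a byval pointer argument). For example
/// (illustrative): "%A = alloca i32, i32 4" yields 16 with 4-byte i32s.
/// Returns AliasAnalysis::UnknownSize when TargetData is unavailable or the
/// array size is not a compile-time constant.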
uint64_t DSE::getPointerSize(Value *V) const {
  if (TD) {
    if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
      // Get size information for the alloca.
      if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
        return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
    } else {
      assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
      const PointerType *PT = cast<PointerType>(V->getType());
      return TD->getTypeAllocSize(PT->getElementType());
    }
  }
  return AliasAnalysis::UnknownSize;
}