//===- DeadStoreElimination.cpp - Fast Dead Store Elimination ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal. Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//
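// Example of the kind of redundancy this pass removes (within one block):
//
//   store i32 1, i32* %p      ; dead: overwritten below before any read
//   store i32 2, i32* %p
//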
#define DEBUG_TYPE "dse"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;
STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther , "Number of other instrs removed");
namespace {
  struct DSE : public FunctionPass {
    TargetData *TD;

    static char ID; // Pass identification, replacement for typeid
    DSE() : FunctionPass(ID) {
      initializeDSEPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnFunction(Function &F) {
      bool Changed = false;

      DominatorTree &DT = getAnalysis<DominatorTree>();

      for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I)
        // Only check non-dead blocks. Dead blocks may have strange pointer
        // cycles that will confuse alias analysis.
        if (DT.isReachableFromEntry(I))
          Changed |= runOnBasicBlock(*I);
      return Changed;
    }

    bool runOnBasicBlock(BasicBlock &BB);
    bool handleFreeWithNonTrivialDependency(const CallInst *F,
                                            Instruction *Inst,
                                            MemDepResult Dep);
    bool handleEndBlock(BasicBlock &BB);
    bool RemoveUndeadPointers(Value *Ptr, uint64_t killPointerSize,
                              BasicBlock::iterator &BBI,
                              SmallPtrSet<Value*, 64> &deadPointers);
    void DeleteDeadInstruction(Instruction *I,
                               SmallPtrSet<Value*, 64> *deadPointers = 0);

    // getAnalysisUsage - We need dominators, alias analysis, and memory
    // dependence information. The CFG is never modified, so dominators and
    // memdep stay valid and are declared preserved.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequired<DominatorTree>();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<MemoryDependenceAnalysis>();
      AU.addPreserved<DominatorTree>();
      AU.addPreserved<MemoryDependenceAnalysis>();
    }

    uint64_t getPointerSize(Value *V) const;
  };
}

char DSE::ID = 0;
INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)
FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }
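// The helpers below give the pass a uniform, write-only view of the
// instructions it cares about: plain stores plus a handful of memory
// intrinsics (memset/memcpy/memmove, init.trampoline, lifetime.end) are
// classified by doesClobberMemory, and getPointerOperand/getStoreSize recover
// the written location and its width for each of them.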
/// doesClobberMemory - Does this instruction clobber (write without reading)
/// some memory?
static bool doesClobberMemory(Instruction *I) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  return false;
}
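// Note that memcpy and memmove also *read* memory; code that relies on the
// write-only classification above (see the MemTransferInst check in
// handleEndBlock) has to account for that read separately.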
/// isElidable - If the value of this instruction and the memory it writes to
/// are unused, may we delete this instruction?
static bool isElidable(Instruction *I) {
  assert(doesClobberMemory(I));
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I))
    return II->getIntrinsicID() != Intrinsic::lifetime_end;
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return !SI->isVolatile();
  return true;
}
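// lifetime.end markers and volatile stores are never removed here: the former
// is a marker for later passes rather than a real data write, and the latter
// has side effects beyond the bytes it writes.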
/// getPointerOperand - Return the pointer that is being clobbered.
static Value *getPointerOperand(Instruction *I) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getArgOperand(0);

  IntrinsicInst *II = cast<IntrinsicInst>(I);
  switch (II->getIntrinsicID()) {
  default: assert(false && "Unexpected intrinsic!");
  case Intrinsic::init_trampoline:
    return II->getArgOperand(0);
  case Intrinsic::lifetime_end:
    return II->getArgOperand(1);
  }
}
/// getStoreSize - Return the length in bytes of the write by the clobbering
/// instruction. If variable or unknown, returns AliasAnalysis::UnknownSize.
static uint64_t getStoreSize(Instruction *I, const TargetData *TD) {
  assert(doesClobberMemory(I));
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!TD) return AliasAnalysis::UnknownSize;
    return TD->getTypeStoreSize(SI->getOperand(0)->getType());
  }

  Value *Len;
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
    Len = MI->getLength();
  } else {
    IntrinsicInst *II = cast<IntrinsicInst>(I);
    switch (II->getIntrinsicID()) {
    default: assert(false && "Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return AliasAnalysis::UnknownSize;
    case Intrinsic::lifetime_end:
      Len = II->getArgOperand(0);
      break;
    }
  }
  if (ConstantInt *LenCI = dyn_cast<ConstantInt>(Len))
    if (!LenCI->isAllOnesValue())
      return LenCI->getZExtValue();
  return AliasAnalysis::UnknownSize;
}
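// A length of all-ones (i.e. -1) means "unknown / whole object"; in
// particular that is how lifetime.end expresses an unknown extent, so it is
// mapped to AliasAnalysis::UnknownSize rather than treated as a huge concrete
// size.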
/// isStoreAtLeastAsWideAs - Return true if the size of the store in I1 is
/// greater than or equal to the size of the store in I2. This returns false
/// if we don't know.
static bool isStoreAtLeastAsWideAs(Instruction *I1, Instruction *I2,
                                   const TargetData *TD) {
  const Type *I1Ty = getPointerOperand(I1)->getType();
  const Type *I2Ty = getPointerOperand(I2)->getType();

  // Exactly the same type, must have exactly the same size.
  if (I1Ty == I2Ty) return true;

  uint64_t I1Size = getStoreSize(I1, TD);
  uint64_t I2Size = getStoreSize(I2, TD);

  return I1Size != AliasAnalysis::UnknownSize &&
         I2Size != AliasAnalysis::UnknownSize &&
         I1Size >= I2Size;
}
bool DSE::runOnBasicBlock(BasicBlock &BB) {
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
  TD = getAnalysisIfAvailable<TargetData>();

  bool MadeChange = false;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = BBI++;
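    // BBI has already been advanced past Inst here, so deleting Inst (or a
    // neighboring instruction) below does not leave the loop iterator
    // dangling; the deletion paths re-adjust BBI explicitly when needed.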
    // If we find a store or a free, get its memory dependence.
    if (!doesClobberMemory(Inst) && !isFreeCall(Inst))
      continue;

    MemDepResult InstDep = MD.getDependency(Inst);

    // Ignore non-local store liveness.
    // FIXME: cross-block DSE would be fun. :)
    if (InstDep.isNonLocal()) continue;

    // Handle frees whose dependencies are non-trivial.
    if (const CallInst *F = isFreeCall(Inst)) {
      MadeChange |= handleFreeWithNonTrivialDependency(F, Inst, InstDep);
      continue;
    }

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
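    // For instance:
    //   %v = load i32* %p
    //   store i32 %v, i32* %p   ; writes back what was just read, removable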
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (LoadInst *DepLoad = dyn_cast<LoadInst>(InstDep.getInst())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            SI->getOperand(0) == DepLoad && !SI->isVolatile()) {
          // DeleteDeadInstruction can delete the current instruction. Save BBI
          // in case we need it.
          WeakVH NextInst(BBI);

          DeleteDeadInstruction(SI);

          if (NextInst == 0)  // Next instruction deleted.
            BBI = BB.begin();
          else if (BBI != BB.begin())  // Revisit this instruction if possible.
            --BBI;
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }
    }
    if (!InstDep.isDef()) {
      // If this is a may-aliased store that is clobbering the store value, we
      // can keep searching past it for another must-aliased pointer that stores
      // to the same location. For example, in:
      //   store -> P
      //   store -> Q
      //   store -> P
      // we can remove the first store to P even though we don't know if P and Q
      // alias.
      if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        AliasAnalysis::Location Loc =
          getAnalysis<AliasAnalysis>().getLocation(SI);
        while (InstDep.isClobber() && isa<StoreInst>(InstDep.getInst()) &&
               InstDep.getInst() != &BB.front())
          InstDep = MD.getPointerDependencyFrom(Loc, false, InstDep.getInst(),
                                                &BB);
      }
    }
    // If this is a store-store dependence, then the previous store is dead so
    // long as this store is at least as big as it.
    if (InstDep.isDef() && doesClobberMemory(InstDep.getInst())) {
      Instruction *DepStore = InstDep.getInst();
      if (isStoreAtLeastAsWideAs(Inst, DepStore, TD) && isElidable(DepStore)) {
        // Delete the store and now-dead instructions that feed it.
        DeleteDeadInstruction(DepStore);
        ++NumFastStores;
        MadeChange = true;

        // DeleteDeadInstruction can delete the current instruction in loop
        // cases, reset BBI.
        BBI = Inst;
        if (BBI != BB.begin())
          --BBI;
      }
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB);

  return MadeChange;
}
/// handleFreeWithNonTrivialDependency - Handle frees of entire structures whose
/// dependency is a store to a field of that structure.
bool DSE::handleFreeWithNonTrivialDependency(const CallInst *F,
                                             Instruction *Inst,
                                             MemDepResult Dep) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();

  do {
    Instruction *Dependency = Dep.getInst();
    if (!Dependency || !doesClobberMemory(Dependency) ||
        !isElidable(Dependency))
      return false;

    Value *DepPointer = getPointerOperand(Dependency)->getUnderlyingObject();

    // Check for aliasing.
    if (AA.alias(F->getArgOperand(0), 1, DepPointer, 1) !=
          AliasAnalysis::MustAlias)
      return false;

    // DCE instructions only used to calculate that store
    DeleteDeadInstruction(Dependency);
    ++NumFastStores;

    // Inst's old Dependency is now deleted. Compute the next dependency,
    // which may also be dead, as in
    //    s[1] = 0; // This has just been deleted.
    //    free(s);
    Dep = MD.getDependency(Inst);
  } while (!Dep.isNonLocal());
  return true;
}
/// handleEndBlock - Remove dead stores to stack-allocated locations in the
/// function end block. Ex:
/// %A = alloca i32
/// ...
/// store i32 1, i32* %A
/// ret void
bool DSE::handleEndBlock(BasicBlock &BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  bool MadeChange = false;

  // Pointers alloca'd in this function are dead in the end block
  SmallPtrSet<Value*, 64> deadPointers;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock *Entry = BB.getParent()->begin();
  for (BasicBlock::iterator I = Entry->begin(), E = Entry->end(); I != E; ++I)
    if (AllocaInst *AI = dyn_cast<AllocaInst>(I))
      deadPointers.insert(AI);

  // Treat byval arguments the same; stores to them are dead at the end of the
  // function.
  for (Function::arg_iterator AI = BB.getParent()->arg_begin(),
       AE = BB.getParent()->arg_end(); AI != AE; ++AI)
    if (AI->hasByValAttr())
      deadPointers.insert(AI);
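
  // The scan below walks the block bottom-up. As long as a pointer stays in
  // deadPointers, nothing between the current point and the end of the
  // function reads it, so any store to it is dead. Loads, va_args, memcpys
  // from it, and calls that may touch it knock it back out of the set (via
  // RemoveUndeadPointers).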
  // Scan the basic block backwards
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store whose pointer is dead.
    if (doesClobberMemory(BBI)) {
      if (isElidable(BBI)) {
        // See through pointer-to-pointer bitcasts
        Value *pointerOperand = getPointerOperand(BBI)->getUnderlyingObject();

        // Alloca'd pointers or byval arguments (which are functionally like
        // alloca's) are valid candidates for removal.
        if (deadPointers.count(pointerOperand)) {
          // DCE instructions only used to calculate that store.
          Instruction *Dead = BBI;
          ++BBI;
          DeleteDeadInstruction(Dead, &deadPointers);
          ++NumFastStores;
          MadeChange = true;
          continue;
        }
      }

      // Because a memcpy or memmove is also a load, we can't skip it if we
      // didn't remove it.
      if (!isa<MemTransferInst>(BBI))
        continue;
    }
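
    // killPointer, if set below, names a location this instruction reads;
    // RemoveUndeadPointers then drops every alloca that may alias it from the
    // dead set. Instructions that read nothing leave killPointer null and are
    // simply skipped.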
    Value *killPointer = 0;
    uint64_t killPointerSize = AliasAnalysis::UnknownSize;

    // If we encounter a use of the pointer, it is no longer considered dead
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      // However, if this load is unused and not volatile, we can go ahead and
      // remove it, and not have to worry about it making our pointer undead!
      if (L->use_empty() && !L->isVolatile()) {
        ++BBI;
        DeleteDeadInstruction(L, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
        continue;
      }

      killPointer = L->getPointerOperand();
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      killPointer = V->getOperand(0);
    } else if (isa<MemTransferInst>(BBI) &&
               isa<ConstantInt>(cast<MemTransferInst>(BBI)->getLength())) {
      killPointer = cast<MemTransferInst>(BBI)->getSource();
      killPointerSize = cast<ConstantInt>(
                          cast<MemTransferInst>(BBI)->getLength())->getZExtValue();
    } else if (AllocaInst *A = dyn_cast<AllocaInst>(BBI)) {
      deadPointers.erase(A);

      // Dead alloca's can be DCE'd when we reach them
      if (A->use_empty()) {
        ++BBI;
        DeleteDeadInstruction(A, &deadPointers);
        ++NumFastOther;
        MadeChange = true;
      }

      continue;
    } else if (CallSite CS = cast<Value>(BBI)) {
      // If this call does not access memory, it can't
      // be undeadifying any of our pointers.
      if (AA.doesNotAccessMemory(CS))
        continue;

      unsigned modRef = 0;
      unsigned other = 0;

      // Remove any pointers made undead by the call from the dead set
      std::vector<Value*> dead;
      for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
           E = deadPointers.end(); I != E; ++I) {
        // HACK: if we detect that our AA is imprecise, it's not
        // worth it to scan the rest of the deadPointers set. Just
        // assume that the AA will return ModRef for everything, and
        // go ahead and bail.
        if (modRef >= 16 && other == 0) {
          deadPointers.clear();
          return MadeChange;
        }

        // See if the call site touches it
        AliasAnalysis::ModRefResult A = AA.getModRefInfo(CS, *I,
                                                         getPointerSize(*I));

        if (A == AliasAnalysis::ModRef)
          ++modRef;
        else
          ++other;

        if (A == AliasAnalysis::ModRef || A == AliasAnalysis::Ref)
          dead.push_back(*I);
      }

      for (std::vector<Value*>::iterator I = dead.begin(), E = dead.end();
           I != E; ++I)
        deadPointers.erase(*I);

      continue;
    } else if (isInstructionTriviallyDead(BBI)) {
      // For any non-memory-affecting non-terminators, DCE them as we reach them
      Instruction *Inst = BBI;
      ++BBI;
      DeleteDeadInstruction(Inst, &deadPointers);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (!killPointer)
      continue;

    killPointer = killPointer->getUnderlyingObject();

    // Deal with undead pointers
    MadeChange |= RemoveUndeadPointers(killPointer, killPointerSize, BBI,
                                       deadPointers);
  }

  return MadeChange;
}
/// RemoveUndeadPointers - check for uses of a pointer that make it
/// undead when scanning for dead stores to alloca's.
bool DSE::RemoveUndeadPointers(Value *killPointer, uint64_t killPointerSize,
                               BasicBlock::iterator &BBI,
                               SmallPtrSet<Value*, 64> &deadPointers) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();

  // If the kill pointer can be easily reduced to an alloca,
  // don't bother doing extraneous AA queries.
  if (deadPointers.count(killPointer)) {
    deadPointers.erase(killPointer);
    return false;
  }

  // A global can't be in the dead pointer set.
  if (isa<GlobalValue>(killPointer))
    return false;

  bool MadeChange = false;

  SmallVector<Value*, 16> undead;

  for (SmallPtrSet<Value*, 64>::iterator I = deadPointers.begin(),
       E = deadPointers.end(); I != E; ++I) {
    // See if this pointer could alias it
    AliasAnalysis::AliasResult A = AA.alias(*I, getPointerSize(*I),
                                            killPointer, killPointerSize);

    // If it must-alias and a store, we can delete it
    if (isa<StoreInst>(BBI) && A == AliasAnalysis::MustAlias) {
      StoreInst *S = cast<StoreInst>(BBI);

      // Remove it!
      ++BBI;
      DeleteDeadInstruction(S, &deadPointers);
      ++NumFastStores;
      MadeChange = true;

      continue;

      // Otherwise, it is undead
    } else if (A != AliasAnalysis::NoAlias)
      undead.push_back(*I);
  }

  for (SmallVector<Value*, 16>::iterator I = undead.begin(), E = undead.end();
       I != E; ++I)
    deadPointers.erase(*I);

  return MadeChange;
}
/// DeleteDeadInstruction - Delete this instruction. Before we do, go through
/// and zero out all the operands of this instruction. If any of them become
/// dead, delete them and the computation tree that feeds them.
///
/// If ValueSet is non-null, remove any deleted instructions from it as well.
///
void DSE::DeleteDeadInstruction(Instruction *I,
                                SmallPtrSet<Value*, 64> *ValueSet) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
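  // Statistics bookkeeping: the caller already counts the primary instruction
  // in NumFastStores or NumFastOther, while the loop below bumps NumFastOther
  // for everything it erases (including the primary one), so pre-decrement to
  // avoid double counting.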
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();

    ++NumFastOther;

    // This instruction is dead, zap it, in stages. Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MDA.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, 0);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->erase(DeadInst);
  } while (!NowDeadInsts.empty());
}
uint64_t DSE::getPointerSize(Value *V) const {
  if (TD) {
    if (AllocaInst *A = dyn_cast<AllocaInst>(V)) {
      // Get size information for the alloca
      if (ConstantInt *C = dyn_cast<ConstantInt>(A->getArraySize()))
        return C->getZExtValue() * TD->getTypeAllocSize(A->getAllocatedType());
    } else {
      assert(isa<Argument>(V) && "Expected AllocaInst or Argument!");
      const PointerType *PT = cast<PointerType>(V->getType());
      return TD->getTypeAllocSize(PT->getElementType());
    }
  }
  return AliasAnalysis::UnknownSize;
}