//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
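//
// A rough usage sketch (illustrative only; exact client code varies): a
// transform such as GVN or DSE queries the dependency of a memory
// instruction, and must notify this analysis before erasing instructions so
// that the caches stay coherent:
//
//   MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
//   Instruction* dep = MD.getDependency(load);  // may be NonLocal or None
//
//   DenseMap<BasicBlock*, Value*> deps;
//   MD.getNonLocalDependency(load, deps);       // per-block dependencies
//
//   MD.removeInstruction(load);                 // call before erasing 'load'
//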

#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/Statistic.h"

#define DEBUG_TYPE "memdep"

using namespace llvm;

// Limit the calculation of non-local dependencies: only examine the
// predecessors of a basic block if it has fewer than this many (50 by
// default).
static cl::opt<int>
PredLimit("nonlocaldep-threshold", cl::Hidden, cl::init(50),
          cl::desc("Control the calculation of non-local "
                   "dependencies (default = 50)"));

STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;
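
// Sentinel return values, encoded as impossible instruction addresses.
// NonLocal means the dependency lies outside the query's basic block, None
// means the query has no dependency at all, and Dirty marks a cached
// non-local entry that must be recomputed because an instruction it named
// has been removed.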
Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis",
                                                false, true);
void MemoryDependenceAnalysis::ping(Instruction *D) {
  for (depMapType::iterator I = depGraphLocal.begin(), E = depGraphLocal.end();
       I != E; ++I) {
    assert(I->first != D);
    assert(I->second.first != D);
  }

  for (nonLocalDepMapType::iterator I = depGraphNonLocal.begin(),
       E = depGraphNonLocal.end(); I != E; ++I) {
    assert(I->first != D);
    for (DenseMap<BasicBlock*, Value*>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(II->second != D);
  }

  for (reverseDepMapType::iterator I = reverseDep.begin(), E = reverseDep.end();
       I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D);

  for (reverseDepMapType::iterator I = reverseDepNonLocal.begin(),
       E = reverseDepNonLocal.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D);
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
                                                             Instruction* start,
                                                             BasicBlock* block) {
  std::pair<Instruction*, bool>& cachedResult =
    depGraphLocal[C.getInstruction()];
  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();
  BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
  BasicBlock::iterator QI = C.getInstruction();

  // If the starting point was specified, use it
  if (start) {
    QI = start;
    blockBegin = start->getParent()->begin();
  // If the starting point wasn't specified, but the block was, use it
  } else if (!start && block) {
    QI = block->end();
    blockBegin = block->begin();
  }

  // Walk backwards through the block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Any call that accesses memory is a potential dependency
      AliasAnalysis::ModRefBehavior result =
        AA.getModRefBehavior(CallSite::get(QI));
      if (result != AliasAnalysis::DoesNotAccessMemory) {
        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(C.getInstruction());
        }
        return QI;
      } else {
        continue;
      }
    } else
      continue;

    if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
      if (!start && !block) {
        cachedResult.first = QI;
        cachedResult.second = true;
        reverseDep[QI].insert(C.getInstruction());
      }
      return QI;
    }
  }

  // No dependence found
  cachedResult.first = NonLocal;
  cachedResult.second = true;
  reverseDep[NonLocal].insert(C.getInstruction());
  return NonLocal;
}
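
// To illustrate the non-local results (a hypothetical example, not code from
// this file): for a query "load %p" in block D of the diamond CFG
//
//        A        A is the entry block and touches no memory
//       / \
//      B   C      B contains "store %v, %p"; C touches no memory
//       \ /
//        D        D contains the query "load %p"
//
// the DFS below records { B -> store, A -> None }: B supplies a local
// dependency, and the search continues through C up to A, which has no
// predecessors and no dependency (None).  Blocks whose predecessors are all
// still on the DFS stack (back edges in loops) are recorded as NonLocal,
// since their dependency may still be discovered during backtracking.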
/// nonLocalHelper - Private helper used to calculate non-local dependencies
/// by doing DFS on the predecessors of a block to find its dependencies.
void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
                                              BasicBlock* block,
                                              DenseMap<BasicBlock*, Value*>& resp) {
  // Set of blocks that we've already visited in our DFS
  SmallPtrSet<BasicBlock*, 4> visited;
  // If we're updating a dirtied cache entry, we don't need to reprocess
  // already computed entries.
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
       E = resp.end(); I != E; ++I)
    if (I->second != Dirty)
      visited.insert(I->first);

  // Current stack of the DFS
  SmallVector<BasicBlock*, 4> stack;
  for (pred_iterator PI = pred_begin(block), PE = pred_end(block);
       PI != PE; ++PI)
    stack.push_back(*PI);

  while (!stack.empty()) {
    BasicBlock* BB = stack.back();

    // If we've already visited this block, no need to revisit
    if (visited.count(BB)) {
      stack.pop_back();
      continue;
    }

    // If we find a new block with a local dependency for query,
    // then we insert the new dependency and backtrack.
    if (BB != block) {
      visited.insert(BB);
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != NonLocal) {
        resp.insert(std::make_pair(BB, localDep));
        stack.pop_back();
        continue;
      }
    // If we re-encounter the starting block, we still need to search it
    // because there might be a dependency in the starting block AFTER
    // the position of the query.  This is necessary to get loops right.
    } else if (BB == block) {
      visited.insert(BB);
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != query)
        resp.insert(std::make_pair(BB, localDep));
      stack.pop_back();
      continue;
    }

    // If we didn't find anything, recurse on the predecessors of this block.
    // Only do this for blocks with a small number of predecessors.
    bool predOnStack = false;
    bool inserted = false;
    if (std::distance(pred_begin(BB), pred_end(BB)) <= PredLimit) {
      for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
           PI != PE; ++PI)
        if (!visited.count(*PI)) {
          stack.push_back(*PI);
          inserted = true;
        } else
          predOnStack = true;
    }

    // If we inserted a new predecessor, then we'll come back to this block
    if (inserted)
      continue;
    // If we didn't insert because we have no predecessors, then this
    // query has no dependency at all.
    else if (!inserted && !predOnStack) {
      resp.insert(std::make_pair(BB, None));
    // If we didn't insert because our predecessors are already on the stack,
    // then we might still have a dependency, but it will be discovered during
    // backtracking.
    } else if (!inserted && predOnStack) {
      resp.insert(std::make_pair(BB, NonLocal));
    }

    stack.pop_back();
  }
}

/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the query.  The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
                                                     DenseMap<BasicBlock*, Value*>& resp) {
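  // Cached responses are reused wholesale when clean.  When removeInstruction
  // has marked some entries of a cached response Dirty, only those blocks are
  // recomputed below; the clean entries are kept as-is.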
  if (depGraphNonLocal.count(query)) {
    DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];
    NumCacheNonlocal++;

    SmallVector<BasicBlock*, 4> dirtied;
    for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
         E = cached.end(); I != E; ++I)
      if (I->second == Dirty)
        dirtied.push_back(I->first);

    for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
         E = dirtied.end(); I != E; ++I) {
      Instruction* localDep = getDependency(query, 0, *I);
      if (localDep != NonLocal)
        cached[*I] = localDep;
      else {
        cached.erase(*I);
        nonLocalHelper(query, *I, cached);
      }
    }

    resp = cached;
    return;
  } else
    NumUncacheNonlocal++;

  // If not, go ahead and search for non-local deps.
  nonLocalHelper(query, query->getParent(), resp);

  // Update the non-local dependency cache
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
       I != E; ++I) {
    depGraphNonLocal[query].insert(*I);
    reverseDepNonLocal[I->second].insert(query);
  }
}

/// getDependency - Return the instruction on which a memory operation
/// depends.  The optional start and block parameters restrict the search
/// to begin at a particular instruction or within a particular basic block.
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
                                                     Instruction* start,
                                                     BasicBlock* block) {
  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;
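
  // The local cache maps each query to a (dependency, confirmed) pair.  A
  // confirmed entry is a final answer for the default (unrestricted) query;
  // an unconfirmed entry records how far a previous search got, so a later
  // search can resume from that instruction instead of starting over.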
  // Check for a cached result
  std::pair<Instruction*, bool>& cachedResult = depGraphLocal[query];
  // If we have a _confirmed_ cached entry, return it
  if (!block && !start) {
    if (cachedResult.second)
      return cachedResult.first;
    else if (cachedResult.first && cachedResult.first != NonLocal)
      // If we have an unconfirmed cached entry, we can start our search
      // from there
      QI = cachedResult.first;
  }

  if (start)
    QI = start;
  else if (!start && block)
    QI = block->end();

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value* dependee = 0;
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
    dependeeSize = ~0UL;
  } else if (CallSite::get(query).getInstruction() != 0)
    return getCallSiteDependency(CallSite::get(query), start, block);
  else if (isa<AllocationInst>(query))
    // Allocations don't depend on anything earlier in their block
    return None;
  else
    return None;

  BasicBlock::iterator blockBegin = block ? block->begin()
                                          : query->getParent()->begin();

  // Walk backwards through the basic block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = S;
          cachedResult.second = true;
          reverseDep[S].insert(query);
        }
        return S;
      }

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = L;
          cachedResult.second = true;
          reverseDep[L].insert(query);
        }
        return L;
      }

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling. Check if they can modify our pointer
      AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
                                                        dependee, dependeeSize);

      if (MR != AliasAnalysis::NoModRef) {
        // Loads don't depend on read-only calls
        if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }
        return QI;
      } else {
        continue;
      }
    }

    // If we found a pointer, check if it could be the same as our pointer
    if (pointer) {
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        // May-alias loads don't depend on each other
        if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
            R == AliasAnalysis::MayAlias)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }
        return QI;
      }
    }
  }

  // If we found nothing, return the non-local flag
  if (!start && !block) {
    cachedResult.first = NonLocal;
    cachedResult.second = true;
    reverseDep[NonLocal].insert(query);
  }

  return NonLocal;
}

/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache.  This is
/// useful, for example, when an instruction is changed rather than removed.
void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
  depMapType::iterator depGraphEntry = depGraphLocal.find(drop);
  if (depGraphEntry != depGraphLocal.end())
    reverseDep[depGraphEntry->second.first].erase(drop);

  // Drop dependency information for things that depended on this instr
  SmallPtrSet<Instruction*, 4>& set = reverseDep[drop];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I)
    depGraphLocal.erase(*I);

  depGraphLocal.erase(drop);
  reverseDep.erase(drop);

  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(drop);

  if (reverseDepNonLocal.count(drop)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[drop];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      // Mark stale cached entries Dirty so they are recomputed lazily
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == drop)
          DI->second = Dirty;
  }

  reverseDepNonLocal.erase(drop);
  nonLocalDepMapType::iterator I = depGraphNonLocal.find(drop);
  if (I != depGraphNonLocal.end())
    depGraphNonLocal.erase(I);
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependences of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;

  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[rem].begin(), DE = depGraphNonLocal[rem].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(rem);

  depMapType::iterator depGraphEntry = depGraphLocal.find(rem);

  if (depGraphEntry != depGraphLocal.end()) {
    reverseDep[depGraphEntry->second.first].erase(rem);

    if (depGraphEntry->second.first != NonLocal &&
        depGraphEntry->second.first != None &&
        depGraphEntry->second.second) {
      // If we have dep info for rem, set them to it
      BasicBlock::iterator RI = depGraphEntry->second.first;
      RI++;
      newDep = RI;
    } else if ( (depGraphEntry->second.first == NonLocal ||
                 depGraphEntry->second.first == None ) &&
                depGraphEntry->second.second ) {
      // If we have a confirmed non-local flag, use it
      newDep = depGraphEntry->second.first;
    } else {
      // Otherwise, use the immediate successor of rem
      // NOTE: This is because, when getDependency is called, it will first
      // check the immediate predecessor of what is in the cache.
      BasicBlock::iterator RI = rem;
      RI++;
      newDep = RI;
    }
  } else {
    // Otherwise, use the immediate successor of rem
    // NOTE: This is because, when getDependency is called, it will first
    // check the immediate predecessor of what is in the cache.
    BasicBlock::iterator RI = rem;
    RI++;
    newDep = RI;
  }

  SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I) {
    // Insert the new dependencies.
    // Mark it as unconfirmed as long as it is not the non-local flag.
    depGraphLocal[*I] = std::make_pair(newDep, (newDep == NonLocal ||
                                                newDep == None));
  }

  depGraphLocal.erase(rem);
  reverseDep.erase(rem);

  if (reverseDepNonLocal.count(rem)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      // Mark stale cached entries Dirty so they are recomputed lazily
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == rem)
          DI->second = Dirty;
  }

  reverseDepNonLocal.erase(rem);
  nonLocalDepMapType::iterator I = depGraphNonLocal.find(rem);
  if (I != depGraphNonLocal.end())
    depGraphNonLocal.erase(I);

  getAnalysis<AliasAnalysis>().deleteValue(rem);
}