1 //===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file was developed by Owen Anderson and is distributed under
6 // the University of Illinois Open Source License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements an analysis that determines, for a given memory
11 // operation, what preceding memory operations it depends on. It builds on
12 // alias analysis information, and tries to provide a lazy, caching interface to
13 // a common kind of alias information query.
15 //===----------------------------------------------------------------------===//
17 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
18 #include "llvm/Constants.h"
19 #include "llvm/Instructions.h"
20 #include "llvm/Function.h"
21 #include "llvm/Analysis/AliasAnalysis.h"
22 #include "llvm/Support/CFG.h"
23 #include "llvm/Target/TargetData.h"
24 #include "llvm/ADT/Statistic.h"
26 #define DEBUG_TYPE "memdep"
30 STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
31 STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");
33 char MemoryDependenceAnalysis::ID = 0;
35 Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
36 Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
37 Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;
39 // Register this pass...
40 static RegisterPass<MemoryDependenceAnalysis> X("memdep",
41 "Memory Dependence Analysis");
43 /// getAnalysisUsage - Does not modify anything. It uses Alias Analysis.
45 void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
47 AU.addRequiredTransitive<AliasAnalysis>();
48 AU.addRequiredTransitive<TargetData>();
51 /// getCallSiteDependency - Private helper for finding the local dependencies
53 Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
57 AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
58 TargetData& TD = getAnalysis<TargetData>();
59 BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
60 BasicBlock::iterator QI = C.getInstruction();
62 // If the starting point was specifiy, use it
65 blockBegin = start->getParent()->end();
66 // If the starting point wasn't specified, but the block was, use it
67 } else if (!start && block) {
69 blockBegin = block->end();
72 // Walk backwards through the block, looking for dependencies
73 while (QI != blockBegin) {
76 // If this inst is a memory op, get the pointer it accessed
78 uint64_t pointerSize = 0;
79 if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
80 pointer = S->getPointerOperand();
81 pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
82 } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
83 pointer = L->getPointerOperand();
84 pointerSize = TD.getTypeSize(L->getType());
85 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
87 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
88 pointerSize = C->getZExtValue() * \
89 TD.getTypeSize(AI->getAllocatedType());
92 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
93 pointer = V->getOperand(0);
94 pointerSize = TD.getTypeSize(V->getType());
95 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
96 pointer = F->getPointerOperand();
98 // FreeInsts erase the entire structure
100 } else if (CallSite::get(QI).getInstruction() != 0) {
101 if (AA.getModRefInfo(C, CallSite::get(QI)) != AliasAnalysis::NoModRef) {
102 if (!start && !block) {
103 depGraphLocal.insert(std::make_pair(C.getInstruction(),
104 std::make_pair(QI, true)));
105 reverseDep[QI].insert(C.getInstruction());
114 if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
115 if (!start && !block) {
116 depGraphLocal.insert(std::make_pair(C.getInstruction(),
117 std::make_pair(QI, true)));
118 reverseDep[QI].insert(C.getInstruction());
124 // No dependence found
125 depGraphLocal.insert(std::make_pair(C.getInstruction(),
126 std::make_pair(NonLocal, true)));
127 reverseDep[NonLocal].insert(C.getInstruction());
131 /// nonLocalHelper - Private helper used to calculate non-local dependencies
132 /// by doing DFS on the predecessors of a block to find its dependencies
133 void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
135 DenseMap<BasicBlock*, Value*>& resp) {
136 // Set of blocks that we've already visited in our DFS
137 SmallPtrSet<BasicBlock*, 4> visited;
138 // Current stack of the DFS
139 SmallVector<BasicBlock*, 4> stack;
140 stack.push_back(block);
143 while (!stack.empty()) {
144 BasicBlock* BB = stack.back();
146 // If we've already visited this block, no need to revist
147 if (visited.count(BB)) {
152 // If we find a new block with a local dependency for query,
153 // then we insert the new dependency and backtrack.
157 Instruction* localDep = getDependency(query, 0, BB);
158 if (localDep != NonLocal) {
159 resp.insert(std::make_pair(BB, localDep));
164 // If we re-encounter the starting block, we still need to search it
165 // because there might be a dependency in the starting block AFTER
166 // the position of the query. This is necessary to get loops right.
167 } else if (BB == block && stack.size() > 1) {
170 Instruction* localDep = getDependency(query, 0, BB);
171 if (localDep != query)
172 resp.insert(std::make_pair(BB, localDep));
179 // If we didn't find anything, recurse on the precessors of this block
180 bool predOnStack = false;
181 bool inserted = false;
182 for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
184 if (!visited.count(*PI)) {
185 stack.push_back(*PI);
190 // If we inserted a new predecessor, then we'll come back to this block
193 // If we didn't insert because we have no predecessors, then this
194 // query has no dependency at all.
195 else if (!inserted && !predOnStack) {
196 resp.insert(std::make_pair(BB, None));
197 // If we didn't insert because our predecessors are already on the stack,
198 // then we might still have a dependency, but it will be discovered during
200 } else if (!inserted && predOnStack){
201 resp.insert(std::make_pair(BB, NonLocal));
208 /// getNonLocalDependency - Fills the passed-in map with the non-local
209 /// dependencies of the queries. The map will contain NonLocal for
210 /// blocks between the query and its dependencies.
211 void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
212 DenseMap<BasicBlock*, Value*>& resp) {
213 if (depGraphNonLocal.count(query)) {
214 resp = depGraphNonLocal[query];
218 NumUncacheNonlocal++;
220 // If not, go ahead and search for non-local deps.
221 nonLocalHelper(query, query->getParent(), resp);
223 // Update the non-local dependency cache
224 for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
226 depGraphNonLocal[query].insert(*I);
227 reverseDepNonLocal[I->second].insert(query);
231 /// getDependency - Return the instruction on which a memory operation
232 /// depends. The local paramter indicates if the query should only
233 /// evaluate dependencies within the same basic block.
234 Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
237 // Start looking for dependencies with the queried inst
238 BasicBlock::iterator QI = query;
240 // Check for a cached result
241 std::pair<Instruction*, bool> cachedResult = depGraphLocal[query];
242 // If we have a _confirmed_ cached entry, return it
243 if (cachedResult.second)
244 return cachedResult.first;
245 else if (cachedResult.first && cachedResult.first != NonLocal)
246 // If we have an unconfirmed cached entry, we can start our search from there
247 QI = cachedResult.first;
251 else if (!start && block)
254 AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
255 TargetData& TD = getAnalysis<TargetData>();
257 // Get the pointer value for which dependence will be determined
259 uint64_t dependeeSize = 0;
260 bool queryIsVolatile = false;
261 if (StoreInst* S = dyn_cast<StoreInst>(query)) {
262 dependee = S->getPointerOperand();
263 dependeeSize = TD.getTypeSize(S->getOperand(0)->getType());
264 queryIsVolatile = S->isVolatile();
265 } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
266 dependee = L->getPointerOperand();
267 dependeeSize = TD.getTypeSize(L->getType());
268 queryIsVolatile = L->isVolatile();
269 } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
270 dependee = V->getOperand(0);
271 dependeeSize = TD.getTypeSize(V->getType());
272 } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
273 dependee = F->getPointerOperand();
275 // FreeInsts erase the entire structure, not just a field
277 } else if (CallSite::get(query).getInstruction() != 0)
278 return getCallSiteDependency(CallSite::get(query), start, block);
279 else if (isa<AllocationInst>(query))
284 BasicBlock::iterator blockBegin = block ? block->begin()
285 : query->getParent()->begin();
287 // Walk backwards through the basic block, looking for dependencies
288 while (QI != blockBegin) {
291 // If this inst is a memory op, get the pointer it accessed
293 uint64_t pointerSize = 0;
294 if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
295 // All volatile loads/stores depend on each other
296 if (queryIsVolatile && S->isVolatile()) {
297 if (!start && !block) {
298 depGraphLocal.insert(std::make_pair(query, std::make_pair(S, true)));
299 reverseDep[S].insert(query);
305 pointer = S->getPointerOperand();
306 pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
307 } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
308 // All volatile loads/stores depend on each other
309 if (queryIsVolatile && L->isVolatile()) {
310 if (!start && !block) {
311 depGraphLocal.insert(std::make_pair(query, std::make_pair(L, true)));
312 reverseDep[L].insert(query);
318 pointer = L->getPointerOperand();
319 pointerSize = TD.getTypeSize(L->getType());
320 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
322 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
323 pointerSize = C->getZExtValue() * \
324 TD.getTypeSize(AI->getAllocatedType());
327 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
328 pointer = V->getOperand(0);
329 pointerSize = TD.getTypeSize(V->getType());
330 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
331 pointer = F->getPointerOperand();
333 // FreeInsts erase the entire structure
335 } else if (CallSite::get(QI).getInstruction() != 0) {
336 // Call insts need special handling. Check if they can modify our pointer
337 AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
338 dependee, dependeeSize);
340 if (MR != AliasAnalysis::NoModRef) {
341 // Loads don't depend on read-only calls
342 if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
345 if (!start && !block) {
346 depGraphLocal.insert(std::make_pair(query,
347 std::make_pair(QI, true)));
348 reverseDep[QI].insert(query);
357 // If we found a pointer, check if it could be the same as our pointer
359 AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
360 dependee, dependeeSize);
362 if (R != AliasAnalysis::NoAlias) {
363 // May-alias loads don't depend on each other
364 if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
365 R == AliasAnalysis::MayAlias)
368 if (!start && !block) {
369 depGraphLocal.insert(std::make_pair(query,
370 std::make_pair(QI, true)));
371 reverseDep[QI].insert(query);
379 // If we found nothing, return the non-local flag
380 if (!start && !block) {
381 depGraphLocal.insert(std::make_pair(query,
382 std::make_pair(NonLocal, true)));
383 reverseDep[NonLocal].insert(query);
389 /// removeInstruction - Remove an instruction from the dependence analysis,
390 /// updating the dependence of instructions that previously depended on it.
391 /// This method attempts to keep the cache coherent using the reverse map.
392 void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
393 // Figure out the new dep for things that currently depend on rem
394 Instruction* newDep = NonLocal;
396 depMapType::iterator depGraphEntry = depGraphLocal.find(rem);
398 if (depGraphEntry != depGraphLocal.end()) {
399 if (depGraphEntry->second.first != NonLocal &&
400 depGraphEntry->second.second) {
401 // If we have dep info for rem, set them to it
402 BasicBlock::iterator RI = depGraphEntry->second.first;
405 } else if (depGraphEntry->second.first == NonLocal &&
406 depGraphEntry->second.second ) {
407 // If we have a confirmed non-local flag, use it
410 // Otherwise, use the immediate successor of rem
411 // NOTE: This is because, when getDependence is called, it will first
412 // check the immediate predecessor of what is in the cache.
413 BasicBlock::iterator RI = rem;
418 SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
419 for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
421 // Insert the new dependencies
422 // Mark it as unconfirmed as long as it is not the non-local flag
423 depGraphLocal[*I] = std::make_pair(newDep, !newDep);
426 reverseDep.erase(rem);
429 if (reverseDepNonLocal.count(rem)) {
430 SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
431 for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
433 depGraphNonLocal.erase(*I);
435 reverseDepNonLocal.erase(rem);
438 getAnalysis<AliasAnalysis>().deleteValue(rem);