//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
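//
// A minimal, hypothetical usage sketch for a client pass (MyPass and
// SomeMemoryInst are made-up names; the analysis calls follow the interface
// implemented below):
//
//   void MyPass::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<MemoryDependenceAnalysis>();
//   }
//   ...
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Dep = MD.getDependency(SomeMemoryInst);
//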

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
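///
/// A schematic example with made-up IR names, assuming alias analysis cannot
/// prove the call leaves %P untouched:
///
///   store i32 0, i32* %P
///   call void @use(i32* %P)    ; the call's local dependence is the store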
MemoryDependenceAnalysis::DepResultTy MemoryDependenceAnalysis::
getCallSiteDependency(CallSite C, BasicBlock::iterator ScanIt,
                      BasicBlock *BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  TargetData &TD = getAnalysis<TargetData>();

  // Walk backwards through the block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      PointerSize = ~0UL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      if (AA.getModRefBehavior(CallSite::get(Inst)) ==
            AliasAnalysis::DoesNotAccessMemory)
        continue;
      return DepResultTy(Inst, Normal);
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA.getModRefInfo(C, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return DepResultTy(Inst, Normal);
  }

  // No dependence found.
  return DepResultTy(0, NonLocal);
}

/// getDependencyFromInternal - Return the instruction on which the given
/// memory operation depends, scanning backwards from ScanIt and only
/// evaluating dependencies within the basic block BB.
MemoryDependenceAnalysis::DepResultTy MemoryDependenceAnalysis::
getDependencyFromInternal(Instruction *QueryInst, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  TargetData &TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value *MemPtr = 0;
  uint64_t MemSize = 0;
  bool MemVolatile = false;

  if (StoreInst *S = dyn_cast<StoreInst>(QueryInst)) {
    MemPtr = S->getPointerOperand();
    MemSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    MemVolatile = S->isVolatile();
  } else if (LoadInst *L = dyn_cast<LoadInst>(QueryInst)) {
    MemPtr = L->getPointerOperand();
    MemSize = TD.getTypeStoreSize(L->getType());
    MemVolatile = L->isVolatile();
  } else if (VAArgInst *V = dyn_cast<VAArgInst>(QueryInst)) {
    MemPtr = V->getOperand(0);
    MemSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst *F = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = F->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst))
    return getCallSiteDependency(CallSite::get(QueryInst), ScanIt, BB);
  else   // Non-memory instructions depend on nothing.
    return DepResultTy(0, None);

  // Walk backwards through the basic block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If the access is volatile and this is a volatile load/store, return a
    // dependence.
    if (MemVolatile &&
        ((isa<LoadInst>(Inst) && cast<LoadInst>(Inst)->isVolatile()) ||
         (isa<StoreInst>(Inst) && cast<StoreInst>(Inst)->isVolatile())))
      return DepResultTy(Inst, Normal);

    // Values depend on loads if the pointers are must-aliased.  This means
    // that a load depends on an earlier must-aliased load of the same
    // pointer.
    if (LoadInst *L = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = L->getPointerOperand();
      uint64_t PointerSize = TD.getTypeStoreSize(L->getType());

      // If we found a pointer, check if it could be the same as our pointer
      AliasAnalysis::AliasResult R =
        AA.alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;

      // A load does not depend on another load that only may-aliases it, so
      // keep scanning.
      if (isa<LoadInst>(QueryInst) && R == AliasAnalysis::MayAlias)
        continue;
      return DepResultTy(Inst, Normal);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return None.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA.alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return DepResultTy(0, None);
      continue;
    }

    // See if this instruction mod/ref's the pointer.
    AliasAnalysis::ModRefResult MRR = AA.getModRefInfo(Inst, MemPtr, MemSize);

    if (MRR == AliasAnalysis::NoModRef)
      continue;

    // Loads don't depend on read-only instructions.
    if (isa<LoadInst>(QueryInst) && MRR == AliasAnalysis::Ref)
      continue;

    // Otherwise, there is a dependence.
    return DepResultTy(Inst, Normal);
  }

  // If we found nothing, return the non-local flag.
  return DepResultTy(0, NonLocal);
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
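///
/// Results are cached in LocalDeps and reused until invalidated by
/// removeInstruction.  A hypothetical query (client-side names are made up):
///
///   MemDepResult Dep = MD.getDependency(Inst);
///   if (Dep.isNonLocal())
///     ... // no dependence within Inst's block; see getNonLocalDependency.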
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result
  DepResultTy &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on DepResultTy's default constructing to 'dirty'.
  if (LocalCache.getInt() != Dirty)
    return ConvToResult(LocalCache);

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getPointer())
    ScanPos = Inst;

  // Do the scan.
  LocalCache = getDependencyFromInternal(QueryInst, ScanPos,
                                         QueryInst->getParent());

  // Remember the result!
  if (Instruction *I = LocalCache.getPointer())
    ReverseLocalDeps[I].insert(QueryInst);

  return ConvToResult(LocalCache);
}

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks that the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
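/// A hypothetical usage sketch (client-side names are made up):
///
///   SmallVector<std::pair<BasicBlock*, MemDepResult>, 16> Deps;
///   MD.getNonLocalDependency(Load, Deps);
///   for (unsigned i = 0, e = Deps.size(); i != e; ++i)
///     if (!Deps[i].second.isNonLocal())
///       ... // Deps[i].second is Load's dependence within Deps[i].first.
///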
void MemoryDependenceAnalysis::
getNonLocalDependency(Instruction *QueryInst,
                      SmallVectorImpl<std::pair<BasicBlock*,
                                                MemDepResult> > &Result) {
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  DenseMap<BasicBlock*, DepResultTy> &Cache = NonLocalDeps[QueryInst];

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, entries become dirty when instructions are deleted,
  /// etc.  In the uncached case, this starts out as the set of predecessors
  /// we care about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    // FIXME: In the "nothing needs to be updated" case, this is expensive;
    // why not have a per-cache flag saying it is not dirty?
    for (DenseMap<BasicBlock*, DepResultTy>::iterator I = Cache.begin(),
         E = Cache.end(); I != E; ++I)
      if (I->second.getInt() == Dirty)
        DirtyBlocks.push_back(I->first);

    NumCacheNonLocal++;

    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Get the entry for this block.  Note that this relies on DepResultTy
    // default initializing to Dirty.
    DepResultTy &DirtyBBEntry = Cache[DirtyBB];

    // If DirtyBBEntry isn't dirty, it ended up on the worklist multiple times.
    if (DirtyBBEntry.getInt() != Dirty) continue;

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (Instruction *Inst = DirtyBBEntry.getPointer())
      ScanPos = Inst;

    // Find out if this block has a local dependency for QueryInst.
    DirtyBBEntry = getDependencyFromInternal(QueryInst, ScanPos, DirtyBB);

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember it!
    if (DirtyBBEntry.getInt() != NonLocal) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = DirtyBBEntry.getPointer())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
      continue;
    }

    // If the block *is* completely transparent to the load, we need to check
    // the predecessors of this block.  Add them to our worklist.
    DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
  }

  // Copy the result into the output set.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator I = Cache.begin(),
       E = Cache.end(); I != E; ++I)
    Result.push_back(std::make_pair(I->first, ConvToResult(I->second)));
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
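///
/// A hypothetical caller deleting a dead instruction (DeadInst is a made-up
/// name) should update this analysis before erasing it:
///
///   MD.removeInstruction(DeadInst);
///   DeadInst->eraseFromParent();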
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
       NonLocalDeps[RemInst].begin(), DE = NonLocalDeps[RemInst].end();
       DI != DE; ++DI)
    if (Instruction *Inst = DI->second.getPointer())
      ReverseNonLocalDeps[Inst].erase(RemInst);

  // If we have a cached local dependence query for this instruction, remove
  // it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is
    // gone.
    if (Instruction *Inst = LocalDepEntry->second.getPointer()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty
    // flag set so it will rescan.  This saves having to scan the entire
    // block to get to this point.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;

      // If we thought the instruction depended on itself (possible for
      // unconfirmed dependencies) ignore the update.
      if (InstDependingOnRemInst == RemInst) continue;

      LocalDeps[InstDependingOnRemInst] = DepResultTy(NewDepInst, Dirty);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, DepResultTy>::iterator
           DI = NonLocalDeps[*I].begin(), DE = NonLocalDeps[*I].end();
           DI != DE; ++DI)
        if (DI->second.getPointer() == RemInst) {
          // Convert to a dirty entry for the subsequent instruction.
          DI->second.setInt(Dirty);
          if (RemInst->isTerminator())
            DI->second.setPointer(0);
          else {
            Instruction *NextI = next(BasicBlock::iterator(RemInst));
            DI->second.setPointer(NextI);
            assert(NextI != RemInst);
            ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
          }
        }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  NonLocalDeps.erase(RemInst);
  getAnalysis<AliasAnalysis>().deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getPointer() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (DenseMap<BasicBlock*, DepResultTy>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(II->second.getPointer() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
}