//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Function.h"
#include "llvm/LLVMContext.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

// Limit for the number of instructions to scan in a block.
// FIXME: Figure out what a sane value is for this.
//        (500 is relatively insane.)
static const int BlockScanLimit = 500;

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
INITIALIZE_PASS_BEGIN(MemoryDependenceAnalysis, "memdep",
                "Memory Dependence Analysis", false, true)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(MemoryDependenceAnalysis, "memdep",
                      "Memory Dependence Analysis", false, true)

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
: FunctionPass(ID), PredCache(0) {
  initializeMemoryDependenceAnalysisPass(*PassRegistry::getPassRegistry());
}
MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// Clean up memory in between runs.
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}



/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = getAnalysisIfAvailable<TargetData>();
  DT = getAnalysisIfAvailable<DominatorTree>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
    InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!"); (void)Found;
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}

/// GetLocation - If the given instruction references a specific memory
/// location, fill in Loc with the details, otherwise set Loc.Ptr to null.
/// Return a ModRefInfo value describing the general behavior of the
/// instruction.
static
AliasAnalysis::ModRefResult GetLocation(const Instruction *Inst,
                                        AliasAnalysis::Location &Loc,
                                        AliasAnalysis *AA) {
  if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
    if (LI->isUnordered()) {
      Loc = AA->getLocation(LI);
      return AliasAnalysis::Ref;
    } else if (LI->getOrdering() == Monotonic) {
      Loc = AA->getLocation(LI);
      return AliasAnalysis::ModRef;
    }
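    // Ordered atomic loads (acquire or stronger) are handled conservatively:
    // fall through and report ModRef against an unknown location.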
    Loc = AliasAnalysis::Location();
    return AliasAnalysis::ModRef;
  }

  if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    if (SI->isUnordered()) {
      Loc = AA->getLocation(SI);
      return AliasAnalysis::Mod;
    } else if (SI->getOrdering() == Monotonic) {
      Loc = AA->getLocation(SI);
      return AliasAnalysis::ModRef;
    }
    Loc = AliasAnalysis::Location();
    return AliasAnalysis::ModRef;
  }

  if (const VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
    Loc = AA->getLocation(V);
    return AliasAnalysis::ModRef;
  }

  if (const CallInst *CI = isFreeCall(Inst)) {
    // calls to free() deallocate the entire structure
    Loc = AliasAnalysis::Location(CI->getArgOperand(0));
    return AliasAnalysis::Mod;
  }

  if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
    switch (II->getIntrinsicID()) {
    case Intrinsic::lifetime_start:
    case Intrinsic::lifetime_end:
    case Intrinsic::invariant_start:
      Loc = AliasAnalysis::Location(II->getArgOperand(1),
                                    cast<ConstantInt>(II->getArgOperand(0))
                                      ->getZExtValue(),
                                    II->getMetadata(LLVMContext::MD_tbaa));
      // These intrinsics don't really modify the memory, but returning Mod
      // will allow them to be handled conservatively.
      return AliasAnalysis::Mod;
    case Intrinsic::invariant_end:
      Loc = AliasAnalysis::Location(II->getArgOperand(2),
                                    cast<ConstantInt>(II->getArgOperand(1))
                                      ->getZExtValue(),
                                    II->getMetadata(LLVMContext::MD_tbaa));
      // These intrinsics don't really modify the memory, but returning Mod
      // will allow them to be handled conservatively.
      return AliasAnalysis::Mod;
    default:
      break;
    }

  // Otherwise, just do the coarse-grained thing that always works.
  if (Inst->mayWriteToMemory())
    return AliasAnalysis::ModRef;
  if (Inst->mayReadFromMemory())
    return AliasAnalysis::Ref;
  return AliasAnalysis::NoModRef;
}

/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
                          BasicBlock::iterator ScanIt, BasicBlock *BB) {
  unsigned Limit = BlockScanLimit;

  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    // Limit the amount of scanning we do so we don't end up with quadratic
    // running time on extreme testcases.
    --Limit;
    if (!Limit)
      return MemDepResult::getUnknown();

    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    AliasAnalysis::Location Loc;
    AliasAnalysis::ModRefResult MR = GetLocation(Inst, Loc, AA);
    if (Loc.Ptr) {
      // A simple instruction.
      if (AA->getModRefInfo(CS, Loc) != AliasAnalysis::NoModRef)
        return MemDepResult::getClobber(Inst);
      continue;
    }

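    // Otherwise Inst references no specific location; if it is itself a call
    // site, check whether the two calls interfere.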
    if (CallSite InstCS = cast<Value>(Inst)) {
      // Debug intrinsics don't cause dependences.
      if (isa<DbgInfoIntrinsic>(Inst)) continue;
      // If these two calls do not interfere, look past it.
      switch (AA->getModRefInfo(CS, InstCS)) {
      case AliasAnalysis::NoModRef:
        // If the two calls are the same, return InstCS as a Def, so that
        // CS can be found redundant and eliminated.
        if (isReadOnlyCall && !(MR & AliasAnalysis::Mod) &&
            CS.getInstruction()->isIdenticalToWhenDefined(Inst))
          return MemDepResult::getDef(Inst);

        // Otherwise if the two calls don't interact (e.g. InstCS is readnone)
        // keep scanning.
        break;
      default:
        return MemDepResult::getClobber(Inst);
      }
    }
  }

  // No dependence found.  If this is the entry block of the function, the
  // result is non-function-local; otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getNonFuncLocal();
}

/// isLoadLoadClobberIfExtendedToFullWidth - Return true if LI is a load that
/// would fully overlap MemLoc if done as a wider legal integer load.
///
/// MemLocBase, MemLocOffset are lazily computed here the first time the
/// base/offs of memloc is needed.
static bool
isLoadLoadClobberIfExtendedToFullWidth(const AliasAnalysis::Location &MemLoc,
                                       const Value *&MemLocBase,
                                       int64_t &MemLocOffs,
                                       const LoadInst *LI,
                                       const TargetData *TD) {
  // If we have no target data, we can't do this.
  if (TD == 0) return false;

  // If we haven't already computed the base/offset of MemLoc, do so now.
  if (MemLocBase == 0)
    MemLocBase = GetPointerBaseWithConstantOffset(MemLoc.Ptr, MemLocOffs, *TD);

  unsigned Size = MemoryDependenceAnalysis::
    getLoadLoadClobberFullWidthSize(MemLocBase, MemLocOffs, MemLoc.Size,
                                    LI, *TD);
  return Size != 0;
}

/// getLoadLoadClobberFullWidthSize - This is a little bit of analysis that
/// looks at a memory location for a load (specified by MemLocBase, Offs,
/// and Size) and compares it against a load.  If the specified load could
/// be safely widened to a larger integer load that is 1) still efficient,
/// 2) safe for the target, and 3) would provide the specified memory
/// location value, then this function returns the size in bytes of the
/// load width to use.  If not, this returns zero.
unsigned MemoryDependenceAnalysis::
getLoadLoadClobberFullWidthSize(const Value *MemLocBase, int64_t MemLocOffs,
                                unsigned MemLocSize, const LoadInst *LI,
                                const TargetData &TD) {
  // We can only extend simple integer loads.
  if (!isa<IntegerType>(LI->getType()) || !LI->isSimple()) return 0;

  // Get the base of this load.
  int64_t LIOffs = 0;
  const Value *LIBase =
    GetPointerBaseWithConstantOffset(LI->getPointerOperand(), LIOffs, TD);

  // If the two pointers are not based on the same pointer, we can't tell that
  // they are related.
  if (LIBase != MemLocBase) return 0;

  // Okay, the two values are based on the same pointer, but returned as
  // no-alias.  This happens when we have things like two byte loads at "P+1"
  // and "P+3".  Check to see if increasing the size of the "LI" load up to its
  // alignment (or the largest native integer type) will allow us to load all
  // the bits required by MemLoc.

  // If MemLoc is before LI, then no widening of LI will help us out.
  if (MemLocOffs < LIOffs) return 0;

  // Get the alignment of the load in bytes.  We assume that it is safe to load
  // any legal integer up to this size without a problem.  For example, if we're
  // looking at an i8 load on x86-32 that is known 1024 byte aligned, we can
  // widen it up to an i32 load.  If it is known 2-byte aligned, we can widen it
  // to i16.
  unsigned LoadAlign = LI->getAlignment();

  int64_t MemLocEnd = MemLocOffs+MemLocSize;

  // If no amount of rounding up will let MemLoc fit into LI, then bail out.
  if (LIOffs+LoadAlign < MemLocEnd) return 0;

  // This is the size of the load to try.  Start with the next larger power of
  // two.
  unsigned NewLoadByteSize = LI->getType()->getPrimitiveSizeInBits()/8U;
  NewLoadByteSize = NextPowerOf2(NewLoadByteSize);
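  // For example, widening an i8 load first tries a 2-byte load, then keeps
  // doubling (4, 8, ...) until the load covers MemLoc or a check below fails.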

  while (1) {
    // If this load size is bigger than our known alignment or would not fit
    // into a native integer register, then we fail.
    if (NewLoadByteSize > LoadAlign ||
        !TD.fitsInLegalInteger(NewLoadByteSize*8))
      return 0;

    if (LIOffs+NewLoadByteSize > MemLocEnd &&
        LI->getParent()->getParent()->hasFnAttr(Attribute::AddressSafety)) {
      // We will be reading past the location accessed by the original program.
      // While this is safe in a regular build, Address Safety analysis tools
      // may start reporting false warnings. So, don't do widening.
      return 0;
    }

    // If a load of this width would include all of MemLoc, then we succeed.
    if (LIOffs+NewLoadByteSize >= MemLocEnd)
      return NewLoadByteSize;

    NewLoadByteSize <<= 1;
  }
}

namespace {
  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  struct CapturesBefore : public CaptureTracker {
    CapturesBefore(const Instruction *I, DominatorTree *DT)
      : BeforeHere(I), DT(DT), Captured(false) {}

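    // The tracker gave up enumerating uses; conservatively assume the pointer
    // is captured.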
    void tooManyUses() { Captured = true; }

    bool shouldExplore(Use *U) {
      Instruction *I = cast<Instruction>(U->getUser());
      if (BeforeHere != I && DT->dominates(BeforeHere, I))
        return false;
      return true;
    }

    bool captured(Use *U) {
      Instruction *I = cast<Instruction>(U->getUser());
      if (BeforeHere != I && DT->dominates(BeforeHere, I))
        return false;
      Captured = true;
      return true;
    }

    const Instruction *BeforeHere;
    DominatorTree *DT;

    bool Captured;
  };
}

AliasAnalysis::ModRefResult
MemoryDependenceAnalysis::getModRefInfo(const Instruction *Inst,
                                        const AliasAnalysis::Location &MemLoc) {
  AliasAnalysis::ModRefResult MR = AA->getModRefInfo(Inst, MemLoc);
  if (MR != AliasAnalysis::ModRef) return MR;
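  // Only the conservative ModRef answer can be refined further; below we try
  // to prove NoModRef by showing the accessed object is not captured before
  // Inst executes.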

  // FIXME: this is really just shoring-up a deficiency in alias analysis.
  // BasicAA isn't willing to spend linear time determining whether an alloca
  // was captured before or after this particular call, while we are. However,
  // with a smarter AA in place, this test is just wasting compile time.
  if (!DT) return AliasAnalysis::ModRef;
  const Value *Object = GetUnderlyingObject(MemLoc.Ptr, TD);
  if (!isIdentifiedObject(Object) || isa<GlobalValue>(Object))
    return AliasAnalysis::ModRef;
  ImmutableCallSite CS(Inst);
  if (!CS.getInstruction()) return AliasAnalysis::ModRef;

  CapturesBefore CB(Inst, DT);
  llvm::PointerMayBeCaptured(Object, &CB);

  if (isa<Constant>(Object) || CS.getInstruction() == Object || CB.Captured)
    return AliasAnalysis::ModRef;

  unsigned ArgNo = 0;
  for (ImmutableCallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
       CI != CE; ++CI, ++ArgNo) {
    // Only look at the no-capture or byval pointer arguments.  If this
    // pointer were passed to arguments that were neither of these, then it
    // couldn't be no-capture.
    if (!(*CI)->getType()->isPointerTy() ||
        (!CS.doesNotCapture(ArgNo) && !CS.isByValArgument(ArgNo)))
      continue;

    // If this is a no-capture pointer argument, see if we can tell that it
    // is impossible to alias the pointer we're checking.  If not, we have to
    // assume that the call could touch the pointer, even though it doesn't
    // escape.
    if (!AA->isNoAlias(AliasAnalysis::Location(*CI),
                       AliasAnalysis::Location(Object))) {
      return AliasAnalysis::ModRef;
    }
  }
  return AliasAnalysis::NoModRef;
}

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.  If isLoad is false, this routine ignores may-aliases
/// with reads from read-only locations.
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  const Value *MemLocBase = 0;
  int64_t MemLocOffset = 0;

  unsigned Limit = BlockScanLimit;

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    // Limit the amount of scanning we do so we don't end up with quadratic
    // running time on extreme testcases.
    --Limit;
    if (!Limit)
      return MemDepResult::getUnknown();

    Instruction *Inst = --ScanIt;

    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
      // Debug intrinsics don't (and can't) cause dependences.
      if (isa<DbgInfoIntrinsic>(II)) continue;

      // If we reach a lifetime begin or end marker, then the query ends here
      // because the value is undefined.
      if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
        // FIXME: This only considers queries directly on the invariant-tagged
        // pointer, not on query pointers that are indexed off of them.  It'd
        // be nice to handle that at some point (the right approach is to use
        // GetPointerBaseWithConstantOffset).
        if (AA->isMustAlias(AliasAnalysis::Location(II->getArgOperand(1)),
                            MemLoc))
          return MemDepResult::getDef(II);
        continue;
      }
    }

    // Values depend on loads if the pointers are must aliased.  This means that
    // a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      // Atomic loads have complications involved.
      // FIXME: This is overly conservative.
      if (!LI->isUnordered())
        return MemDepResult::getClobber(LI);

      AliasAnalysis::Location LoadLoc = AA->getLocation(LI);

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R = AA->alias(LoadLoc, MemLoc);

      if (isLoad) {
        if (R == AliasAnalysis::NoAlias) {
          // If this is an over-aligned integer load (for example,
          // "load i8* %P, align 4") see if it would obviously overlap with the
          // queried location if widened to a larger load (e.g. if the queried
          // location is 1 byte at P+1).  If so, return it as a load/load
          // clobber result, allowing the client to decide to widen the load if
          // it wants to.
          if (IntegerType *ITy = dyn_cast<IntegerType>(LI->getType()))
            if (LI->getAlignment()*8 > ITy->getPrimitiveSizeInBits() &&
                isLoadLoadClobberIfExtendedToFullWidth(MemLoc, MemLocBase,
                                                       MemLocOffset, LI, TD))
              return MemDepResult::getClobber(Inst);

          continue;
        }

        // Must aliased loads are defs of each other.
        if (R == AliasAnalysis::MustAlias)
          return MemDepResult::getDef(Inst);

#if 0   // FIXME: Temporarily disabled. GVN is cleverly rewriting loads
        // in terms of clobbering loads, but since it does this by looking
        // at the clobbering load directly, it doesn't know about any
        // phi translation that may have happened along the way.

        // If we have a partial alias, then return this as a clobber for the
        // client to handle.
        if (R == AliasAnalysis::PartialAlias)
          return MemDepResult::getClobber(Inst);
#endif

        // Random may-alias loads don't depend on each other without a
        // dependence.
        continue;
      }

      // Stores don't depend on other no-aliased accesses.
      if (R == AliasAnalysis::NoAlias)
        continue;

      // Stores don't alias loads from read-only memory.
      if (AA->pointsToConstantMemory(LoadLoc))
        continue;

      // Stores depend on may/must aliased loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // Atomic stores have complications involved.
      // FIXME: This is overly conservative.
      if (!SI->isUnordered())
        return MemDepResult::getClobber(SI);

      // If alias analysis can tell that this store is guaranteed to not modify
      // the query pointer, ignore it.  Use getModRefInfo to handle cases where
      // the query pointer points to constant memory etc.
      if (AA->getModRefInfo(SI, MemLoc) == AliasAnalysis::NoModRef)
        continue;

      // Ok, this store might clobber the query pointer.  Check to see if it is
      // a must alias: in this case, we want to return this as a def.
      AliasAnalysis::Location StoreLoc = AA->getLocation(SI);

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R = AA->alias(StoreLoc, MemLoc);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(Inst);
      return MemDepResult::getClobber(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    // Note: Only determine this to be a malloc if Inst is the malloc call, not
    // a subsequent bitcast of the malloc call result.  There can be stores to
    // the malloced memory between the malloc call and its bitcast uses, and we
    // need to continue scanning until the malloc call.
    if (isa<AllocaInst>(Inst) ||
        (isa<CallInst>(Inst) && extractMallocCall(Inst))) {
      const Value *AccessPtr = GetUnderlyingObject(MemLoc.Ptr, TD);

      if (AccessPtr == Inst || AA->isMustAlias(Inst, AccessPtr))
        return MemDepResult::getDef(Inst);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    switch (getModRefInfo(Inst, MemLoc)) {
    case AliasAnalysis::NoModRef:
      // If the call has no effect on the queried pointer, just ignore it.
      continue;
    case AliasAnalysis::Mod:
      return MemDepResult::getClobber(Inst);
    case AliasAnalysis::Ref:
      // If the call is known to never store to the pointer, and if this is a
      // load query, we can safely ignore it (scan past it).
      if (isLoad)
        continue;
    default:
      // Otherwise, there is a potential dependence.  Return a clobber.
      return MemDepResult::getClobber(Inst);
    }
  }

  // No dependence found.  If this is the entry block of the function, the
  // result is non-function-local; otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getNonFuncLocal();
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, the
    // result is non-function-local; otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getNonFuncLocal();
  } else {
    AliasAnalysis::Location MemLoc;
    AliasAnalysis::ModRefResult MR = GetLocation(QueryInst, MemLoc, AA);
    if (MemLoc.Ptr) {
      // If we can do a pointer scan, make it happen.
      bool isLoad = !(MR & AliasAnalysis::Mod);
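      // lifetime.start doesn't really modify the queried memory (GetLocation
      // reports Mod for it only to be conservative), so scan it as a load.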
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(QueryInst))
        isLoad |= II->getIntrinsicID() == Intrinsic::lifetime_start;

      LocalCache = getPointerDependencyFrom(MemLoc, isLoad, ScanPos,
                                            QueryParent);
    } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
      CallSite QueryCS(QueryInst);
      bool isReadOnly = AA->onlyReadsMemory(QueryCS);
      LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
                                             QueryParent);
    } else
      // Non-memory instruction.
      LocalCache = MemDepResult::getUnknown();
  }

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}

#ifndef NDEBUG
/// AssertSorted - This method is used when -debug is specified to verify that
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1) Count = Cache.size();
  if (Count == 0) return;

  for (unsigned i = 1; i != unsigned(Count); ++i)
    assert(!(Cache[i] < Cache[i-1]) && "Cache isn't sorted!");
}
#endif

/// getNonLocalCallDependency - Perform a full dependency query for the
/// specified call, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "NonLocal" dependency
/// within its own block.
///
/// This returns a reference to an internal data structure that may be
/// invalidated on the next non-local query or when an instruction is
/// removed.  Clients must copy this data if they want it around longer than
/// that.
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
  assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
 "getNonLocalCallDependency should only be used on calls with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      ++NumCacheNonLocal;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->getResult().isDirty())
        DirtyBlocks.push_back(I->getBB());

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
    for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
      DirtyBlocks.push_back(*PI);
    ++NumUncacheNonLocal;
  }

  // isReadonlyCall - If this is a read-only call, we can be more aggressive.
  bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);

  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();
  DEBUG(AssertSorted(Cache));

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    DEBUG(AssertSorted(Cache, NumSortedEntries));
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       NonLocalDepEntry(DirtyBB));
    if (Entry != Cache.begin() && prior(Entry)->getBB() == DirtyBB)
      --Entry;

    NonLocalDepEntry *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->getBB() == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->getResult().isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &*Entry;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't have
    // to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getResult().getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
                             QueryCS.getInstruction());
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    if (ScanPos != DirtyBB->begin()) {
      Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall, ScanPos, DirtyBB);
    } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
771 // No dependence found. If this is the entry block of the function, it is
Eli Friedmana990e072011-06-15 00:47:34 +0000772 // a clobber, otherwise it is unknown.
      Dep = MemDepResult::getNonLocal();
    } else {
      Dep = MemDepResult::getNonFuncLocal();
    }

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      ExistingResult->setResult(Dep);
    else
      Cache.push_back(NonLocalDepEntry(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
    } else {

      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
        DirtyBlocks.push_back(*PI);
    }
  }

  return Cache;
}

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(const AliasAnalysis::Location &Loc, bool isLoad,
                             BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepResult> &Result) {
  assert(Loc.Ptr->getType()->isPointerTy() &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  PHITransAddr Address(const_cast<Value *>(Loc.Ptr), TD);

  // This is the set of blocks we've inspected, and the pointer we consider in
  // each block.  Because of critical edges, we currently bail out if querying
  // a block with multiple different pointers.  This can happen during PHI
  // translation.
  DenseMap<BasicBlock*, Value*> Visited;
  if (!getNonLocalPointerDepFromBB(Address, Loc, isLoad, FromBB,
                                   Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(NonLocalDepResult(FromBB,
                                     MemDepResult::getUnknown(),
                                     const_cast<Value *>(Loc.Ptr)));
}

/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
/// Pointer/PointeeSize using either cached information in Cache or by doing a
/// lookup (which may use dirty cache info if available).  If we do a lookup,
/// add the result to the cache.
MemDepResult MemoryDependenceAnalysis::
GetNonLocalInfoForBlock(const AliasAnalysis::Location &Loc,
                        bool isLoad, BasicBlock *BB,
                        NonLocalDepInfo *Cache, unsigned NumSortedEntries) {

  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry =
    std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                     NonLocalDepEntry(BB));
  if (Entry != Cache->begin() && (Entry-1)->getBB() == BB)
    --Entry;

  NonLocalDepEntry *ExistingResult = 0;
  if (Entry != Cache->begin()+NumSortedEntries && Entry->getBB() == BB)
    ExistingResult = &*Entry;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->getResult().isDirty()) {
    ++NumCacheNonLocalPtr;
    return ExistingResult->getResult();
  }

  // Otherwise, we have to scan for the value.  If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getResult().getInst()) {
    assert(ExistingResult->getResult().getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getResult().getInst();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Loc, isLoad, ScanPos, BB);

  // If we had a dirty entry for the block, update it.  Otherwise, just add
  // a new entry.
  if (ExistingResult)
    ExistingResult->setResult(Dep);
  else
    Cache->push_back(NonLocalDepEntry(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it
  // to Cache!
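  // (NonLocal, NonFuncLocal, and Unknown results carry no instruction, so
  // there is nothing to record in the reverse map for them.)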
  if (!Dep.isDef() && !Dep.isClobber())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}

/// SortNonLocalDepInfoCache - Sort a NonLocalDepInfo cache, given a certain
/// number of elements in the array that are already properly ordered.  This is
/// optimized for the case when only a few entries are added.
static void
SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         unsigned NumSortedEntries) {
  switch (Cache.size() - NumSortedEntries) {
  case 0:
    // done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    NonLocalDepEntry Val = Cache.back();
    Cache.pop_back();
    MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.end()-1, Val);
    Cache.insert(Entry, Val);
    // FALL THROUGH.
  }
  case 1:
    // One new entry, just insert the new value at the appropriate position.
    if (Cache.size() != 1) {
      NonLocalDepEntry Val = Cache.back();
      Cache.pop_back();
      MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache.begin(), Cache.end(), Val);
      Cache.insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache.begin(), Cache.end());
    break;
  }
}

/// getNonLocalPointerDepFromBB - Perform a dependency query based on
/// pointer/pointeesize starting at the end of StartBB.  Add any clobber/def
/// results to the results vector and keep track of which blocks are visited in
/// 'Visited'.
///
/// This has special behavior for the first block queries (when SkipFirstBlock
/// is true).  In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns false on success, or true to indicate that it could
/// not compute dependence information for some reason.  This should be treated
/// as a clobber dependence on the first instruction in the predecessor block.
bool MemoryDependenceAnalysis::
getNonLocalPointerDepFromBB(const PHITransAddr &Pointer,
                            const AliasAnalysis::Location &Loc,
                            bool isLoad, BasicBlock *StartBB,
                            SmallVectorImpl<NonLocalDepResult> &Result,
                            DenseMap<BasicBlock*, Value*> &Visited,
                            bool SkipFirstBlock) {

  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer.getAddr(), isLoad);

  // Set up a temporary NLPI value. If the map doesn't yet have an entry for
  // CacheKey, this value will be inserted as the associated value. Otherwise,
  // it'll be ignored, and we'll have to check to see if the cached size and
  // tbaa tag are consistent with the current query.
  NonLocalPointerInfo InitialNLPI;
  InitialNLPI.Size = Loc.Size;
  InitialNLPI.TBAATag = Loc.TBAATag;

  // Get the NLPI for CacheKey, inserting one into the map if it doesn't
  // already have one.
  std::pair<CachedNonLocalPointerInfo::iterator, bool> Pair =
    NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
  NonLocalPointerInfo *CacheInfo = &Pair.first->second;

  // If we already have a cache entry for this CacheKey, we may need to do some
  // work to reconcile the cache entry and the current query.
  if (!Pair.second) {
    if (CacheInfo->Size < Loc.Size) {
      // The query's Size is greater than the cached one. Throw out the
      // cached data and proceed with the query at the greater size.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      CacheInfo->Size = Loc.Size;
      for (NonLocalDepInfo::iterator DI = CacheInfo->NonLocalDeps.begin(),
           DE = CacheInfo->NonLocalDeps.end(); DI != DE; ++DI)
        if (Instruction *Inst = DI->getResult().getInst())
          RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
      CacheInfo->NonLocalDeps.clear();
    } else if (CacheInfo->Size > Loc.Size) {
      // This query's Size is less than the cached one. Conservatively restart
      // the query using the greater size.
      return getNonLocalPointerDepFromBB(Pointer,
                                         Loc.getWithNewSize(CacheInfo->Size),
                                         isLoad, StartBB, Result, Visited,
                                         SkipFirstBlock);
    }

    // If the query's TBAATag is inconsistent with the cached one,
    // conservatively throw out the cached data and restart the query with
    // no tag if needed.
    if (CacheInfo->TBAATag != Loc.TBAATag) {
      if (CacheInfo->TBAATag) {
        CacheInfo->Pair = BBSkipFirstBlockPair();
        CacheInfo->TBAATag = 0;
        for (NonLocalDepInfo::iterator DI = CacheInfo->NonLocalDeps.begin(),
             DE = CacheInfo->NonLocalDeps.end(); DI != DE; ++DI)
          if (Instruction *Inst = DI->getResult().getInst())
            RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
        CacheInfo->NonLocalDeps.clear();
      }
      if (Loc.TBAATag)
        return getNonLocalPointerDepFromBB(Pointer, Loc.getWithoutTBAATag(),
                                           isLoad, StartBB, Result, Visited,
                                           SkipFirstBlock);
    }
  }

  NonLocalDepInfo *Cache = &CacheInfo->NonLocalDeps;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->Pair == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
Chris Lattnerf4789512008-12-16 07:10:09 +00001025 // If we have a fully cached result for this query, we can just return the
1026 // cached results and populate the visited set. However, we have to verify
1027 // that we don't already have conflicting results for these blocks. Check
1028 // to ensure that if a block in the results set is also in the visited set,
1029 // it was visited for the same pointer query.
1030 if (!Visited.empty()) {
1031 for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
1032 I != E; ++I) {
Chris Lattnere18b9712009-12-09 07:08:01 +00001033 DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->getBB());
Chris Lattner05e15f82009-12-09 01:59:31 +00001034 if (VI == Visited.end() || VI->second == Pointer.getAddr())
1035 continue;
Chris Lattnerf4789512008-12-16 07:10:09 +00001036
1037 // We have a pointer mismatch in a block. Just return clobber, saying
1038 // that something was clobbered in this result. We could also do a
1039 // non-fully cached query, but there is little point in doing this.
1040 return true;
1041 }
1042 }
1043
Chris Lattner0ee443d2009-12-22 04:25:02 +00001044 Value *Addr = Pointer.getAddr();
Chris Lattner11dcd8d2008-12-08 07:31:50 +00001045 for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
Chris Lattnerf4789512008-12-16 07:10:09 +00001046 I != E; ++I) {
Chris Lattner0ee443d2009-12-22 04:25:02 +00001047 Visited.insert(std::make_pair(I->getBB(), Addr));
Chris Lattnere18b9712009-12-09 07:08:01 +00001048 if (!I->getResult().isNonLocal())
Chris Lattner0ee443d2009-12-22 04:25:02 +00001049 Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(), Addr));
Chris Lattnerf4789512008-12-16 07:10:09 +00001050 }
Chris Lattner11dcd8d2008-12-08 07:31:50 +00001051 ++NumCacheCompleteNonLocalPtr;
Chris Lattner9e59c642008-12-15 03:35:32 +00001052 return false;
Chris Lattner11dcd8d2008-12-08 07:31:50 +00001053 }
1054
1055 // Otherwise, this is either a new block, a block with an invalid cache
1056 // pointer, or one that we're about to invalidate by putting more info into
1057 // it than its valid cache info covers. If the cache is empty, the result
1058 // will be valid cache info; otherwise it won't be.
Chris Lattner9e59c642008-12-15 03:35:32 +00001059 if (Cache->empty())
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001060 CacheInfo->Pair = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
Dan Gohman8a66a202010-11-11 00:42:22 +00001061 else
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001062 CacheInfo->Pair = BBSkipFirstBlockPair();
Chris Lattner11dcd8d2008-12-08 07:31:50 +00001063
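  // Walk the CFG backwards from StartBB with an explicit worklist: pop a
  // block, resolve the query within it, and push its not-yet-visited
  // predecessors (phi translating Pointer into them where needed).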
1064 SmallVector<BasicBlock*, 32> Worklist;
1065 Worklist.push_back(StartBB);
Chris Lattner6290f5c2008-12-07 08:50:20 +00001066
Eli Friedmanfc097972011-06-01 23:16:53 +00001067 // PredList used inside loop.
1068 SmallVector<std::pair<BasicBlock*, PHITransAddr>, 16> PredList;
1069
Chris Lattner6290f5c2008-12-07 08:50:20 +00001070 // Keep track of the entries that we know are sorted. Previously cached
1071 // entries will all be sorted. The entries we add we only sort on demand (we
1072 // don't insert every element into its sorted position). We know that we
1073 // won't get any reuse from currently inserted values, because we don't
1074 // revisit blocks after we insert info for them.
1075 unsigned NumSortedEntries = Cache->size();
Chris Lattner12a7db32009-01-22 07:04:01 +00001076 DEBUG(AssertSorted(*Cache));
Chris Lattner6290f5c2008-12-07 08:50:20 +00001077
Chris Lattner7ebcf032008-12-07 02:15:47 +00001078 while (!Worklist.empty()) {
Chris Lattner9a193fd2008-12-07 02:56:57 +00001079 BasicBlock *BB = Worklist.pop_back_val();
Chris Lattner7ebcf032008-12-07 02:15:47 +00001080
Chris Lattner65633712008-12-09 07:52:59 +00001081 // Skip the first block if we have it.
Chris Lattner9e59c642008-12-15 03:35:32 +00001082 if (!SkipFirstBlock) {
Chris Lattner65633712008-12-09 07:52:59 +00001083 // Analyze the dependency of *Pointer in FromBB. See if we already have
1084 // been here.
Chris Lattner9e59c642008-12-15 03:35:32 +00001085 assert(Visited.count(BB) && "Should check 'visited' before adding to WL");
Chris Lattner6290f5c2008-12-07 08:50:20 +00001086
Chris Lattner65633712008-12-09 07:52:59 +00001087 // Get the dependency info for Pointer in BB. If we have cached
1088 // information, we will use it, otherwise we compute it.
Chris Lattner12a7db32009-01-22 07:04:01 +00001089 DEBUG(AssertSorted(*Cache, NumSortedEntries));
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001090 MemDepResult Dep = GetNonLocalInfoForBlock(Loc, isLoad, BB, Cache,
Chris Lattner05e15f82009-12-09 01:59:31 +00001091 NumSortedEntries);
Chris Lattner65633712008-12-09 07:52:59 +00001092
1093 // If we got a Def or Clobber, add this to the list of results.
1094 if (!Dep.isNonLocal()) {
Chris Lattner0ee443d2009-12-22 04:25:02 +00001095 Result.push_back(NonLocalDepResult(BB, Dep, Pointer.getAddr()));
Chris Lattner65633712008-12-09 07:52:59 +00001096 continue;
1097 }
Chris Lattner7ebcf032008-12-07 02:15:47 +00001098 }
1099
Chris Lattner9e59c642008-12-15 03:35:32 +00001100 // If 'Pointer' is an instruction defined in this block, then we need to do
1101 // phi translation to change it into a value live in the predecessor block.
Chris Lattner05e15f82009-12-09 01:59:31 +00001102 // If not, we just add the predecessors to the worklist and scan them with
1103 // the same Pointer.
1104 if (!Pointer.NeedsPHITranslationFromBlock(BB)) {
Chris Lattner9e59c642008-12-15 03:35:32 +00001105 SkipFirstBlock = false;
Eli Friedmanfc097972011-06-01 23:16:53 +00001106 SmallVector<BasicBlock*, 16> NewBlocks;
Chris Lattner9e59c642008-12-15 03:35:32 +00001107 for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
1108 // Verify that we haven't looked at this block yet.
1109 std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
Chris Lattner05e15f82009-12-09 01:59:31 +00001110 InsertRes = Visited.insert(std::make_pair(*PI, Pointer.getAddr()));
Chris Lattner9e59c642008-12-15 03:35:32 +00001111 if (InsertRes.second) {
1112 // First time we've looked at *PI.
Eli Friedmanfc097972011-06-01 23:16:53 +00001113 NewBlocks.push_back(*PI);
Chris Lattner9e59c642008-12-15 03:35:32 +00001114 continue;
1115 }
1116
1117 // If we have seen this block before, but it was with a different
1118 // pointer, then we have a phi translation failure and we have to treat
1119 // this as a clobber.
Eli Friedmanfc097972011-06-01 23:16:53 +00001120 if (InsertRes.first->second != Pointer.getAddr()) {
1121 // Make sure to clean up the Visited map before continuing on to
1122 // PredTranslationFailure.
1123 for (unsigned i = 0; i < NewBlocks.size(); i++)
1124 Visited.erase(NewBlocks[i]);
Chris Lattner9e59c642008-12-15 03:35:32 +00001125 goto PredTranslationFailure;
Eli Friedmanfc097972011-06-01 23:16:53 +00001126 }
Chris Lattner9e59c642008-12-15 03:35:32 +00001127 }
Eli Friedmanfc097972011-06-01 23:16:53 +00001128 Worklist.append(NewBlocks.begin(), NewBlocks.end());
Chris Lattner9e59c642008-12-15 03:35:32 +00001129 continue;
1130 }
1131
Chris Lattner05e15f82009-12-09 01:59:31 +00001132 // We do need to do phi translation; if we know ahead of time that we can't
1133 // phi translate this value, don't even try.
1134 if (!Pointer.IsPotentiallyPHITranslatable())
1135 goto PredTranslationFailure;
1136
Chris Lattner6fbc1962009-07-13 17:14:23 +00001137 // We may have added values to the cache list before this PHI translation.
1138 // If so, we haven't done anything to ensure that the cache remains sorted.
1139 // Sort it now (if needed) so that recursive invocations of
1140 // getNonLocalPointerDepFromBB and other routines that could reuse the cache
1141 // value will only see properly sorted cache arrays.
1142 if (Cache && NumSortedEntries != Cache->size()) {
Chris Lattnera2f55dd2009-07-13 17:20:05 +00001143 SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
Chris Lattner6fbc1962009-07-13 17:14:23 +00001144 NumSortedEntries = Cache->size();
1145 }
Chris Lattnere95035a2009-11-27 08:37:22 +00001146 Cache = 0;
Eli Friedmanfc097972011-06-01 23:16:53 +00001147
1148 PredList.clear();
Chris Lattnere95035a2009-11-27 08:37:22 +00001149 for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
1150 BasicBlock *Pred = *PI;
Eli Friedmanfc097972011-06-01 23:16:53 +00001151 PredList.push_back(std::make_pair(Pred, Pointer));
1152
Chris Lattner05e15f82009-12-09 01:59:31 +00001153 // Get the PHI translated pointer in this predecessor. This can fail if
1154 // not translatable, in which case the getAddr() returns null.
Eli Friedmanfc097972011-06-01 23:16:53 +00001155 PHITransAddr &PredPointer = PredList.back().second;
Daniel Dunbar6d8f2ca2010-02-24 08:48:04 +00001156 PredPointer.PHITranslateValue(BB, Pred, 0);
Chris Lattner05e15f82009-12-09 01:59:31 +00001157
1158 Value *PredPtrVal = PredPointer.getAddr();
Chris Lattnere95035a2009-11-27 08:37:22 +00001159
1160 // Check to see if we have already visited this pred block with another
1161 // pointer. If so, we can't do this lookup. This failure can occur
1162 // with PHI translation when a critical edge exists and the PHI node in
1163 // the successor translates to a pointer value different than the
1164 // pointer the block was first analyzed with.
1165 std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
Chris Lattner05e15f82009-12-09 01:59:31 +00001166 InsertRes = Visited.insert(std::make_pair(Pred, PredPtrVal));
Chris Lattner9e59c642008-12-15 03:35:32 +00001167
Chris Lattnere95035a2009-11-27 08:37:22 +00001168 if (!InsertRes.second) {
Eli Friedmanfc097972011-06-01 23:16:53 +00001169 // This pred has already been visited; take it off the list of preds to visit.
1170 PredList.pop_back();
1171
Chris Lattnere95035a2009-11-27 08:37:22 +00001172 // If the predecessor was visited with PredPtr, then we already did
1173 // the analysis and can ignore it.
Chris Lattner05e15f82009-12-09 01:59:31 +00001174 if (InsertRes.first->second == PredPtrVal)
Chris Lattnere95035a2009-11-27 08:37:22 +00001175 continue;
Chris Lattner9e59c642008-12-15 03:35:32 +00001176
Chris Lattnere95035a2009-11-27 08:37:22 +00001177 // Otherwise, the block was previously analyzed with a different
1178 // pointer. We can't represent the result of this case, so we just
1179 // treat this as a phi translation failure.
Eli Friedmanfc097972011-06-01 23:16:53 +00001180
1181 // Make sure to clean up the Visited map before continuing on to
1182 // PredTranslationFailure.
1183 for (unsigned i = 0; i < PredList.size(); i++)
1184 Visited.erase(PredList[i].first);
1185
Chris Lattnere95035a2009-11-27 08:37:22 +00001186 goto PredTranslationFailure;
Chris Lattner9e59c642008-12-15 03:35:32 +00001187 }
Eli Friedmanfc097972011-06-01 23:16:53 +00001188 }
1189
1190 // Actually process results here; this needs to be a separate loop to avoid
1191 // calling getNonLocalPointerDepFromBB for blocks we don't want to return
1192 // any results for. (getNonLocalPointerDepFromBB will modify our
1193 // data structures in ways the code after the PredTranslationFailure label
1194 // doesn't expect.)
1195 for (unsigned i = 0; i < PredList.size(); i++) {
1196 BasicBlock *Pred = PredList[i].first;
1197 PHITransAddr &PredPointer = PredList[i].second;
1198 Value *PredPtrVal = PredPointer.getAddr();
1199
1200 bool CanTranslate = true;
Chris Lattner6f7b2102009-11-27 22:05:15 +00001201 // If PHI translation was unable to find an available pointer in this
1202 // predecessor, then we have to assume that the pointer is clobbered in
1203 // that predecessor. We can still do PRE of the load, which would insert
1204 // a computation of the pointer in this predecessor.
Eli Friedmanfc097972011-06-01 23:16:53 +00001205 if (PredPtrVal == 0)
1206 CanTranslate = false;
1207
1208 // FIXME: it is entirely possible that PHI translating will end up with
1209 // the same value. Consider PHI translating something like:
1210 // X = phi [x, bb1], [y, bb2]. PHI translating for bb1 doesn't *need*
1211 // to recurse here, pedantically speaking.
1212
1213 // If getNonLocalPointerDepFromBB fails here, that means the cached
1214 // result conflicted with the Visited list; we have to conservatively
Eli Friedmana990e072011-06-15 00:47:34 +00001215 // assume it is unknown, but this also does not block PRE of the load.
Eli Friedmanfc097972011-06-01 23:16:53 +00001216 if (!CanTranslate ||
1217 getNonLocalPointerDepFromBB(PredPointer,
1218 Loc.getWithNewPtr(PredPtrVal),
1219 isLoad, Pred,
1220 Result, Visited)) {
Chris Lattner855d9da2009-12-01 07:33:32 +00001221 // Add the entry to the Result list.
Eli Friedmana990e072011-06-15 00:47:34 +00001222 NonLocalDepResult Entry(Pred, MemDepResult::getUnknown(), PredPtrVal);
Chris Lattner855d9da2009-12-01 07:33:32 +00001223 Result.push_back(Entry);
1224
Chris Lattnerf6481252009-12-19 21:29:22 +00001225 // Since we had a phi translation failure, the cache for CacheKey won't
1226 // include all of the entries that we need to immediately satisfy future
1227 // queries. Mark this in NonLocalPointerDeps by setting the
1228 // BBSkipFirstBlockPair pointer to null. This means later reuses of the
1229 // cached value will do more work but won't miss the phi trans failure.
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001230 NonLocalPointerInfo &NLPI = NonLocalPointerDeps[CacheKey];
1231 NLPI.Pair = BBSkipFirstBlockPair();
Chris Lattner6f7b2102009-11-27 22:05:15 +00001232 continue;
Chris Lattner6f7b2102009-11-27 22:05:15 +00001233 }
Chris Lattner9e59c642008-12-15 03:35:32 +00001234 }
Chris Lattnere95035a2009-11-27 08:37:22 +00001235
1236 // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
1237 CacheInfo = &NonLocalPointerDeps[CacheKey];
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001238 Cache = &CacheInfo->NonLocalDeps;
Chris Lattnere95035a2009-11-27 08:37:22 +00001239 NumSortedEntries = Cache->size();
1240
1241 // Since we did phi translation, the "Cache" set won't contain all of the
1242 // results for the query. This is ok (we can still use it to accelerate
1243 // specific block queries) but we can't do the fastpath "return all
1244 // results from the set" Clear out the indicator for this.
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001245 CacheInfo->Pair = BBSkipFirstBlockPair();
Chris Lattnere95035a2009-11-27 08:37:22 +00001246 SkipFirstBlock = false;
1247 continue;
Chris Lattnerdc593112009-11-26 23:18:49 +00001248
Chris Lattner9e59c642008-12-15 03:35:32 +00001249 PredTranslationFailure:
Eli Friedmanfc097972011-06-01 23:16:53 +00001250 // The following code handles the failure case: we can't produce a sane
1251 // translation for the given block. It assumes that we haven't modified any
1252 // of our data structures while processing the current block.
Chris Lattner9e59c642008-12-15 03:35:32 +00001253
Chris Lattner95900f22009-01-23 07:12:16 +00001254 if (Cache == 0) {
1255 // Refresh the CacheInfo/Cache pointer if it got invalidated.
1256 CacheInfo = &NonLocalPointerDeps[CacheKey];
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001257 Cache = &CacheInfo->NonLocalDeps;
Chris Lattner95900f22009-01-23 07:12:16 +00001258 NumSortedEntries = Cache->size();
Chris Lattner95900f22009-01-23 07:12:16 +00001259 }
Chris Lattner6fbc1962009-07-13 17:14:23 +00001260
Chris Lattnerf6481252009-12-19 21:29:22 +00001261 // Since we failed phi translation, the "Cache" set won't contain all of the
Chris Lattner9e59c642008-12-15 03:35:32 +00001262 // results for the query. This is ok (we can still use it to accelerate
1263 // specific block queries) but we can't do the fastpath "return all
Chris Lattnerf6481252009-12-19 21:29:22 +00001264 // results from the set". Clear out the indicator for this.
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001265 CacheInfo->Pair = BBSkipFirstBlockPair();
Chris Lattner9e59c642008-12-15 03:35:32 +00001266
Eli Friedmana990e072011-06-15 00:47:34 +00001267 // If *nothing* works, mark the pointer as unknown.
Chris Lattner9e59c642008-12-15 03:35:32 +00001268 //
1269 // If this is the magic first block, return this as a clobber of the whole
1270 // incoming value. Since we can't phi translate to one of the predecessors,
1271 // we have to bail out.
1272 if (SkipFirstBlock)
1273 return true;
1274
1275 for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
1276 assert(I != Cache->rend() && "Didn't find current block??");
Chris Lattnere18b9712009-12-09 07:08:01 +00001277 if (I->getBB() != BB)
Chris Lattner9e59c642008-12-15 03:35:32 +00001278 continue;
1279
Chris Lattnere18b9712009-12-09 07:08:01 +00001280 assert(I->getResult().isNonLocal() &&
Chris Lattner9e59c642008-12-15 03:35:32 +00001281 "Should only be here with transparent block");
Eli Friedmana990e072011-06-15 00:47:34 +00001282 I->setResult(MemDepResult::getUnknown());
Chris Lattner0ee443d2009-12-22 04:25:02 +00001283 Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(),
1284 Pointer.getAddr()));
Chris Lattner9e59c642008-12-15 03:35:32 +00001285 break;
Chris Lattner9a193fd2008-12-07 02:56:57 +00001286 }
Chris Lattner7ebcf032008-12-07 02:15:47 +00001287 }
Chris Lattner95900f22009-01-23 07:12:16 +00001288
Chris Lattner9863c3f2008-12-09 07:47:11 +00001289 // Okay, we're done now. If we added new values to the cache, re-sort it.
Chris Lattnera2f55dd2009-07-13 17:20:05 +00001290 SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
Chris Lattner12a7db32009-01-22 07:04:01 +00001291 DEBUG(AssertSorted(*Cache));
Chris Lattner9e59c642008-12-15 03:35:32 +00001292 return false;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001293}
1294
1295/// RemoveCachedNonLocalPointerDependencies - If P exists in
1296/// CachedNonLocalPointerInfo, remove it.
1297void MemoryDependenceAnalysis::
1298RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
1299 CachedNonLocalPointerInfo::iterator It =
1300 NonLocalPointerDeps.find(P);
1301 if (It == NonLocalPointerDeps.end()) return;
1302
1303 // Remove all of the entries in the BB->val map. This involves removing
1304 // instructions from the reverse map.
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001305 NonLocalDepInfo &PInfo = It->second.NonLocalDeps;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001306
1307 for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
Chris Lattnere18b9712009-12-09 07:08:01 +00001308 Instruction *Target = PInfo[i].getResult().getInst();
Chris Lattner6290f5c2008-12-07 08:50:20 +00001309 if (Target == 0) continue; // Ignore non-local dep results.
Chris Lattnere18b9712009-12-09 07:08:01 +00001310 assert(Target->getParent() == PInfo[i].getBB());
Chris Lattner6290f5c2008-12-07 08:50:20 +00001311
1312 // Eliminating the dirty entry from 'Cache', so update the reverse info.
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001313 RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
Chris Lattner6290f5c2008-12-07 08:50:20 +00001314 }
1315
1316 // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
1317 NonLocalPointerDeps.erase(It);
Chris Lattner7ebcf032008-12-07 02:15:47 +00001318}
1319
1320
Chris Lattnerbc99be12008-12-09 22:06:23 +00001321/// invalidateCachedPointerInfo - This method is used to invalidate cached
1322/// information about the specified pointer, because it may be too
1323/// conservative in memdep. This is an optional call that can be used when
1324/// the client detects an equivalence between the pointer and some other
1325/// value and replaces the other value with ptr. This can make Ptr available
1326 /// in more places than the cached info would otherwise indicate.
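///
/// A sketch of the intended use (hypothetical client code; 'MD' is this
/// analysis and 'V' is a value just proven equivalent to 'Ptr'):
///   V->replaceAllUsesWith(Ptr);
///   MD->invalidateCachedPointerInfo(Ptr);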
1327void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
1328 // If Ptr isn't really a pointer, just ignore it.
Duncan Sands1df98592010-02-16 11:11:14 +00001329 if (!Ptr->getType()->isPointerTy()) return;
Chris Lattnerbc99be12008-12-09 22:06:23 +00001330 // Flush store info for the pointer.
1331 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
1332 // Flush load info for the pointer.
1333 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
1334}
1335
Bob Wilson484d4a32010-02-16 19:51:59 +00001336/// invalidateCachedPredecessors - Clear the PredIteratorCache info.
1337/// This needs to be done when the CFG changes, e.g., due to splitting
1338/// critical edges.
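///
/// For example, a pass that splits a critical edge (by whatever means) should
/// follow the split with (a sketch; 'MD' is this analysis):
///   MD->invalidateCachedPredecessors();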
1339void MemoryDependenceAnalysis::invalidateCachedPredecessors() {
1340 PredCache->clear();
1341}
1342
Owen Anderson78e02f72007-07-06 23:14:35 +00001343/// removeInstruction - Remove an instruction from the dependence analysis,
1344/// updating the dependence of instructions that previously depended on it.
Owen Anderson642a9e32007-08-08 22:26:03 +00001345/// This method attempts to keep the cache coherent using the reverse map.
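///
/// A client deleting an instruction would typically call this first and only
/// then erase the instruction (a sketch; 'I' is the doomed instruction):
///   MD->removeInstruction(I);
///   I->eraseFromParent();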
Chris Lattner5f589dc2008-11-28 22:04:47 +00001346void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
Chris Lattner5f589dc2008-11-28 22:04:47 +00001347 // Walk through the Non-local dependencies, removing this one as the value
1348 // for any cached queries.
Chris Lattnerf68f3102008-11-30 02:28:25 +00001349 NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
1350 if (NLDI != NonLocalDeps.end()) {
Chris Lattnerbf145d62008-12-01 01:15:42 +00001351 NonLocalDepInfo &BlockMap = NLDI->second.first;
Chris Lattner25f4b2b2008-11-30 02:30:50 +00001352 for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
1353 DI != DE; ++DI)
Chris Lattnere18b9712009-12-09 07:08:01 +00001354 if (Instruction *Inst = DI->getResult().getInst())
Chris Lattnerd44745d2008-12-07 18:39:13 +00001355 RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
Chris Lattnerf68f3102008-11-30 02:28:25 +00001356 NonLocalDeps.erase(NLDI);
1357 }
Owen Anderson5fc4aba2007-12-08 01:37:09 +00001358
Chris Lattner5f589dc2008-11-28 22:04:47 +00001359 // If we have a cached local dependence query for this instruction, remove it.
Chris Lattnerbaad8882008-11-28 22:28:27 +00001360 //
Chris Lattner39f372e2008-11-29 01:43:36 +00001361 LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
1362 if (LocalDepEntry != LocalDeps.end()) {
Chris Lattner125ce362008-11-30 01:09:30 +00001363 // Remove us from DepInst's reverse set now that the local dep info is gone.
Chris Lattnerd44745d2008-12-07 18:39:13 +00001364 if (Instruction *Inst = LocalDepEntry->second.getInst())
1365 RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);
Chris Lattner125ce362008-11-30 01:09:30 +00001366
Chris Lattnerbaad8882008-11-28 22:28:27 +00001367 // Remove this local dependency info.
Chris Lattner39f372e2008-11-29 01:43:36 +00001368 LocalDeps.erase(LocalDepEntry);
Chris Lattner6290f5c2008-12-07 08:50:20 +00001369 }
1370
1371 // If we have any cached pointer dependencies on this instruction, remove
1372 // them. If the instruction has non-pointer type, then it can't be a pointer
1373 // base.
1374
1375 // Remove it from both the load info and the store info. The instruction
1376 // can't be in either of these maps if it is non-pointer.
Duncan Sands1df98592010-02-16 11:11:14 +00001377 if (RemInst->getType()->isPointerTy()) {
Chris Lattner6290f5c2008-12-07 08:50:20 +00001378 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
1379 RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
1380 }
Chris Lattnerbaad8882008-11-28 22:28:27 +00001381
Chris Lattnerd3d12ec2008-11-28 22:51:08 +00001382 // Loop over all of the things that depend on the instruction we're removing.
1383 //
Chris Lattner4f8c18c2008-11-29 23:30:39 +00001384 SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;
Chris Lattner0655f732008-12-07 18:42:51 +00001385
1386 // If we find RemInst as a clobber or Def in any of the maps for other values,
1387 // we need to replace its entry with a dirty version of the instruction after
1388 // it. If RemInst is a terminator, we use a null dirty value.
1389 //
1390 // Using a dirty version of the instruction after RemInst saves having to scan
1391 // the entire block to get to this point.
1392 MemDepResult NewDirtyVal;
1393 if (!RemInst->isTerminator())
1394 NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));
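  // (Illustratively: if RemInst is a store that some load's cached result
  // pointed at, the load's entry becomes dirty at the instruction after the
  // store, so a later query rescans only from that point.)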
Chris Lattner4f8c18c2008-11-29 23:30:39 +00001395
Chris Lattner8c465272008-11-29 09:20:15 +00001396 ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
1397 if (ReverseDepIt != ReverseLocalDeps.end()) {
Chris Lattnerd3d12ec2008-11-28 22:51:08 +00001398 SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001399 // RemInst can't be the terminator if it has local stuff depending on it.
Chris Lattner125ce362008-11-30 01:09:30 +00001400 assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
1401 "Nothing can locally depend on a terminator");
1402
Chris Lattnerd3d12ec2008-11-28 22:51:08 +00001403 for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
1404 E = ReverseDeps.end(); I != E; ++I) {
1405 Instruction *InstDependingOnRemInst = *I;
Chris Lattnerf68f3102008-11-30 02:28:25 +00001406 assert(InstDependingOnRemInst != RemInst &&
1407 "Already removed our local dep info");
Chris Lattner125ce362008-11-30 01:09:30 +00001408
Chris Lattner0655f732008-12-07 18:42:51 +00001409 LocalDeps[InstDependingOnRemInst] = NewDirtyVal;
Chris Lattnerd3d12ec2008-11-28 22:51:08 +00001410
Chris Lattner125ce362008-11-30 01:09:30 +00001411 // Make sure to remember that new things depend on NewDirtyVal's instruction.
Chris Lattner0655f732008-12-07 18:42:51 +00001412 assert(NewDirtyVal.getInst() && "There is no way something else can have "
1413 "a local dep on this if it is a terminator!");
1414 ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
Chris Lattner125ce362008-11-30 01:09:30 +00001415 InstDependingOnRemInst));
Chris Lattnerd3d12ec2008-11-28 22:51:08 +00001416 }
Chris Lattner4f8c18c2008-11-29 23:30:39 +00001417
1418 ReverseLocalDeps.erase(ReverseDepIt);
1419
1420 // Add new reverse deps after scanning the set, to avoid invalidating the
1421 // 'ReverseDeps' reference.
1422 while (!ReverseDepsToAdd.empty()) {
1423 ReverseLocalDeps[ReverseDepsToAdd.back().first]
1424 .insert(ReverseDepsToAdd.back().second);
1425 ReverseDepsToAdd.pop_back();
1426 }
Owen Anderson78e02f72007-07-06 23:14:35 +00001427 }
Owen Anderson4d13de42007-08-16 21:27:05 +00001428
Chris Lattner8c465272008-11-29 09:20:15 +00001429 ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
1430 if (ReverseDepIt != ReverseNonLocalDeps.end()) {
Chris Lattner6290f5c2008-12-07 08:50:20 +00001431 SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
1432 for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
Chris Lattnerf68f3102008-11-30 02:28:25 +00001433 I != E; ++I) {
1434 assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");
1435
Chris Lattner4a69bad2008-11-30 02:52:26 +00001436 PerInstNLInfo &INLD = NonLocalDeps[*I];
Chris Lattner4a69bad2008-11-30 02:52:26 +00001437 // The information is now dirty!
Chris Lattnerbf145d62008-12-01 01:15:42 +00001438 INLD.second = true;
Chris Lattnerf68f3102008-11-30 02:28:25 +00001439
Chris Lattnerbf145d62008-12-01 01:15:42 +00001440 for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
1441 DE = INLD.first.end(); DI != DE; ++DI) {
Chris Lattnere18b9712009-12-09 07:08:01 +00001442 if (DI->getResult().getInst() != RemInst) continue;
Chris Lattnerf68f3102008-11-30 02:28:25 +00001443
1444 // Convert to a dirty entry for the subsequent instruction.
Chris Lattner0ee443d2009-12-22 04:25:02 +00001445 DI->setResult(NewDirtyVal);
Chris Lattner0655f732008-12-07 18:42:51 +00001446
1447 if (Instruction *NextI = NewDirtyVal.getInst())
Chris Lattnerf68f3102008-11-30 02:28:25 +00001448 ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
Chris Lattnerf68f3102008-11-30 02:28:25 +00001449 }
1450 }
Chris Lattner4f8c18c2008-11-29 23:30:39 +00001451
1452 ReverseNonLocalDeps.erase(ReverseDepIt);
1453
Chris Lattner0ec48dd2008-11-29 22:02:15 +00001454 // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
1455 while (!ReverseDepsToAdd.empty()) {
1456 ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
1457 .insert(ReverseDepsToAdd.back().second);
1458 ReverseDepsToAdd.pop_back();
1459 }
Owen Anderson4d13de42007-08-16 21:27:05 +00001460 }
Owen Anderson5fc4aba2007-12-08 01:37:09 +00001461
Chris Lattner6290f5c2008-12-07 08:50:20 +00001462 // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
1463 // value in the NonLocalPointerDeps info.
1464 ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
1465 ReverseNonLocalPtrDeps.find(RemInst);
1466 if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001467 SmallPtrSet<ValueIsLoadPair, 4> &Set = ReversePtrDepIt->second;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001468 SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;
1469
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001470 for (SmallPtrSet<ValueIsLoadPair, 4>::iterator I = Set.begin(),
1471 E = Set.end(); I != E; ++I) {
1472 ValueIsLoadPair P = *I;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001473 assert(P.getPointer() != RemInst &&
1474 "Already removed NonLocalPointerDeps info for RemInst");
1475
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001476 NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].NonLocalDeps;
Chris Lattner11dcd8d2008-12-08 07:31:50 +00001477
1478 // The cache is not valid for any specific block anymore.
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001479 NonLocalPointerDeps[P].Pair = BBSkipFirstBlockPair();
Chris Lattner6290f5c2008-12-07 08:50:20 +00001480
Chris Lattner6290f5c2008-12-07 08:50:20 +00001481 // Update any entries for RemInst to use the instruction after it.
1482 for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
1483 DI != DE; ++DI) {
Chris Lattnere18b9712009-12-09 07:08:01 +00001484 if (DI->getResult().getInst() != RemInst) continue;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001485
1486 // Convert to a dirty entry for the subsequent instruction.
Chris Lattner0ee443d2009-12-22 04:25:02 +00001487 DI->setResult(NewDirtyVal);
Chris Lattner6290f5c2008-12-07 08:50:20 +00001488
1489 if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
1490 ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
1491 }
Chris Lattner95900f22009-01-23 07:12:16 +00001492
1493 // Re-sort the NonLocalDepInfo. Changing the dirty entry to its
1494 // subsequent value may invalidate the sortedness.
1495 std::sort(NLPDI.begin(), NLPDI.end());
Chris Lattner6290f5c2008-12-07 08:50:20 +00001496 }
1497
1498 ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);
1499
1500 while (!ReversePtrDepsToAdd.empty()) {
1501 ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001502 .insert(ReversePtrDepsToAdd.back().second);
Chris Lattner6290f5c2008-12-07 08:50:20 +00001503 ReversePtrDepsToAdd.pop_back();
1504 }
1505 }
1506
1507
Chris Lattnerf68f3102008-11-30 02:28:25 +00001508 assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
Chris Lattnerd777d402008-11-30 19:24:31 +00001509 AA->deleteValue(RemInst);
Jakob Stoklund Olesenf7624bc2011-01-11 04:05:39 +00001510 DEBUG(verifyRemoved(RemInst));
Owen Anderson78e02f72007-07-06 23:14:35 +00001511}

Chris Lattner729b2372008-11-29 21:25:10 +00001512/// verifyRemoved - Verify that the specified instruction does not occur
1513/// in our internal data structures.
1514void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
1515 for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
1516 E = LocalDeps.end(); I != E; ++I) {
1517 assert(I->first != D && "Inst occurs in data structures");
Chris Lattnerfd3dcbe2008-11-30 23:17:19 +00001518 assert(I->second.getInst() != D &&
Chris Lattner729b2372008-11-29 21:25:10 +00001519 "Inst occurs in data structures");
1520 }
1521
Chris Lattner6290f5c2008-12-07 08:50:20 +00001522 for (CachedNonLocalPointerInfo::const_iterator I =NonLocalPointerDeps.begin(),
1523 E = NonLocalPointerDeps.end(); I != E; ++I) {
1524 assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
Dan Gohmanc1ac0d72010-09-22 21:41:02 +00001525 const NonLocalDepInfo &Val = I->second.NonLocalDeps;
Chris Lattner6290f5c2008-12-07 08:50:20 +00001526 for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
1527 II != E; ++II)
Chris Lattnere18b9712009-12-09 07:08:01 +00001528 assert(II->getResult().getInst() != D && "Inst occurs as NLPD value");
Chris Lattner6290f5c2008-12-07 08:50:20 +00001529 }
1530
Chris Lattner729b2372008-11-29 21:25:10 +00001531 for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
1532 E = NonLocalDeps.end(); I != E; ++I) {
1533 assert(I->first != D && "Inst occurs in data structures");
Chris Lattner4a69bad2008-11-30 02:52:26 +00001534 const PerInstNLInfo &INLD = I->second;
Chris Lattnerbf145d62008-12-01 01:15:42 +00001535 for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
1536 EE = INLD.first.end(); II != EE; ++II)
Chris Lattnere18b9712009-12-09 07:08:01 +00001537 assert(II->getResult().getInst() != D && "Inst occurs in data structures");
Chris Lattner729b2372008-11-29 21:25:10 +00001538 }
1539
1540 for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
Chris Lattnerf68f3102008-11-30 02:28:25 +00001541 E = ReverseLocalDeps.end(); I != E; ++I) {
1542 assert(I->first != D && "Inst occurs in data structures");
Chris Lattner729b2372008-11-29 21:25:10 +00001543 for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
1544 EE = I->second.end(); II != EE; ++II)
1545 assert(*II != D && "Inst occurs in data structures");
Chris Lattnerf68f3102008-11-30 02:28:25 +00001546 }
Chris Lattner729b2372008-11-29 21:25:10 +00001547
1548 for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
1549 E = ReverseNonLocalDeps.end();
Chris Lattnerf68f3102008-11-30 02:28:25 +00001550 I != E; ++I) {
1551 assert(I->first != D && "Inst occurs in data structures");
Chris Lattner729b2372008-11-29 21:25:10 +00001552 for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
1553 EE = I->second.end(); II != EE; ++II)
1554 assert(*II != D && "Inst occurs in data structures");
Chris Lattnerf68f3102008-11-30 02:28:25 +00001555 }
Chris Lattner6290f5c2008-12-07 08:50:20 +00001556
1557 for (ReverseNonLocalPtrDepTy::const_iterator
1558 I = ReverseNonLocalPtrDeps.begin(),
1559 E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
1560 assert(I->first != D && "Inst occurs in rev NLPD map");
1561
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001562 for (SmallPtrSet<ValueIsLoadPair, 4>::const_iterator II = I->second.begin(),
Chris Lattner6290f5c2008-12-07 08:50:20 +00001563 E = I->second.end(); II != E; ++II)
Chris Lattner6a0dcc12009-03-29 00:24:04 +00001564 assert(*II != ValueIsLoadPair(D, false) &&
1565 *II != ValueIsLoadPair(D, true) &&
Chris Lattner6290f5c2008-12-07 08:50:20 +00001566 "Inst occurs in ReverseNonLocalPtrDeps map");
1567 }
1568
Chris Lattner729b2372008-11-29 21:25:10 +00001569}