//===- Loads.cpp - Local load analysis ------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Statepoint.h"

using namespace llvm;

static bool isDereferenceableFromAttribute(const Value *BV, APInt Offset,
                                           Type *Ty, const DataLayout &DL,
                                           const Instruction *CtxI,
                                           const DominatorTree *DT,
                                           const TargetLibraryInfo *TLI) {
  assert(Offset.isNonNegative() && "offset can't be negative");
  assert(Ty->isSized() && "must be sized");

  bool CheckForNonNull = false;
  APInt DerefBytes(Offset.getBitWidth(),
                   BV->getPointerDereferenceableBytes(CheckForNonNull));

  if (DerefBytes.getBoolValue())
    if (DerefBytes.uge(Offset + DL.getTypeStoreSize(Ty)))
      if (!CheckForNonNull || isKnownNonNullAt(BV, CtxI, DT, TLI))
        return true;

  return false;
}

static bool isDereferenceableFromAttribute(const Value *V, const DataLayout &DL,
                                           const Instruction *CtxI,
                                           const DominatorTree *DT,
                                           const TargetLibraryInfo *TLI) {
  Type *VTy = V->getType();
  Type *Ty = VTy->getPointerElementType();
  if (!Ty->isSized())
    return false;

  APInt Offset(DL.getTypeStoreSizeInBits(VTy), 0);
  return isDereferenceableFromAttribute(V, Offset, Ty, DL, CtxI, DT, TLI);
}

static bool isAligned(const Value *Base, APInt Offset, unsigned Align,
                      const DataLayout &DL) {
  APInt BaseAlign(Offset.getBitWidth(), Base->getPointerAlignment(DL));

  if (!BaseAlign) {
    Type *Ty = Base->getType()->getPointerElementType();
    if (!Ty->isSized())
      return false;
    BaseAlign = DL.getABITypeAlignment(Ty);
  }

  APInt Alignment(Offset.getBitWidth(), Align);

  assert(Alignment.isPowerOf2() && "must be a power of 2!");
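  // For a power-of-2 Alignment, (Offset & (Alignment - 1)) is zero exactly
  // when Offset is a multiple of Alignment; e.g. Offset 12, Alignment 4
  // gives 12 & 3 == 0, while Offset 13 gives 13 & 3 == 1.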
  return BaseAlign.uge(Alignment) && !(Offset & (Alignment - 1));
}

static bool isAligned(const Value *Base, unsigned Align, const DataLayout &DL) {
  Type *Ty = Base->getType();
  assert(Ty->isSized() && "must be sized");
  APInt Offset(DL.getTypeStoreSizeInBits(Ty), 0);
  return isAligned(Base, Offset, Align, DL);
}

/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
static bool isDereferenceableAndAlignedPointer(
    const Value *V, unsigned Align, const DataLayout &DL,
    const Instruction *CtxI, const DominatorTree *DT,
    const TargetLibraryInfo *TLI, SmallPtrSetImpl<const Value *> &Visited) {
  // Note that it is not safe to speculate into a malloc'd region because
  // malloc may return null.

  // These are obviously ok if aligned.
  if (isa<AllocaInst>(V))
    return isAligned(V, Align, DL);

  // It's not always safe to follow a bitcast, for example:
  //   bitcast i8* (alloca i8) to i32*
  // would result in a 4-byte load from a 1-byte alloca. However,
  // if we're casting from a pointer to a type of larger size
  // to a type of smaller size (or the same size), and the alignment
  // is at least as large as for the resulting pointer type, then
  // we can look through the bitcast.
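  // (For illustration, the safe direction: looking through
  //   bitcast i32* (alloca i32) to i8*
  // is fine, since an i8 access touches no more bytes, and requires no more
  // alignment, than the original i32 alloca provides.)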
  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
    Type *STy = BC->getSrcTy()->getPointerElementType(),
         *DTy = BC->getDestTy()->getPointerElementType();
    if (STy->isSized() && DTy->isSized() &&
        (DL.getTypeStoreSize(STy) >= DL.getTypeStoreSize(DTy)) &&
        (DL.getABITypeAlignment(STy) >= DL.getABITypeAlignment(DTy)))
      return isDereferenceableAndAlignedPointer(BC->getOperand(0), Align, DL,
                                                CtxI, DT, TLI, Visited);
  }

  // Global variables which can't collapse to null are ok.
  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    if (!GV->hasExternalWeakLinkage())
      return isAligned(V, Align, DL);

  // byval arguments are okay.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr())
      return isAligned(V, Align, DL);

  if (isDereferenceableFromAttribute(V, DL, CtxI, DT, TLI))
    return isAligned(V, Align, DL);

  // For GEPs, determine if the indexing lands within the allocated object.
  if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    Type *Ty = GEP->getResultElementType();
    const Value *Base = GEP->getPointerOperand();

    // Conservatively require that the base pointer be fully dereferenceable
    // and aligned.
    if (!Visited.insert(Base).second)
      return false;
    if (!isDereferenceableAndAlignedPointer(Base, Align, DL, CtxI, DT, TLI,
                                            Visited))
      return false;

    APInt Offset(DL.getPointerTypeSizeInBits(GEP->getType()), 0);
    if (!GEP->accumulateConstantOffset(DL, Offset))
      return false;

    // Check if the load is within the bounds of the underlying object
    // and the offset is aligned.
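    // (A worked example with assumed values: for
    //   %p = getelementptr [4 x i32], [4 x i32]* %a, i64 0, i64 2
    // Offset accumulates to 8; with LoadSize 4 and an alloc size of 16 for
    // [4 x i32], 8 + 4 <= 16 holds, so the access stays in bounds.)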
    uint64_t LoadSize = DL.getTypeStoreSize(Ty);
    Type *BaseType = GEP->getSourceElementType();
    assert(isPowerOf2_32(Align) && "must be a power of 2!");
    return (Offset + LoadSize).ule(DL.getTypeAllocSize(BaseType)) &&
           !(Offset & APInt(Offset.getBitWidth(), Align - 1));
  }

  // For gc.relocate, look through relocations.
  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
    return isDereferenceableAndAlignedPointer(
        RelocateInst->getDerivedPtr(), Align, DL, CtxI, DT, TLI, Visited);

  if (const AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(V))
    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, DL,
                                              CtxI, DT, TLI, Visited);

  // If we don't know, assume the worst.
  return false;
}

bool llvm::isDereferenceableAndAlignedPointer(const Value *V, unsigned Align,
                                              const DataLayout &DL,
                                              const Instruction *CtxI,
                                              const DominatorTree *DT,
                                              const TargetLibraryInfo *TLI) {
  // When dereferenceability information is provided by a dereferenceable
  // attribute, we know exactly how many bytes are dereferenceable. If we can
  // determine the exact offset to the attributed variable, we can use that
  // information here.
  Type *VTy = V->getType();
  Type *Ty = VTy->getPointerElementType();

  // Require ABI alignment for loads without an alignment specification.
  if (Align == 0)
    Align = DL.getABITypeAlignment(Ty);

  if (Ty->isSized()) {
    APInt Offset(DL.getTypeStoreSizeInBits(VTy), 0);
    const Value *BV = V->stripAndAccumulateInBoundsConstantOffsets(DL, Offset);

    if (Offset.isNonNegative())
      if (isDereferenceableFromAttribute(BV, Offset, Ty, DL, CtxI, DT, TLI) &&
          isAligned(BV, Offset, Align, DL))
        return true;
  }

  SmallPtrSet<const Value *, 32> Visited;
  return ::isDereferenceableAndAlignedPointer(V, Align, DL, CtxI, DT, TLI,
                                              Visited);
}

bool llvm::isDereferenceablePointer(const Value *V, const DataLayout &DL,
                                    const Instruction *CtxI,
                                    const DominatorTree *DT,
                                    const TargetLibraryInfo *TLI) {
  return isDereferenceableAndAlignedPointer(V, 1, DL, CtxI, DT, TLI);
}

/// \brief Test if A and B will obviously have the same value.
///
/// This includes recognizing that %t0 and %t1 will have the same
/// value in code like this:
/// \code
///   %t0 = getelementptr \@a, 0, 3
///   store i32 0, i32* %t0
///   %t1 = getelementptr \@a, 0, 3
///   %t2 = load i32* %t1
/// \endcode
///
static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
  // Test if the values are trivially equivalent.
  if (A == B)
    return true;

  // Test if the values come from identical arithmetic instructions.
  // Use isIdenticalToWhenDefined instead of isIdenticalTo because
  // this function is only used when one address use dominates the
  // other, which means that they'll always either have the same
  // value or one of them will have an undefined value.
  if (isa<BinaryOperator>(A) || isa<CastInst>(A) || isa<PHINode>(A) ||
      isa<GetElementPtrInst>(A))
    if (const Instruction *BI = dyn_cast<Instruction>(B))
      if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
        return true;

  // Otherwise they may not be equivalent.
  return false;
}

/// \brief Check if executing a load of this pointer value cannot trap.
///
/// If DT and ScanFrom are specified, this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.
///
/// If it is not obviously safe to load from the specified pointer, we do
/// a quick local scan of the basic block containing \c ScanFrom, to determine
/// if the address is already accessed.
///
/// This uses the pointee type to determine how many bytes need to be safe to
/// load from the pointer.
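///
/// A minimal usage sketch (hypothetical call site; Ptr, InsertPt, and DL are
/// assumed names, not from this file):
/// \code
///   // Speculating a load before InsertPt is sound only if this returns true.
///   if (isSafeToLoadUnconditionally(Ptr, /*Align=*/0, DL, InsertPt))
///     /* ...hoist the load to InsertPt... */;
/// \endcode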
bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align,
                                       const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       const DominatorTree *DT,
                                       const TargetLibraryInfo *TLI) {
  // Zero alignment means that the load has the ABI alignment for the target.
  if (Align == 0)
    Align = DL.getABITypeAlignment(V->getType()->getPointerElementType());
  assert(isPowerOf2_32(Align));

  // If DT is not specified, we can't make a context-sensitive query.
  const Instruction *CtxI = DT ? ScanFrom : nullptr;
  if (isDereferenceableAndAlignedPointer(V, Align, DL, CtxI, DT, TLI))
    return true;

  int64_t ByteOffset = 0;
  Value *Base = V;
  Base = GetPointerBaseWithConstantOffset(V, ByteOffset, DL);
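  // (For illustration: if V were "getelementptr i8, i8* %base, i64 4", this
  // would return %base and set ByteOffset to 4.)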

  if (ByteOffset < 0) // out of bounds
    return false;

  Type *BaseType = nullptr;
  unsigned BaseAlign = 0;
  if (const AllocaInst *AI = dyn_cast<AllocaInst>(Base)) {
    // An alloca is safe to load from as long as it is suitably aligned.
    BaseType = AI->getAllocatedType();
    BaseAlign = AI->getAlignment();
  } else if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Base)) {
    // Global variables are not necessarily safe to load from if they are
    // interposed arbitrarily. Their size may change or they may be weak and
    // require a test to determine if they were in fact provided.
    if (!GV->isInterposable()) {
      BaseType = GV->getType()->getElementType();
      BaseAlign = GV->getAlignment();
    }
  }

  PointerType *AddrTy = cast<PointerType>(V->getType());
  uint64_t LoadSize = DL.getTypeStoreSize(AddrTy->getElementType());

  // If we found a base allocated type from either an alloca or global variable,
  // try to see if we are definitively within the allocated region. We need to
  // know the size of the base type and the loaded type to do anything in this
  // case.
  if (BaseType && BaseType->isSized()) {
    if (BaseAlign == 0)
      BaseAlign = DL.getPrefTypeAlignment(BaseType);

    if (Align <= BaseAlign) {
      // Check if the load is within the bounds of the underlying object.
      if (ByteOffset + LoadSize <= DL.getTypeAllocSize(BaseType) &&
          ((ByteOffset % Align) == 0))
        return true;
    }
  }

  if (!ScanFrom)
    return false;

  // Otherwise, be a little bit aggressive and scan the local block to see if
  // the pointer is already being loaded or stored from/to. If so, the previous
  // load or store would have already trapped, so there is no harm doing an
  // extra load (also, CSE will later eliminate the load entirely).
  BasicBlock::iterator BBI = ScanFrom->getIterator(),
                       E = ScanFrom->getParent()->begin();

  // We can at least always strip pointer casts even though we can't use the
  // base here.
  V = V->stripPointerCasts();

  while (BBI != E) {
    --BBI;

    // If we see a free or a call which may write to memory (i.e. which might
    // do a free), the pointer could be marked invalid.
    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<DbgInfoIntrinsic>(BBI))
      return false;

    Value *AccessedPtr;
    unsigned AccessedAlign;
    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      AccessedPtr = LI->getPointerOperand();
      AccessedAlign = LI->getAlignment();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      AccessedPtr = SI->getPointerOperand();
      AccessedAlign = SI->getAlignment();
    } else
      continue;

    Type *AccessedTy = AccessedPtr->getType()->getPointerElementType();
    if (AccessedAlign == 0)
      AccessedAlign = DL.getABITypeAlignment(AccessedTy);
    if (AccessedAlign < Align)
      continue;

    // Handle trivial cases.
    if (AccessedPtr == V)
      return true;

    if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&
        LoadSize <= DL.getTypeStoreSize(AccessedTy))
      return true;
  }
  return false;
}

/// DefMaxInstsToScan - the default number of maximum instructions
/// to scan in the block, used by FindAvailableLoadedValue().
/// FindAvailableLoadedValue() was introduced in r60148, to improve jump
/// threading in part by eliminating partially redundant loads.
/// At that point, the value of MaxInstsToScan was already set to '6'
/// without documented explanation.
cl::opt<unsigned>
llvm::DefMaxInstsToScan("available-load-scan-limit", cl::init(6), cl::Hidden,
  cl::desc("Use this to specify the default maximum number of instructions "
           "to scan backward from a given instruction, when searching for "
           "available loaded value"));

/// \brief Scan the ScanBB block backwards to see if we have the value at the
/// memory address *Ptr locally available within a small number of
/// instructions.
///
/// The scan starts from \c ScanFrom. \c MaxInstsToScan specifies the maximum
/// number of instructions to scan in the block. If it is set to \c 0, it will
/// scan the whole block.
///
/// If the value is available, this function returns it. If not, it returns the
/// iterator for the last validated instruction that the value would be live
/// through. If we scanned the entire block and didn't find something that
/// invalidates \c *Ptr or provides it, \c ScanFrom is left at the last
/// instruction processed and this returns null.
///
/// You can also optionally specify an alias analysis implementation, which
/// makes this more precise.
///
/// If \c AATags is non-null and a load or store is found, the AA tags from the
/// load or store are recorded there. If there are no AA tags or if no access
/// is found, it is left unmodified.
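///
/// A minimal usage sketch (hypothetical caller, shown for illustration only;
/// real callers also handle the case where the available value needs a cast):
/// \code
///   BasicBlock::iterator ScanFrom = Load->getIterator();
///   if (Value *Avail = FindAvailableLoadedValue(Load, Load->getParent(),
///                                               ScanFrom, DefMaxInstsToScan))
///     Load->replaceAllUsesWith(Avail);
/// \endcode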
Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BasicBlock *ScanBB,
                                      BasicBlock::iterator &ScanFrom,
                                      unsigned MaxInstsToScan,
                                      AliasAnalysis *AA, AAMDNodes *AATags) {
  if (MaxInstsToScan == 0)
    MaxInstsToScan = ~0U;

  Value *Ptr = Load->getPointerOperand();
  Type *AccessTy = Load->getType();

  // We can never remove a volatile load.
  if (Load->isVolatile())
    return nullptr;

  // Anything stronger than unordered is currently unimplemented.
  if (!Load->isUnordered())
    return nullptr;

  const DataLayout &DL = ScanBB->getModule()->getDataLayout();

  // Try to get the store size for the type.
  uint64_t AccessSize = DL.getTypeStoreSize(AccessTy);

  Value *StrippedPtr = Ptr->stripPointerCasts();

  while (ScanFrom != ScanBB->begin()) {
    // We must ignore debug info directives when counting (otherwise they
    // would affect codegen).
    Instruction *Inst = &*--ScanFrom;
    if (isa<DbgInfoIntrinsic>(Inst))
      continue;

    // Restore ScanFrom to expected value in case next test succeeds.
    ScanFrom++;

    // Don't scan huge blocks.
    if (MaxInstsToScan-- == 0)
      return nullptr;

    --ScanFrom;
    // If this is a load of Ptr, the loaded value is available.
    // (This is true even if the load is volatile or atomic, although
    // those cases are unlikely.)
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      if (AreEquivalentAddressValues(
              LI->getPointerOperand()->stripPointerCasts(), StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
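        // (isAtomic() returns a bool, so the comparison below is true exactly
        // when the earlier access is non-atomic but the load being replaced
        // is atomic.)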
        if (LI->isAtomic() < Load->isAtomic())
          return nullptr;

        if (AATags)
          LI->getAAMetadata(*AATags);
        return LI;
      }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();
      // If this is a store through Ptr, the value is available!
      // (This is true even if the store is volatile or atomic, although
      // those cases are unlikely.)
      if (AreEquivalentAddressValues(StorePtr, StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(SI->getValueOperand()->getType(),
                                               AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
        if (SI->isAtomic() < Load->isAtomic())
          return nullptr;

        if (AATags)
          SI->getAAMetadata(*AATags);
        return SI->getOperand(0);
      }

      // If both StrippedPtr and StorePtr reach all the way to an alloca or
      // global and they are different, ignore the store. This is a trivial form
      // of alias analysis that is important for reg2mem'd code.
      if ((isa<AllocaInst>(StrippedPtr) || isa<GlobalVariable>(StrippedPtr)) &&
          (isa<AllocaInst>(StorePtr) || isa<GlobalVariable>(StorePtr)) &&
          StrippedPtr != StorePtr)
        continue;

      // If we have alias analysis and it says the store won't modify the
      // loaded value, ignore the store.
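      // (getModRefInfo returns a ModRefInfo bitmask; masking with MRI_Mod
      // isolates the "may write" bit, so a zero result means the store is
      // known not to modify the queried location.)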
      if (AA && (AA->getModRefInfo(SI, StrippedPtr, AccessSize) & MRI_Mod) == 0)
        continue;

      // Otherwise the store may or may not alias the pointer; bail out.
      ++ScanFrom;
      return nullptr;
    }

    // If this is some other instruction that may clobber Ptr, bail out.
    if (Inst->mayWriteToMemory()) {
      // If alias analysis claims that it really won't modify the load,
      // ignore it.
      if (AA &&
          (AA->getModRefInfo(Inst, StrippedPtr, AccessSize) & MRI_Mod) == 0)
        continue;

      // May modify the pointer, bail out.
      ++ScanFrom;
      return nullptr;
    }
  }

  // Got to the start of the block, we didn't find it, but are done for this
  // block.
  return nullptr;
}