//===- Loads.cpp - Local load analysis ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Statepoint.h"

using namespace llvm;

static bool isAligned(const Value *Base, const APInt &Offset, unsigned Align,
                      const DataLayout &DL) {
  APInt BaseAlign(Offset.getBitWidth(), Base->getPointerAlignment(DL));

  if (!BaseAlign) {
    Type *Ty = Base->getType()->getPointerElementType();
    if (!Ty->isSized())
      return false;
    BaseAlign = DL.getABITypeAlignment(Ty);
  }

  APInt Alignment(Offset.getBitWidth(), Align);

  assert(Alignment.isPowerOf2() && "must be a power of 2!");
  return BaseAlign.uge(Alignment) && !(Offset & (Alignment - 1));
}

static bool isAligned(const Value *Base, unsigned Align, const DataLayout &DL) {
  Type *Ty = Base->getType();
  assert(Ty->isSized() && "must be sized");
  APInt Offset(DL.getTypeStoreSizeInBits(Ty), 0);
  return isAligned(Base, Offset, Align, DL);
}
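
// Illustrative note (not in the original file): for a Base known to be
// 16-byte aligned, a query with Offset = 8 and Align = 8 passes both checks
// above: BaseAlign.uge(Alignment) is 16 >= 8, and (Offset & (Alignment - 1))
// is 8 & 7 == 0. With Offset = 4 the mask check fails (4 & 7 != 0), so the
// access is reported as under-aligned.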

/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
static bool isDereferenceableAndAlignedPointer(
    const Value *V, unsigned Align, const APInt &Size, const DataLayout &DL,
    const Instruction *CtxI, const DominatorTree *DT,
    SmallPtrSetImpl<const Value *> &Visited) {
  // Already visited?  Bail out, we've likely hit unreachable code.
  if (!Visited.insert(V).second)
    return false;

  // Note that it is not safe to speculate into a malloc'd region because
  // malloc may return null.

  // bitcast instructions are no-ops as far as dereferenceability is concerned.
  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V))
    return isDereferenceableAndAlignedPointer(BC->getOperand(0), Align, Size,
                                              DL, CtxI, DT, Visited);

  bool CheckForNonNull = false;
  APInt KnownDerefBytes(Size.getBitWidth(),
                        V->getPointerDereferenceableBytes(DL, CheckForNonNull));
  if (KnownDerefBytes.getBoolValue()) {
    if (KnownDerefBytes.uge(Size))
      if (!CheckForNonNull || isKnownNonZero(V, DL, 0, nullptr, CtxI, DT))
        return isAligned(V, Align, DL);
  }

  // For GEPs, determine if the indexing lands within the allocated object.
  if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    const Value *Base = GEP->getPointerOperand();

    APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
    if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
        !Offset.urem(APInt(Offset.getBitWidth(), Align)).isMinValue())
      return false;

    // If the base pointer is dereferenceable for Offset+Size bytes, then the
    // GEP (== Base + Offset) is dereferenceable for Size bytes.  If the base
    // pointer is aligned to Align bytes, and the Offset is divisible by Align
    // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
    // aligned to Align bytes.
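    // Concrete instance (illustrative, not from the original): with Base
    // 8-byte aligned and dereferenceable for 16 bytes, a GEP at Offset = 8
    // queried with Size = 8 and Align = 8 succeeds: Offset + Size = 16 <= 16,
    // and Offset is 1 * Align, so Base + Offset remains 8-byte aligned.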

    // Offset and Size may have different bit widths if we have visited an
    // addrspacecast, so we can't do arithmetic directly on the APInt values.
    return isDereferenceableAndAlignedPointer(
        Base, Align, Offset + Size.sextOrTrunc(Offset.getBitWidth()),
        DL, CtxI, DT, Visited);
  }

  // For gc.relocate, look through relocations.
  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
    return isDereferenceableAndAlignedPointer(
        RelocateInst->getDerivedPtr(), Align, Size, DL, CtxI, DT, Visited);

  if (const AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(V))
    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, Size,
                                              DL, CtxI, DT, Visited);

  if (const auto *Call = dyn_cast<CallBase>(V))
    if (auto *RP = getArgumentAliasingToReturnedPointer(Call))
      return isDereferenceableAndAlignedPointer(RP, Align, Size, DL, CtxI, DT,
                                                Visited);

  // If we don't know, assume the worst.
  return false;
}
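
// Illustrative IR (an assumed example, not from this file): the walk above
// strips casts and constant GEPs, so for
//
//   %p = getelementptr inbounds [16 x i8], [16 x i8]* %base, i64 0, i64 8
//   %q = bitcast i8* %p to i32*
//
// a 4-byte query on %q recurses through the bitcast and the GEP down to
// %base, asking whether %base is dereferenceable for Offset + Size = 12
// bytes (with Offset = 8 = 2 * 4 keeping a 4-byte alignment requirement
// satisfied).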

bool llvm::isDereferenceableAndAlignedPointer(const Value *V, unsigned Align,
                                              const APInt &Size,
                                              const DataLayout &DL,
                                              const Instruction *CtxI,
                                              const DominatorTree *DT) {
  SmallPtrSet<const Value *, 32> Visited;
  return ::isDereferenceableAndAlignedPointer(V, Align, Size, DL, CtxI, DT,
                                              Visited);
}
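
// Hedged usage sketch (hypothetical caller, not part of this file): a pass
// that wants to speculate a 4-byte load of Ptr at InsertPt might ask
//
//   APInt Size(DL.getIndexTypeSizeInBits(Ptr->getType()), 4);
//   if (isDereferenceableAndAlignedPointer(Ptr, /*Align=*/4, Size, DL,
//                                          /*CtxI=*/InsertPt, DT))
//     ...;  // the load cannot trap and is suitably aligned
//
// where Ptr, InsertPt, and DT are assumed to be supplied by the caller.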

bool llvm::isDereferenceableAndAlignedPointer(const Value *V, unsigned Align,
                                              const DataLayout &DL,
                                              const Instruction *CtxI,
                                              const DominatorTree *DT) {
  // When dereferenceability information is provided by a dereferenceable
  // attribute, we know exactly how many bytes are dereferenceable. If we can
  // determine the exact offset to the attributed variable, we can use that
  // information here.
  Type *VTy = V->getType();
  Type *Ty = VTy->getPointerElementType();

  // Require ABI alignment for loads without alignment specification.
  if (Align == 0)
    Align = DL.getABITypeAlignment(Ty);

  if (!Ty->isSized())
    return false;

  SmallPtrSet<const Value *, 32> Visited;
  return ::isDereferenceableAndAlignedPointer(
      V, Align, APInt(DL.getIndexTypeSizeInBits(VTy), DL.getTypeStoreSize(Ty)),
      DL, CtxI, DT, Visited);
}

bool llvm::isDereferenceablePointer(const Value *V, const DataLayout &DL,
                                    const Instruction *CtxI,
                                    const DominatorTree *DT) {
  return isDereferenceableAndAlignedPointer(V, 1, DL, CtxI, DT);
}

/// Test if A and B will obviously have the same value.
///
/// This includes recognizing that %t0 and %t1 will have the same
/// value in code like this:
/// \code
///   %t0 = getelementptr \@a, 0, 3
///   store i32 0, i32* %t0
///   %t1 = getelementptr \@a, 0, 3
///   %t2 = load i32* %t1
/// \endcode
///
static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
  // Test if the values are trivially equivalent.
  if (A == B)
    return true;

  // Test if the values come from identical arithmetic instructions.
  // Use isIdenticalToWhenDefined instead of isIdenticalTo because
  // this function is only used when one address use dominates the
  // other, which means that they'll always either have the same
  // value or one of them will have an undefined value.
  if (isa<BinaryOperator>(A) || isa<CastInst>(A) || isa<PHINode>(A) ||
      isa<GetElementPtrInst>(A))
    if (const Instruction *BI = dyn_cast<Instruction>(B))
      if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
        return true;

  // Otherwise they may not be equivalent.
  return false;
}

/// Check if executing a load of this pointer value cannot trap.
///
/// If DT and ScanFrom are specified this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.
///
/// If it is not obviously safe to load from the specified pointer, we do
/// a quick local scan of the basic block containing \c ScanFrom, to determine
/// if the address is already accessed.
///
/// This uses the pointee type to determine how many bytes need to be safe to
/// load from the pointer.
bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align,
                                       const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       const DominatorTree *DT) {
  // Zero alignment means that the load has the ABI alignment for the target.
  if (Align == 0)
    Align = DL.getABITypeAlignment(V->getType()->getPointerElementType());
  assert(isPowerOf2_32(Align));
  // If DT is not specified we can't make a context-sensitive query.
  const Instruction *CtxI = DT ? ScanFrom : nullptr;
  if (isDereferenceableAndAlignedPointer(V, Align, DL, CtxI, DT))
    return true;

  int64_t ByteOffset = 0;
  Value *Base = V;
  Base = GetPointerBaseWithConstantOffset(V, ByteOffset, DL);

  if (ByteOffset < 0) // out of bounds
    return false;

  Type *BaseType = nullptr;
  unsigned BaseAlign = 0;
  if (const AllocaInst *AI = dyn_cast<AllocaInst>(Base)) {
224 // An alloca is safe to load from as load as it is suitably aligned.
    BaseType = AI->getAllocatedType();
    BaseAlign = AI->getAlignment();
  } else if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Base)) {
    // Global variables are not necessarily safe to load from if they are
    // interposed arbitrarily. Their size may change or they may be weak and
    // require a test to determine if they were in fact provided.
    if (!GV->isInterposable()) {
      BaseType = GV->getType()->getElementType();
      BaseAlign = GV->getAlignment();
    }
  }

  PointerType *AddrTy = cast<PointerType>(V->getType());
  uint64_t LoadSize = DL.getTypeStoreSize(AddrTy->getElementType());

  // If we found a base allocated type from either an alloca or global variable,
  // try to see if we are definitively within the allocated region. We need to
  // know the size of the base type and the loaded type to do anything in this
  // case.
  if (BaseType && BaseType->isSized()) {
    if (BaseAlign == 0)
      BaseAlign = DL.getPrefTypeAlignment(BaseType);

    if (Align <= BaseAlign) {
      // Check if the load is within the bounds of the underlying object.
      if (ByteOffset + LoadSize <= DL.getTypeAllocSize(BaseType) &&
          ((ByteOffset % Align) == 0))
        return true;
    }
  }

  if (!ScanFrom)
    return false;

  // Otherwise, be a little bit aggressive by scanning the local block where we
  // want to check to see if the pointer is already being loaded or stored
  // from/to.  If so, the previous load or store would have already trapped,
  // so there is no harm doing an extra load (also, CSE will later eliminate
  // the load entirely).
  BasicBlock::iterator BBI = ScanFrom->getIterator(),
                       E = ScanFrom->getParent()->begin();

  // We can at least always strip pointer casts even though we can't use the
  // base here.
  V = V->stripPointerCasts();

  while (BBI != E) {
    --BBI;

    // If we see a free or a call which may write to memory (i.e. which might
    // do a free), the pointer could be marked invalid.
    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<DbgInfoIntrinsic>(BBI))
      return false;

    Value *AccessedPtr;
    unsigned AccessedAlign;
    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      // Ignore volatile loads. The execution of a volatile load cannot
      // be used to prove an address is backed by regular memory; it can,
      // for example, point to an MMIO register.
      if (LI->isVolatile())
        continue;
      AccessedPtr = LI->getPointerOperand();
      AccessedAlign = LI->getAlignment();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      // Ignore volatile stores (see comment for loads).
      if (SI->isVolatile())
        continue;
      AccessedPtr = SI->getPointerOperand();
      AccessedAlign = SI->getAlignment();
    } else
      continue;

    Type *AccessedTy = AccessedPtr->getType()->getPointerElementType();
    if (AccessedAlign == 0)
      AccessedAlign = DL.getABITypeAlignment(AccessedTy);
    if (AccessedAlign < Align)
      continue;

    // Handle trivial cases.
    if (AccessedPtr == V)
      return true;

    if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&
        LoadSize <= DL.getTypeStoreSize(AccessedTy))
      return true;
  }
  return false;
}
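
// Hedged usage sketch (hypothetical caller, not part of this file): a
// transform that wants to hoist a load into block BB might check
//
//   if (isSafeToLoadUnconditionally(Ptr, LI->getAlignment(), DL,
//                                   /*ScanFrom=*/BB->getTerminator(), DT))
//     ...;  // executing the load before the terminator cannot trap
//
// where Ptr, LI, BB, and DT are assumed to come from the caller.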

/// DefMaxInstsToScan - the default number of maximum instructions
/// to scan in the block, used by FindAvailableLoadedValue().
/// FindAvailableLoadedValue() was introduced in r60148, to improve jump
/// threading in part by eliminating partially redundant loads.
/// At that point, the value of MaxInstsToScan was already set to '6'
/// without documented explanation.
cl::opt<unsigned>
llvm::DefMaxInstsToScan("available-load-scan-limit", cl::init(6), cl::Hidden,
  cl::desc("Use this to specify the default maximum number of instructions "
           "to scan backward from a given instruction, when searching for "
           "available loaded value"));

Value *llvm::FindAvailableLoadedValue(LoadInst *Load,
                                      BasicBlock *ScanBB,
                                      BasicBlock::iterator &ScanFrom,
                                      unsigned MaxInstsToScan,
                                      AliasAnalysis *AA, bool *IsLoad,
                                      unsigned *NumScanedInst) {
  // Don't CSE a load that is volatile or anything stronger than unordered.
  if (!Load->isUnordered())
    return nullptr;

  return FindAvailablePtrLoadStore(
      Load->getPointerOperand(), Load->getType(), Load->isAtomic(), ScanBB,
      ScanFrom, MaxInstsToScan, AA, IsLoad, NumScanedInst);
}
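
// Hedged usage sketch (hypothetical caller, not part of this file):
//
//   BasicBlock::iterator ScanFrom = LI->getIterator();
//   bool IsLoadCSE;
//   if (Value *Avail = FindAvailableLoadedValue(LI, LI->getParent(), ScanFrom,
//                                               DefMaxInstsToScan, AA,
//                                               &IsLoadCSE))
//     ...;  // Avail holds the value LI would load; replace uses of LI
//
// LI and AA are assumed to be the caller's LoadInst and AliasAnalysis;
// ScanFrom is updated in place so a caller can resume scanning.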

Value *llvm::FindAvailablePtrLoadStore(Value *Ptr, Type *AccessTy,
                                       bool AtLeastAtomic, BasicBlock *ScanBB,
                                       BasicBlock::iterator &ScanFrom,
                                       unsigned MaxInstsToScan,
                                       AliasAnalysis *AA, bool *IsLoadCSE,
                                       unsigned *NumScanedInst) {
  if (MaxInstsToScan == 0)
    MaxInstsToScan = ~0U;

  const DataLayout &DL = ScanBB->getModule()->getDataLayout();

  // Try to get the store size for the type.
  auto AccessSize = LocationSize::precise(DL.getTypeStoreSize(AccessTy));

  Value *StrippedPtr = Ptr->stripPointerCasts();

  while (ScanFrom != ScanBB->begin()) {
    // We must ignore debug info directives when counting (otherwise they
    // would affect codegen).
    Instruction *Inst = &*--ScanFrom;
    if (isa<DbgInfoIntrinsic>(Inst))
      continue;

    // Restore ScanFrom to expected value in case next test succeeds.
    ScanFrom++;

    if (NumScanedInst)
      ++(*NumScanedInst);

    // Don't scan huge blocks.
    if (MaxInstsToScan-- == 0)
      return nullptr;

    --ScanFrom;
    // If this is a load of Ptr, the loaded value is available.
    // (This is true even if the load is volatile or atomic, although
    // those cases are unlikely.)
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      if (AreEquivalentAddressValues(
              LI->getPointerOperand()->stripPointerCasts(), StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
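        // (Illustrative note: bools compare as integers here, so
        // LI->isAtomic() < AtLeastAtomic is true exactly when the found
        // load is non-atomic but an at-least-atomic result was requested;
        // forwarding an atomic load to a non-atomic use remains allowed.)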
        if (LI->isAtomic() < AtLeastAtomic)
          return nullptr;

        if (IsLoadCSE)
          *IsLoadCSE = true;
        return LI;
      }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();
      // If this is a store through Ptr, the value is available!
      // (This is true even if the store is volatile or atomic, although
      // those cases are unlikely.)
      if (AreEquivalentAddressValues(StorePtr, StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(SI->getValueOperand()->getType(),
                                               AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
        if (SI->isAtomic() < AtLeastAtomic)
          return nullptr;

        if (IsLoadCSE)
          *IsLoadCSE = false;
        return SI->getOperand(0);
      }

      // If both StrippedPtr and StorePtr reach all the way to an alloca or
      // global and they are different, ignore the store. This is a trivial form
      // of alias analysis that is important for reg2mem'd code.
      if ((isa<AllocaInst>(StrippedPtr) || isa<GlobalVariable>(StrippedPtr)) &&
          (isa<AllocaInst>(StorePtr) || isa<GlobalVariable>(StorePtr)) &&
          StrippedPtr != StorePtr)
        continue;

      // If we have alias analysis and it says the store won't modify the loaded
      // value, ignore the store.
      if (AA && !isModSet(AA->getModRefInfo(SI, StrippedPtr, AccessSize)))
        continue;

      // Otherwise the store may or may not alias the pointer; bail out.
      ++ScanFrom;
      return nullptr;
    }

    // If this is some other instruction that may clobber Ptr, bail out.
    if (Inst->mayWriteToMemory()) {
      // If alias analysis claims that it really won't modify the load,
      // ignore it.
      if (AA && !isModSet(AA->getModRefInfo(Inst, StrippedPtr, AccessSize)))
        continue;

      // May modify the pointer, bail out.
      ++ScanFrom;
      return nullptr;
    }
  }

  // We reached the start of the block without finding the value; we're done
  // with this block.
  return nullptr;
}