//===- Loads.cpp - Local load analysis ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Statepoint.h"

using namespace llvm;

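/// Determine the known alignment of Base: the alignment recorded on the value
/// itself when available, otherwise the ABI alignment of the pointee type if
/// that type is sized, otherwise None.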
static MaybeAlign getBaseAlign(const Value *Base, const DataLayout &DL) {
  if (const MaybeAlign PA = Base->getPointerAlignment(DL))
    return *PA;
  Type *const Ty = Base->getType()->getPointerElementType();
  if (!Ty->isSized())
    return None;
  return Align(DL.getABITypeAlignment(Ty));
}

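/// Check that an access at Base + Offset is Alignment-aligned: Base must be
/// known to be aligned to at least Alignment, and Offset must be a multiple
/// of Alignment.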
static bool isAligned(const Value *Base, const APInt &Offset, Align Alignment,
                      const DataLayout &DL) {
  if (MaybeAlign BA = getBaseAlign(Base, DL)) {
    const APInt APBaseAlign(Offset.getBitWidth(), BA->value());
    const APInt APAlign(Offset.getBitWidth(), Alignment.value());
    assert(APAlign.isPowerOf2() && "must be a power of 2!");
    return APBaseAlign.uge(APAlign) && !(Offset & (APAlign - 1));
  }
  return false;
}

/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
static bool isDereferenceableAndAlignedPointer(
    const Value *V, Align Alignment, const APInt &Size, const DataLayout &DL,
    const Instruction *CtxI, const DominatorTree *DT,
    SmallPtrSetImpl<const Value *> &Visited) {
  // Already visited?  Bail out, we've likely hit unreachable code.
  if (!Visited.insert(V).second)
    return false;

  // Note that it is not safe to speculate into a malloc'd region because
  // malloc may return null.

  // bitcast instructions are no-ops as far as dereferenceability is concerned.
  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V))
    return isDereferenceableAndAlignedPointer(BC->getOperand(0), Alignment,
                                              Size, DL, CtxI, DT, Visited);

  bool CheckForNonNull = false;
  APInt KnownDerefBytes(Size.getBitWidth(),
                        V->getPointerDereferenceableBytes(DL, CheckForNonNull));
  if (KnownDerefBytes.getBoolValue() && KnownDerefBytes.uge(Size))
    if (!CheckForNonNull || isKnownNonZero(V, DL, 0, nullptr, CtxI, DT)) {
      // As we recursed through GEPs to get here, we've incrementally checked
      // that each step advanced by a multiple of the alignment. If our base is
      // properly aligned, then the original offset accessed must also be.
      Type *Ty = V->getType();
      assert(Ty->isSized() && "must be sized");
      APInt Offset(DL.getTypeStoreSizeInBits(Ty), 0);
      return isAligned(V, Offset, Alignment, DL);
    }

  // For GEPs, determine if the indexing lands within the allocated object.
  if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    const Value *Base = GEP->getPointerOperand();

    APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
    if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
        !Offset.urem(APInt(Offset.getBitWidth(), Alignment.value()))
             .isMinValue())
      return false;

    // If the base pointer is dereferenceable for Offset+Size bytes, then the
    // GEP (== Base + Offset) is dereferenceable for Size bytes.  If the base
    // pointer is aligned to Align bytes, and the Offset is divisible by Align
    // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
    // aligned to Align bytes.

    // Offset and Size may have different bit widths if we have visited an
    // addrspacecast, so we can't do arithmetic directly on the APInt values.
    return isDereferenceableAndAlignedPointer(
        Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,
        CtxI, DT, Visited);
  }

  // For gc.relocate, look through relocations
  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
    return isDereferenceableAndAlignedPointer(
        RelocateInst->getDerivedPtr(), Alignment, Size, DL, CtxI, DT, Visited);

  if (const AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(V))
    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
                                              Size, DL, CtxI, DT, Visited);

  if (const auto *Call = dyn_cast<CallBase>(V))
    if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))
      return isDereferenceableAndAlignedPointer(RP, Alignment, Size, DL, CtxI,
                                                DT, Visited);

  // If we don't know, assume the worst.
  return false;
}

bool llvm::isDereferenceableAndAlignedPointer(const Value *V, Align Alignment,
                                              const APInt &Size,
                                              const DataLayout &DL,
                                              const Instruction *CtxI,
                                              const DominatorTree *DT) {
  // Note: At the moment, Size can be zero.  This ends up being interpreted as
  // a query of whether [Base, V] is dereferenceable and V is aligned (since
  // that's what the implementation happened to do).  It's unclear if this is
  // the desired semantic, but at least SelectionDAG does exercise this case.

  SmallPtrSet<const Value *, 32> Visited;
  return ::isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, DT,
                                              Visited);
}

bool llvm::isDereferenceableAndAlignedPointer(const Value *V, Type *Ty,
                                              MaybeAlign MA,
                                              const DataLayout &DL,
                                              const Instruction *CtxI,
                                              const DominatorTree *DT) {
  if (!Ty->isSized())
    return false;

  // When dereferenceability information is provided by a dereferenceable
  // attribute, we know exactly how many bytes are dereferenceable. If we can
  // determine the exact offset to the attributed variable, we can use that
  // information here.

  // Require ABI alignment for loads without alignment specification
  const Align Alignment = DL.getValueOrABITypeAlignment(MA, Ty);
  APInt AccessSize(DL.getIndexTypeSizeInBits(V->getType()),
                   DL.getTypeStoreSize(Ty));
  return isDereferenceableAndAlignedPointer(V, Alignment, AccessSize, DL, CtxI,
                                            DT);
}

bool llvm::isDereferenceablePointer(const Value *V, Type *Ty,
                                    const DataLayout &DL,
                                    const Instruction *CtxI,
                                    const DominatorTree *DT) {
  return isDereferenceableAndAlignedPointer(V, Ty, Align::None(), DL, CtxI, DT);
}

/// Test if A and B will obviously have the same value.
///
/// This includes recognizing that %t0 and %t1 will have the same
/// value in code like this:
/// \code
///   %t0 = getelementptr \@a, 0, 3
///   store i32 0, i32* %t0
///   %t1 = getelementptr \@a, 0, 3
///   %t2 = load i32* %t1
/// \endcode
///
static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
  // Test if the values are trivially equivalent.
  if (A == B)
    return true;

  // Test if the values come from identical arithmetic instructions.
  // Use isIdenticalToWhenDefined instead of isIdenticalTo because
  // this function is only used when one address use dominates the
  // other, which means that they'll always either have the same
  // value or one of them will have an undefined value.
  if (isa<BinaryOperator>(A) || isa<CastInst>(A) || isa<PHINode>(A) ||
      isa<GetElementPtrInst>(A))
    if (const Instruction *BI = dyn_cast<Instruction>(B))
      if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
        return true;

  // Otherwise they may not be equivalent.
  return false;
}

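/// Return true if the load LI, which is assumed to execute inside loop L, is
/// known to access only dereferenceable, suitably aligned memory on every
/// iteration, so it can be executed unconditionally (for example, hoisted or
/// vectorized without predication).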
bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
                                             ScalarEvolution &SE,
                                             DominatorTree &DT) {
  auto &DL = LI->getModule()->getDataLayout();
  Value *Ptr = LI->getPointerOperand();

  APInt EltSize(DL.getIndexTypeSizeInBits(Ptr->getType()),
                DL.getTypeStoreSize(LI->getType()));
  const Align Alignment = DL.getValueOrABITypeAlignment(
      MaybeAlign(LI->getAlignment()), LI->getType());

  Instruction *HeaderFirstNonPHI = L->getHeader()->getFirstNonPHI();

  // If given a uniform (i.e. non-varying) address, see if we can prove the
  // access is safe within the loop w/o needing predication.
  if (L->isLoopInvariant(Ptr))
    return isDereferenceableAndAlignedPointer(Ptr, Alignment, EltSize, DL,
                                              HeaderFirstNonPHI, &DT);

  // Otherwise, check to see if we have a repeating access pattern where we can
  // prove that all accesses are well aligned and dereferenceable.
  auto *AddRec = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(Ptr));
  if (!AddRec || AddRec->getLoop() != L || !AddRec->isAffine())
    return false;
  auto *Step = dyn_cast<SCEVConstant>(AddRec->getStepRecurrence(SE));
  if (!Step)
    return false;
  // TODO: generalize to access patterns which have gaps
  if (Step->getAPInt() != EltSize)
    return false;

  // TODO: If the symbolic trip count has a small bound (max count), we might
  // be able to prove safety.
  auto TC = SE.getSmallConstantTripCount(L);
  if (!TC)
    return false;

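  // With a constant trip count and a stride equal to the element size, the
  // loop touches exactly TC * EltSize contiguous bytes starting at the
  // recurrence's start value, so it suffices to check that whole range.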
  const APInt AccessSize = TC * EltSize;

  auto *StartS = dyn_cast<SCEVUnknown>(AddRec->getStart());
  if (!StartS)
    return false;
  assert(SE.isLoopInvariant(StartS, L) && "implied by addrec definition");
  Value *Base = StartS->getValue();

  // For the moment, restrict ourselves to the case where the access size is a
  // multiple of the requested alignment and the base is aligned.
  // TODO: generalize if a case is found which warrants it
  if (EltSize.urem(Alignment.value()) != 0)
    return false;
  return isDereferenceableAndAlignedPointer(Base, Alignment, AccessSize, DL,
                                            HeaderFirstNonPHI, &DT);
}

/// Check if executing a load of this pointer value cannot trap.
///
/// If DT and ScanFrom are specified this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.
///
/// If it is not obviously safe to load from the specified pointer, we do
/// a quick local scan of the basic block containing \c ScanFrom, to determine
/// if the address is already accessed.
///
/// This uses the given access size, which callers derive from the pointee
/// type, to determine how many bytes need to be safe to load from the pointer.
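///
/// A minimal usage sketch (a hypothetical caller deciding whether a load
/// \c LI can be speculated; the names here are illustrative only):
/// \code
///   const DataLayout &DL = LI->getModule()->getDataLayout();
///   if (isSafeToLoadUnconditionally(LI->getPointerOperand(), LI->getType(),
///                                   MaybeAlign(LI->getAlignment()), DL,
///                                   /*ScanFrom=*/LI, /*DT=*/nullptr))
///     ; // Safe to hoist or speculate the load.
/// \endcode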
bool llvm::isSafeToLoadUnconditionally(Value *V, MaybeAlign MA, APInt &Size,
                                       const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       const DominatorTree *DT) {
  // Zero alignment means that the load has the ABI alignment for the target.
  const Align Alignment =
      DL.getValueOrABITypeAlignment(MA, V->getType()->getPointerElementType());

  // If DT is not specified, we can't make a context-sensitive query.
  const Instruction *CtxI = DT ? ScanFrom : nullptr;
  if (isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, DT))
    return true;

  if (!ScanFrom)
    return false;

  if (Size.getBitWidth() > 64)
    return false;
  const uint64_t LoadSize = Size.getZExtValue();

  // Otherwise, be a little bit aggressive by scanning the local block where we
  // want to check to see if the pointer is already being loaded or stored
  // from/to.  If so, the previous load or store would have already trapped,
  // so there is no harm doing an extra load (also, CSE will later eliminate
  // the load entirely).
  BasicBlock::iterator BBI = ScanFrom->getIterator(),
                       E = ScanFrom->getParent()->begin();

  // We can at least always strip pointer casts even though we can't use the
  // base here.
  V = V->stripPointerCasts();

  while (BBI != E) {
    --BBI;

297 // If we see a free or a call which may write to memory (i.e. which might do
298 // a free) the pointer could be marked invalid.
    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<DbgInfoIntrinsic>(BBI))
      return false;

    Value *AccessedPtr;
    MaybeAlign MaybeAccessedAlign;
    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      // Ignore volatile loads. The execution of a volatile load cannot
      // be used to prove an address is backed by regular memory; it can,
      // for example, point to an MMIO register.
      if (LI->isVolatile())
        continue;
      AccessedPtr = LI->getPointerOperand();
      MaybeAccessedAlign = MaybeAlign(LI->getAlignment());
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      // Ignore volatile stores (see comment for loads).
      if (SI->isVolatile())
        continue;
      AccessedPtr = SI->getPointerOperand();
      MaybeAccessedAlign = MaybeAlign(SI->getAlignment());
    } else
      continue;

    Type *AccessedTy = AccessedPtr->getType()->getPointerElementType();

    const Align AccessedAlign =
        DL.getValueOrABITypeAlignment(MaybeAccessedAlign, AccessedTy);
    if (AccessedAlign < Alignment)
      continue;

    // Handle trivial cases.
    if (AccessedPtr == V && LoadSize <= DL.getTypeStoreSize(AccessedTy))
      return true;

    if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&
        LoadSize <= DL.getTypeStoreSize(AccessedTy))
      return true;
  }
  return false;
}

bool llvm::isSafeToLoadUnconditionally(Value *V, Type *Ty, MaybeAlign Alignment,
                                       const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       const DominatorTree *DT) {
  APInt Size(DL.getIndexTypeSizeInBits(V->getType()), DL.getTypeStoreSize(Ty));
  return isSafeToLoadUnconditionally(V, Alignment, Size, DL, ScanFrom, DT);
}

/// DefMaxInstsToScan - the default maximum number of instructions
/// to scan in the block, used by FindAvailableLoadedValue().
/// FindAvailableLoadedValue() was introduced in r60148, to improve jump
/// threading in part by eliminating partially redundant loads.
/// At that point, the value of MaxInstsToScan was already set to '6'
/// without documented explanation.
cl::opt<unsigned> llvm::DefMaxInstsToScan(
    "available-load-scan-limit", cl::init(6), cl::Hidden,
    cl::desc("Use this to specify the default maximum number of instructions "
             "to scan backward from a given instruction, when searching for "
             "an available loaded value"));

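/// Scan backwards from \p Load in its containing block for an earlier load or
/// store whose value can be reused, returning that value, or nullptr if none
/// is found before a potential clobber or the scan budget runs out.
///
/// A minimal usage sketch (names are illustrative; a bit- or pointer-cast may
/// still be needed if the available value's type differs from the load's):
/// \code
///   BasicBlock::iterator ScanFrom = LI->getIterator();
///   bool IsLoadCSE = false;
///   if (Value *Avail = FindAvailableLoadedValue(LI, LI->getParent(), ScanFrom,
///                                               DefMaxInstsToScan,
///                                               /*AA=*/nullptr, &IsLoadCSE))
///     ; // Avail can replace the result of LI.
/// \endcode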
Value *llvm::FindAvailableLoadedValue(LoadInst *Load,
                                      BasicBlock *ScanBB,
                                      BasicBlock::iterator &ScanFrom,
                                      unsigned MaxInstsToScan,
                                      AliasAnalysis *AA, bool *IsLoad,
                                      unsigned *NumScanedInst) {
  // Don't CSE a load that is volatile or anything stronger than unordered.
  if (!Load->isUnordered())
    return nullptr;

  return FindAvailablePtrLoadStore(
      Load->getPointerOperand(), Load->getType(), Load->isAtomic(), ScanBB,
      ScanFrom, MaxInstsToScan, AA, IsLoad, NumScanedInst);
}

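/// Scan backwards from \p ScanFrom in \p ScanBB for a load of, or store to, an
/// address equivalent to \p Ptr whose value is reusable for a load of type
/// \p AccessTy. Returns the available value, setting *IsLoadCSE (when
/// non-null) to true if it came from a load and false if from a store, or
/// nullptr if the scan hits a potential clobber or exhausts MaxInstsToScan.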
Value *llvm::FindAvailablePtrLoadStore(Value *Ptr, Type *AccessTy,
                                       bool AtLeastAtomic, BasicBlock *ScanBB,
                                       BasicBlock::iterator &ScanFrom,
                                       unsigned MaxInstsToScan,
                                       AliasAnalysis *AA, bool *IsLoadCSE,
                                       unsigned *NumScanedInst) {
  if (MaxInstsToScan == 0)
    MaxInstsToScan = ~0U;

  const DataLayout &DL = ScanBB->getModule()->getDataLayout();
  Value *StrippedPtr = Ptr->stripPointerCasts();

  while (ScanFrom != ScanBB->begin()) {
    // We must ignore debug info directives when counting (otherwise they
    // would affect codegen).
    Instruction *Inst = &*--ScanFrom;
    if (isa<DbgInfoIntrinsic>(Inst))
      continue;

    // Restore ScanFrom to expected value in case next test succeeds
    ScanFrom++;

    if (NumScanedInst)
      ++(*NumScanedInst);

    // Don't scan huge blocks.
    if (MaxInstsToScan-- == 0)
      return nullptr;

    --ScanFrom;
    // If this is a load of Ptr, the loaded value is available.
    // (This is true even if the load is volatile or atomic, although
    // those cases are unlikely.)
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      if (AreEquivalentAddressValues(
              LI->getPointerOperand()->stripPointerCasts(), StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
        if (LI->isAtomic() < AtLeastAtomic)
          return nullptr;

        if (IsLoadCSE)
          *IsLoadCSE = true;
        return LI;
      }

    // Try to get the store size for the type.
    auto AccessSize = LocationSize::precise(DL.getTypeStoreSize(AccessTy));

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();
      // If this is a store through Ptr, the value is available!
      // (This is true even if the store is volatile or atomic, although
      // those cases are unlikely.)
      if (AreEquivalentAddressValues(StorePtr, StrippedPtr) &&
          CastInst::isBitOrNoopPointerCastable(SI->getValueOperand()->getType(),
                                               AccessTy, DL)) {

        // We can value forward from an atomic to a non-atomic, but not the
        // other way around.
        if (SI->isAtomic() < AtLeastAtomic)
          return nullptr;

        if (IsLoadCSE)
          *IsLoadCSE = false;
        return SI->getOperand(0);
      }

      // If both StrippedPtr and StorePtr reach all the way to an alloca or
      // global and they are different, ignore the store.  This is a trivial
      // form of alias analysis that is important for reg2mem'd code.
      if ((isa<AllocaInst>(StrippedPtr) || isa<GlobalVariable>(StrippedPtr)) &&
          (isa<AllocaInst>(StorePtr) || isa<GlobalVariable>(StorePtr)) &&
          StrippedPtr != StorePtr)
        continue;

      // If we have alias analysis and it says the store won't modify the
      // loaded value, ignore the store.
      if (AA && !isModSet(AA->getModRefInfo(SI, StrippedPtr, AccessSize)))
        continue;

      // Otherwise the store may or may not alias the pointer; bail out.
      ++ScanFrom;
      return nullptr;
    }

    // If this is some other instruction that may clobber Ptr, bail out.
    if (Inst->mayWriteToMemory()) {
      // If alias analysis claims that it really won't modify the load,
      // ignore it.
      if (AA && !isModSet(AA->getModRefInfo(Inst, StrippedPtr, AccessSize)))
        continue;

      // May modify the pointer, bail out.
      ++ScanFrom;
      return nullptr;
    }
  }

  // Got to the start of the block, we didn't find it, but are done for this
  // block.
  return nullptr;
}