//===- BasicAliasAnalysis.cpp - Stateless Alias Analysis Impl -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the primary stateless implementation of the
// Alias Analysis interface that implements identities (two different
// globals cannot alias, etc), but does no stateful analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/KnownBits.h"
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <utility>

#define DEBUG_TYPE "basicaa"

using namespace llvm;

/// Enable analysis of recursive PHI nodes.
static cl::opt<bool> EnableRecPhiAnalysis("basicaa-recphi", cl::Hidden,
                                          cl::init(false));
/// SearchLimitReached / SearchTimes show how often the limit to
/// decompose GEPs is reached. Hitting the limit affects the precision
/// of basic alias analysis.
STATISTIC(SearchLimitReached, "Number of times the limit to "
                              "decompose GEPs is reached");
STATISTIC(SearchTimes, "Number of times a GEP is decomposed");

76
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +000077/// Cutoff after which to stop analysing a set of phi nodes potentially involved
Sanjay Patel9613b292016-01-17 23:13:48 +000078/// in a cycle. Because we are analysing 'through' phi nodes, we need to be
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +000079/// careful with value equivalence. We use reachability to make sure a value
80/// cannot be involved in a cycle.
81const unsigned MaxNumPhiBBsValueReachabilityCheck = 20;
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +000082
// The max limit of the search depth in DecomposeGEPExpression() and
// GetUnderlyingObject(); both functions need to use the same search
// depth, otherwise the algorithm in aliasGEP will assert.
static const unsigned MaxLookupSearchDepth = 6;

bool BasicAAResult::invalidate(Function &F, const PreservedAnalyses &PA,
                               FunctionAnalysisManager::Invalidator &Inv) {
  // We don't care if this analysis itself is preserved; it has no state. But
  // we need to check that the analyses it depends on have been. Note that we
  // may be created without handles to some analyses and in that case don't
  // depend on them.
  if (Inv.invalidate<AssumptionAnalysis>(F, PA) ||
      (DT && Inv.invalidate<DominatorTreeAnalysis>(F, PA)) ||
      (LI && Inv.invalidate<LoopAnalysis>(F, PA)))
    return true;

  // Otherwise this analysis result remains valid.
  return false;
}

//===----------------------------------------------------------------------===//
// Useful predicates
//===----------------------------------------------------------------------===//

/// Returns true if the pointer is to a function-local object that never
/// escapes from the function.
static bool isNonEscapingLocalObject(const Value *V) {
  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocaInst>(V) || isNoAliasCall(V))
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    return !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function. Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr())
      // Note that even if the argument is marked nocapture, we still need to
      // check for copies made inside the function. The nocapture attribute
      // only specifies that there are no copies made that outlive the
      // function.
      return !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);

  return false;
}

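// An illustrative sketch (added for exposition; the IR below is
// hypothetical): in
//   %buf = alloca [16 x i8]
//   call void @use(i8* %p)
// the alloca %buf is a non-escaping local object as long as its address is
// never stored or passed to a capturing call, whereas a pointer handed to
// the unknown function @use may be captured and would not qualify.
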
/// Returns true if the pointer is one which would have been considered an
/// escape by isNonEscapingLocalObject.
static bool isEscapeSource(const Value *V) {
  if (isa<CallInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V))
    return true;

  // The load case works because isNonEscapingLocalObject considers all
  // stores to be escapes (it passes true for the StoreCaptures argument
  // to PointerMayBeCaptured).
  if (isa<LoadInst>(V))
    return true;

  return false;
}

/// Returns the size of the object specified by V or UnknownSize if unknown.
static uint64_t getObjectSize(const Value *V, const DataLayout &DL,
                              const TargetLibraryInfo &TLI,
                              bool RoundToAlign = false) {
  uint64_t Size;
  ObjectSizeOpts Opts;
  Opts.RoundToAlign = RoundToAlign;
  if (getObjectSize(V, Size, DL, &TLI, Opts))
    return Size;
  return MemoryLocation::UnknownSize;
}

/// Returns true if we can prove that the object specified by V is smaller than
/// Size.
static bool isObjectSmallerThan(const Value *V, uint64_t Size,
                                const DataLayout &DL,
                                const TargetLibraryInfo &TLI) {
  // Note that the meanings of the "object" are slightly different in the
  // following contexts:
  //   c1: llvm::getObjectSize()
  //   c2: llvm.objectsize() intrinsic
  //   c3: isObjectSmallerThan()
  // c1 and c2 share the same meaning; however, the meaning of "object" in c3
  // refers to the "entire object".
  //
  // Consider this example:
  //   char *p = (char*)malloc(100);
  //   char *q = p + 80;
  //
  // In the context of c1 and c2, the "object" pointed to by q refers to the
  // stretch of memory of q[0:19]. So, getObjectSize(q) should return 20.
  //
  // However, in the context of c3, the "object" refers to the chunk of memory
  // being allocated. So, the "object" has 100 bytes, and q points into the
  // middle of the "object". In case q is passed to isObjectSmallerThan() as
  // the 1st parameter, before llvm::getObjectSize() is called to get the size
  // of the entire object, we should:
  //   - either rewind the pointer q to the base address of the object in
  //     question (in this case rewind to p), or
  //   - just give up. It is up to the caller to make sure the pointer is
  //     pointing to the base address of the object.
  //
  // We go for the 2nd option for simplicity.
  if (!isIdentifiedObject(V))
    return false;

  // This function needs to use the aligned object size because we allow
  // reads a bit past the end given sufficient alignment.
  uint64_t ObjectSize = getObjectSize(V, DL, TLI, /*RoundToAlign*/ true);

  return ObjectSize != MemoryLocation::UnknownSize && ObjectSize < Size;
}

/// Returns true if we can prove that the object specified by V has size Size.
static bool isObjectSize(const Value *V, uint64_t Size, const DataLayout &DL,
                         const TargetLibraryInfo &TLI) {
  uint64_t ObjectSize = getObjectSize(V, DL, TLI);
  return ObjectSize != MemoryLocation::UnknownSize && ObjectSize == Size;
}

//===----------------------------------------------------------------------===//
// GetElementPtr Instruction Decomposition and Analysis
//===----------------------------------------------------------------------===//

/// Analyzes the specified value as a linear expression: "A*V + B", where A and
/// B are constant integers.
///
/// Returns the scale and offset values as APInts, returns V as a Value*, and
/// returns whether we looked through any sign or zero extends. The incoming
/// Value is known to have IntegerType, and it may already be sign or zero
/// extended.
///
/// Note that this looks through extends, so the high bits may not be
/// represented in the result.
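///
/// An illustrative sketch (added for exposition; the IR is hypothetical):
/// given
///   %a = mul i32 %x, 4
///   %b = add i32 %a, 8
/// analyzing %b yields V = %x with Scale = 4 and Offset = 8, provided the
/// nsw/nuw bookkeeping below permits the decomposition.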
/*static*/ const Value *BasicAAResult::GetLinearExpression(
    const Value *V, APInt &Scale, APInt &Offset, unsigned &ZExtBits,
    unsigned &SExtBits, const DataLayout &DL, unsigned Depth,
    AssumptionCache *AC, DominatorTree *DT, bool &NSW, bool &NUW) {
  assert(V->getType()->isIntegerTy() && "Not an integer value");

  // Limit our recursion depth.
  if (Depth == 6) {
    Scale = 1;
    Offset = 0;
    return V;
  }

  if (const ConstantInt *Const = dyn_cast<ConstantInt>(V)) {
    // If it's a constant, just convert it to an offset and remove the
    // variable. If we've been called recursively, the Offset bit width will be
    // greater than the constant's (the Offset's always as wide as the
    // outermost call), so we'll zext here and process any extension in the
    // isa<SExtInst> & isa<ZExtInst> cases below.
    Offset += Const->getValue().zextOrSelf(Offset.getBitWidth());
    assert(Scale == 0 && "Constant values don't have a scale");
    return V;
  }

  if (const BinaryOperator *BOp = dyn_cast<BinaryOperator>(V)) {
    if (ConstantInt *RHSC = dyn_cast<ConstantInt>(BOp->getOperand(1))) {
      // If we've been called recursively, then Offset and Scale will be wider
      // than the BOp operands. We'll always zext it here as we'll process sign
      // extensions below (see the isa<SExtInst> / isa<ZExtInst> cases).
      APInt RHS = RHSC->getValue().zextOrSelf(Offset.getBitWidth());

      switch (BOp->getOpcode()) {
      default:
        // We don't understand this instruction, so we can't decompose it any
        // further.
        Scale = 1;
        Offset = 0;
        return V;
      case Instruction::Or:
        // X|C == X+C if all the bits in C are unset in X. Otherwise we can't
        // analyze it.
        if (!MaskedValueIsZero(BOp->getOperand(0), RHSC->getValue(), DL, 0, AC,
                               BOp, DT)) {
          Scale = 1;
          Offset = 0;
          return V;
        }
        LLVM_FALLTHROUGH;
      case Instruction::Add:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset += RHS;
        break;
      case Instruction::Sub:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset -= RHS;
        break;
      case Instruction::Mul:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset *= RHS;
        Scale *= RHS;
        break;
      case Instruction::Shl:
        V = GetLinearExpression(BOp->getOperand(0), Scale, Offset, ZExtBits,
                                SExtBits, DL, Depth + 1, AC, DT, NSW, NUW);
        Offset <<= RHS.getLimitedValue();
        Scale <<= RHS.getLimitedValue();
        // The semantics of nsw and nuw for left shifts don't match those of
        // multiplications, so we won't propagate them.
        NSW = NUW = false;
        return V;
      }

      if (isa<OverflowingBinaryOperator>(BOp)) {
        NUW &= BOp->hasNoUnsignedWrap();
        NSW &= BOp->hasNoSignedWrap();
      }
      return V;
    }
  }

  // Since GEP indices are sign extended anyway, we don't care about the high
  // bits of a sign or zero extended value - just scales and offsets. The
  // extensions have to be consistent though.
  if (isa<SExtInst>(V) || isa<ZExtInst>(V)) {
    Value *CastOp = cast<CastInst>(V)->getOperand(0);
    unsigned NewWidth = V->getType()->getPrimitiveSizeInBits();
    unsigned SmallWidth = CastOp->getType()->getPrimitiveSizeInBits();
    unsigned OldZExtBits = ZExtBits, OldSExtBits = SExtBits;
    const Value *Result =
        GetLinearExpression(CastOp, Scale, Offset, ZExtBits, SExtBits, DL,
                            Depth + 1, AC, DT, NSW, NUW);

    // zext(zext(%x)) == zext(%x), and similarly for sext; we'll handle this
    // by just incrementing the number of bits we've extended by.
    unsigned ExtendedBy = NewWidth - SmallWidth;

    if (isa<SExtInst>(V) && ZExtBits == 0) {
      // sext(sext(%x, a), b) == sext(%x, a + b)

      if (NSW) {
        // We haven't sign-wrapped, so it's valid to decompose sext(%x + c)
        // into sext(%x) + sext(c). We'll sext the Offset ourselves:
        unsigned OldWidth = Offset.getBitWidth();
        Offset = Offset.trunc(SmallWidth).sext(NewWidth).zextOrSelf(OldWidth);
      } else {
        // We may have signed-wrapped, so don't decompose sext(%x + c) into
        // sext(%x) + sext(c).
        Scale = 1;
        Offset = 0;
        Result = CastOp;
        ZExtBits = OldZExtBits;
        SExtBits = OldSExtBits;
      }
      SExtBits += ExtendedBy;
    } else {
      // sext(zext(%x, a), b) = zext(zext(%x, a), b) = zext(%x, a + b)

      if (!NUW) {
        // We may have unsigned-wrapped, so don't decompose zext(%x + c) into
        // zext(%x) + zext(c).
        Scale = 1;
        Offset = 0;
        Result = CastOp;
        ZExtBits = OldZExtBits;
        SExtBits = OldSExtBits;
      }
      ZExtBits += ExtendedBy;
    }

    return Result;
  }

  Scale = 1;
  Offset = 0;
  return V;
}

/// Ensures that a pointer offset fits in an integer of size PointerSize
/// (in bits) when that size is smaller than 64. This is an issue in
/// particular for 32b programs with negative indices that rely on two's
/// complement wrap-arounds for precise alias information.
static int64_t adjustToPointerSize(int64_t Offset, unsigned PointerSize) {
  assert(PointerSize <= 64 && "Invalid PointerSize!");
  unsigned ShiftBits = 64 - PointerSize;
  return (int64_t)((uint64_t)Offset << ShiftBits) >> ShiftBits;
}

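// A worked example (added for exposition): with PointerSize == 32 we get
// ShiftBits == 32, so an Offset of 0x00000000FFFFFFFF becomes
// (int64_t)0xFFFFFFFF00000000 >> 32 == -1; offsets are effectively
// sign-extended from the pointer's width, matching 32-bit wraparound.
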
/// If V is a symbolic pointer expression, decompose it into a base pointer
/// with a constant offset and a number of scaled symbolic offsets.
///
/// The scaled symbolic offsets (represented by pairs of a Value* and a scale
/// in the VarIndices vector) are Value*'s that are known to be scaled by the
/// specified amount, but which may have other unrepresented high bits. As
/// such, the gep cannot necessarily be reconstructed from its decomposed form.
///
/// When DataLayout is around, this function is capable of analyzing everything
/// that GetUnderlyingObject can look through. To be able to do that,
/// GetUnderlyingObject and DecomposeGEPExpression must use the same search
/// depth (MaxLookupSearchDepth). When DataLayout is not around, it just looks
/// through pointer casts.
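///
/// A hedged example (added for exposition; %struct.S and the IR are
/// hypothetical): for
///   getelementptr inbounds %struct.S, %struct.S* %base, i64 %i, i32 1
/// the result would be Decomposed.Base == %base, StructOffset equal to the
/// byte offset of field 1 in %struct.S, and a single VarIndex scaling %i by
/// the allocation size of %struct.S.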
bool BasicAAResult::DecomposeGEPExpression(const Value *V,
       DecomposedGEP &Decomposed, const DataLayout &DL, AssumptionCache *AC,
       DominatorTree *DT) {
  // Limit recursion depth to limit compile time in crazy cases.
  unsigned MaxLookup = MaxLookupSearchDepth;
  SearchTimes++;

  Decomposed.StructOffset = 0;
  Decomposed.OtherOffset = 0;
  Decomposed.VarIndices.clear();
  do {
    // See if this is a bitcast or GEP.
    const Operator *Op = dyn_cast<Operator>(V);
    if (!Op) {
      // The only non-operator case we can handle is GlobalAliases.
      if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
        if (!GA->isInterposable()) {
          V = GA->getAliasee();
          continue;
        }
      }
      Decomposed.Base = V;
      return false;
    }

    if (Op->getOpcode() == Instruction::BitCast ||
        Op->getOpcode() == Instruction::AddrSpaceCast) {
      V = Op->getOperand(0);
      continue;
    }

    const GEPOperator *GEPOp = dyn_cast<GEPOperator>(Op);
    if (!GEPOp) {
      if (auto CS = ImmutableCallSite(V))
        if (const Value *RV = CS.getReturnedArgOperand()) {
          V = RV;
          continue;
        }

      // If it's not a GEP, hand it off to SimplifyInstruction to see if it
      // can come up with something. This matches what GetUnderlyingObject does.
      if (const Instruction *I = dyn_cast<Instruction>(V))
        // TODO: Get a DominatorTree and AssumptionCache and use them here
        // (these are both now available in this function, but this should be
        // updated when GetUnderlyingObject is updated). TLI should be
        // provided also.
        if (const Value *Simplified =
                SimplifyInstruction(const_cast<Instruction *>(I), DL)) {
          V = Simplified;
          continue;
        }

      Decomposed.Base = V;
      return false;
    }

    // Don't attempt to analyze GEPs over unsized objects.
    if (!GEPOp->getSourceElementType()->isSized()) {
      Decomposed.Base = V;
      return false;
    }

    unsigned AS = GEPOp->getPointerAddressSpace();
    // Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
    gep_type_iterator GTI = gep_type_begin(GEPOp);
    unsigned PointerSize = DL.getPointerSizeInBits(AS);
    // Assume all GEP operands are constants until proven otherwise.
    bool GepHasConstantOffset = true;
    for (User::const_op_iterator I = GEPOp->op_begin() + 1, E = GEPOp->op_end();
         I != E; ++I, ++GTI) {
      const Value *Index = *I;
      // Compute the (potentially symbolic) offset in bytes for this index.
      if (StructType *STy = GTI.getStructTypeOrNull()) {
        // For a struct, add the member offset.
        unsigned FieldNo = cast<ConstantInt>(Index)->getZExtValue();
        if (FieldNo == 0)
          continue;

        Decomposed.StructOffset +=
            DL.getStructLayout(STy)->getElementOffset(FieldNo);
        continue;
      }

      // For an array/pointer, add the element offset, explicitly scaled.
      if (const ConstantInt *CIdx = dyn_cast<ConstantInt>(Index)) {
        if (CIdx->isZero())
          continue;
        Decomposed.OtherOffset +=
            DL.getTypeAllocSize(GTI.getIndexedType()) * CIdx->getSExtValue();
        continue;
      }

      GepHasConstantOffset = false;

      uint64_t Scale = DL.getTypeAllocSize(GTI.getIndexedType());
      unsigned ZExtBits = 0, SExtBits = 0;

      // If the integer type is smaller than the pointer size, it is implicitly
      // sign extended to pointer size.
      unsigned Width = Index->getType()->getIntegerBitWidth();
      if (PointerSize > Width)
        SExtBits += PointerSize - Width;

      // Use GetLinearExpression to decompose the index into a C1*V+C2 form.
      APInt IndexScale(Width, 0), IndexOffset(Width, 0);
      bool NSW = true, NUW = true;
      Index = GetLinearExpression(Index, IndexScale, IndexOffset, ZExtBits,
                                  SExtBits, DL, 0, AC, DT, NSW, NUW);

      // The GEP index scale ("Scale") scales C1*V+C2, yielding (C1*V+C2)*Scale.
      // This gives us an aggregate computation of (C1*Scale)*V + C2*Scale.
      Decomposed.OtherOffset += IndexOffset.getSExtValue() * Scale;
      Scale *= IndexScale.getSExtValue();

      // If we already had an occurrence of this index variable, merge this
      // scale into it. For example, we want to handle:
      //   A[x][x] -> x*16 + x*4 -> x*20
      // This also ensures that 'x' only appears in the index list once.
      for (unsigned i = 0, e = Decomposed.VarIndices.size(); i != e; ++i) {
        if (Decomposed.VarIndices[i].V == Index &&
            Decomposed.VarIndices[i].ZExtBits == ZExtBits &&
            Decomposed.VarIndices[i].SExtBits == SExtBits) {
          Scale += Decomposed.VarIndices[i].Scale;
          Decomposed.VarIndices.erase(Decomposed.VarIndices.begin() + i);
          break;
        }
      }

      // Make sure that we have a scale that makes sense for this target's
      // pointer size.
      Scale = adjustToPointerSize(Scale, PointerSize);

      if (Scale) {
        VariableGEPIndex Entry = {Index, ZExtBits, SExtBits,
                                  static_cast<int64_t>(Scale)};
        Decomposed.VarIndices.push_back(Entry);
      }
    }

    // Take care of wrap-arounds.
    if (GepHasConstantOffset) {
      Decomposed.StructOffset =
          adjustToPointerSize(Decomposed.StructOffset, PointerSize);
      Decomposed.OtherOffset =
          adjustToPointerSize(Decomposed.OtherOffset, PointerSize);
    }

    // Analyze the base pointer next.
    V = GEPOp->getOperand(0);
  } while (--MaxLookup);

  // If the chain of expressions is too deep, just return early.
  Decomposed.Base = V;
  SearchLimitReached++;
  return true;
}

/// Returns whether the given pointer value points to memory that is local to
/// the function, with global constants being considered local to all
/// functions.
bool BasicAAResult::pointsToConstantMemory(const MemoryLocation &Loc,
                                           bool OrLocal) {
  assert(Visited.empty() && "Visited must be cleared after use!");

  unsigned MaxLookup = 8;
  SmallVector<const Value *, 16> Worklist;
  Worklist.push_back(Loc.Ptr);
  do {
    const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), DL);
    if (!Visited.insert(V).second) {
      Visited.clear();
      return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
    }

    // An alloca instruction defines local memory.
    if (OrLocal && isa<AllocaInst>(V))
      continue;

    // A global constant counts as local memory for our purposes.
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
      // Note: this doesn't require GV to be "ODR" because it isn't legal for a
      // global to be marked constant in some modules and non-constant in
      // others. GV may even be a declaration, not a definition.
      if (!GV->isConstant()) {
        Visited.clear();
        return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
      }
      continue;
    }

    // If both select values point to local memory, then so does the select.
    if (const SelectInst *SI = dyn_cast<SelectInst>(V)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }

    // If all values incoming to a phi node point to local memory, then so does
    // the phi.
    if (const PHINode *PN = dyn_cast<PHINode>(V)) {
      // Don't bother inspecting phi nodes with many operands.
      if (PN->getNumIncomingValues() > MaxLookup) {
        Visited.clear();
        return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
      }
      for (Value *IncValue : PN->incoming_values())
        Worklist.push_back(IncValue);
      continue;
    }

    // Otherwise be conservative.
    Visited.clear();
    return AAResultBase::pointsToConstantMemory(Loc, OrLocal);
  } while (!Worklist.empty() && --MaxLookup);

  Visited.clear();
  return Worklist.empty();
}

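// For illustration (added for exposition; @msg is hypothetical): a query on
//   @msg = private unnamed_addr constant [4 x i8] c"abc\00"
// returns true, since a global constant counts as local (and constant)
// memory for every function, while a non-constant global defers to the
// base implementation.
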
/// Returns the behavior when calling the given call site.
FunctionModRefBehavior BasicAAResult::getModRefBehavior(ImmutableCallSite CS) {
  if (CS.doesNotAccessMemory())
    // Can't do better than this.
    return FMRB_DoesNotAccessMemory;

  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  // If the callsite knows it only reads memory, don't return worse
  // than that.
  if (CS.onlyReadsMemory())
    Min = FMRB_OnlyReadsMemory;
  else if (CS.doesNotReadMemory())
    Min = FMRB_DoesNotReadMemory;

  if (CS.onlyAccessesArgMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesArgumentPointees);

  // If CS has operand bundles then aliasing attributes from the function it
  // calls do not directly apply to the CallSite. This can be made more
  // precise in the future.
  if (!CS.hasOperandBundles())
    if (const Function *F = CS.getCalledFunction())
      Min =
          FunctionModRefBehavior(Min & getBestAAResults().getModRefBehavior(F));

  return Min;
}

/// Returns the behavior when calling the given function. For use when the call
/// site is not known.
FunctionModRefBehavior BasicAAResult::getModRefBehavior(const Function *F) {
  // If the function declares it doesn't access memory, we can't do better.
  if (F->doesNotAccessMemory())
    return FMRB_DoesNotAccessMemory;

  FunctionModRefBehavior Min = FMRB_UnknownModRefBehavior;

  // If the function declares it only reads memory, go with that.
  if (F->onlyReadsMemory())
    Min = FMRB_OnlyReadsMemory;
  else if (F->doesNotReadMemory())
    Min = FMRB_DoesNotReadMemory;

  if (F->onlyAccessesArgMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesArgumentPointees);
  else if (F->onlyAccessesInaccessibleMemory())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleMem);
  else if (F->onlyAccessesInaccessibleMemOrArgMem())
    Min = FunctionModRefBehavior(Min & FMRB_OnlyAccessesInaccessibleOrArgMem);

  return Min;
}

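// An illustrative combination (added for exposition; @g is hypothetical):
//   declare void @g(i8* %p) readonly argmemonly
// intersects FMRB_OnlyReadsMemory with FMRB_OnlyAccessesArgumentPointees,
// yielding FMRB_OnlyReadsArgumentPointees.
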
/// Returns true if this is a writeonly (i.e. Mod only) parameter.
static bool isWriteOnlyParam(ImmutableCallSite CS, unsigned ArgIdx,
                             const TargetLibraryInfo &TLI) {
  if (CS.paramHasAttr(ArgIdx, Attribute::WriteOnly))
    return true;

  // We can bound the aliasing properties of memset_pattern16 just as we can
  // for memcpy/memset. This is particularly important because the
  // LoopIdiomRecognizer likes to turn loops into calls to memset_pattern16
  // whenever possible.
  // FIXME: Consider handling this in InferFunctionAttr.cpp together with other
  // attributes.
  LibFunc F;
  if (CS.getCalledFunction() && TLI.getLibFunc(*CS.getCalledFunction(), F) &&
      F == LibFunc_memset_pattern16 && TLI.has(F))
    if (ArgIdx == 0)
      return true;

  // TODO: memset_pattern4, memset_pattern8
  // TODO: _chk variants
  // TODO: strcmp, strcpy

  return false;
}

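// For illustration (added for exposition; @sink is hypothetical): the
// pointer parameter in
//   declare void @sink(i8* writeonly %p)
// is write-only, as is argument 0 of memset_pattern16 when TLI recognizes
// that library function for the target.
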
ModRefInfo BasicAAResult::getArgModRefInfo(ImmutableCallSite CS,
                                           unsigned ArgIdx) {
  // Checking for known builtin intrinsics and target library functions.
  if (isWriteOnlyParam(CS, ArgIdx, TLI))
    return MRI_Mod;

  if (CS.paramHasAttr(ArgIdx, Attribute::ReadOnly))
    return MRI_Ref;

  if (CS.paramHasAttr(ArgIdx, Attribute::ReadNone))
    return MRI_NoModRef;

  return AAResultBase::getArgModRefInfo(CS, ArgIdx);
}

static bool isIntrinsicCall(ImmutableCallSite CS, Intrinsic::ID IID) {
  const IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction());
  return II && II->getIntrinsicID() == IID;
}

#ifndef NDEBUG
static const Function *getParent(const Value *V) {
  if (const Instruction *inst = dyn_cast<Instruction>(V)) {
    if (!inst->getParent())
      return nullptr;
    return inst->getParent()->getParent();
  }

  if (const Argument *arg = dyn_cast<Argument>(V))
    return arg->getParent();

  return nullptr;
}

static bool notDifferentParent(const Value *O1, const Value *O2) {
  const Function *F1 = getParent(O1);
  const Function *F2 = getParent(O2);

  return !F1 || !F2 || F1 == F2;
}
#endif

AliasResult BasicAAResult::alias(const MemoryLocation &LocA,
                                 const MemoryLocation &LocB) {
  assert(notDifferentParent(LocA.Ptr, LocB.Ptr) &&
         "BasicAliasAnalysis doesn't support interprocedural queries.");

  // If we have a directly cached entry for these locations, we have recursed
  // through this once, so just return the cached results. Notably, when this
  // happens, we don't clear the cache.
  auto CacheIt = AliasCache.find(LocPair(LocA, LocB));
  if (CacheIt != AliasCache.end())
    return CacheIt->second;

  AliasResult Alias = aliasCheck(LocA.Ptr, LocA.Size, LocA.AATags, LocB.Ptr,
                                 LocB.Size, LocB.AATags);
  // AliasCache rarely has more than 1 or 2 elements; always use
  // shrink_and_clear so it quickly returns to the inline capacity of the
  // SmallDenseMap if it ever grows larger.
  // FIXME: This should really be shrink_to_inline_capacity_and_clear().
  AliasCache.shrink_and_clear();
  VisitedPhiBBs.clear();
  return Alias;
}

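// A usage sketch (added for exposition; the variables are hypothetical):
// clients typically call
//   AliasResult R = AA.alias(MemoryLocation(P1, Size1),
//                            MemoryLocation(P2, Size2));
// and must treat anything other than NoAlias conservatively.
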
/// Checks to see if the specified callsite can clobber the specified memory
/// object.
///
/// Since we only look at local properties of this function, we really can't
/// say much about this query. We do, however, use simple "address taken"
/// analysis on local objects.
ModRefInfo BasicAAResult::getModRefInfo(ImmutableCallSite CS,
                                        const MemoryLocation &Loc) {
  assert(notDifferentParent(CS.getInstruction(), Loc.Ptr) &&
         "AliasAnalysis query involving multiple functions!");

  const Value *Object = GetUnderlyingObject(Loc.Ptr, DL);

  // If this is a tail call and Loc.Ptr points to a stack location, we know
  // that the tail call cannot access or modify the local stack.
  // We cannot exclude byval arguments here; these belong to the caller of
  // the current function, not to the current function, and a tail callee
  // may reference them.
  if (isa<AllocaInst>(Object))
    if (const CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
      if (CI->isTailCall())
        return MRI_NoModRef;

  // If the pointer is to a locally allocated object that does not escape,
  // then the call cannot mod/ref the pointer unless the call takes the pointer
  // as an argument, and itself doesn't capture it.
  if (!isa<Constant>(Object) && CS.getInstruction() != Object &&
      isNonEscapingLocalObject(Object)) {

    // Optimistically assume that the call doesn't touch Object and check this
    // assumption in the following loop.
    ModRefInfo Result = MRI_NoModRef;

    unsigned OperandNo = 0;
    for (auto CI = CS.data_operands_begin(), CE = CS.data_operands_end();
         CI != CE; ++CI, ++OperandNo) {
      // Only look at the no-capture or byval pointer arguments. If this
      // pointer were passed to arguments that were neither of these, then it
      // couldn't be no-capture.
      if (!(*CI)->getType()->isPointerTy() ||
          (!CS.doesNotCapture(OperandNo) &&
           OperandNo < CS.getNumArgOperands() &&
           !CS.isByValArgument(OperandNo)))
        continue;

      // The call doesn't access memory through this operand, so we don't care
      // whether it aliases with Object.
      if (CS.doesNotAccessMemory(OperandNo))
        continue;

      // If this is a no-capture pointer argument, see if we can tell that it
      // is impossible to alias the pointer we're checking.
      AliasResult AR =
          getBestAAResults().alias(MemoryLocation(*CI), MemoryLocation(Object));

      // The operand doesn't alias 'Object'; continue looking for other
      // aliases.
      if (AR == NoAlias)
        continue;
      // The operand aliases 'Object', but the call doesn't modify it.
      // Strengthen the initial assumption and keep looking in case there are
      // more aliases.
      if (CS.onlyReadsMemory(OperandNo)) {
        Result = static_cast<ModRefInfo>(Result | MRI_Ref);
        continue;
      }
      // The operand aliases 'Object', but the call only writes into it.
      if (CS.doesNotReadMemory(OperandNo)) {
        Result = static_cast<ModRefInfo>(Result | MRI_Mod);
        continue;
      }
      // This operand aliases 'Object' and the call reads and writes into it.
      Result = MRI_ModRef;
      break;
    }

    // Early return if we improved the mod/ref information.
    if (Result != MRI_ModRef)
      return Result;
  }

  // If the CallSite is to malloc or calloc, we can assume that it doesn't
  // modify any IR visible value. This is only valid because we assume these
  // routines do not read values visible in the IR. TODO: Consider special
  // casing realloc and strdup routines which access only their arguments as
  // well. Or alternatively, replace all of this with inaccessiblememonly once
  // that's implemented fully.
  auto *Inst = CS.getInstruction();
  if (isMallocOrCallocLikeFn(Inst, &TLI)) {
    // Be conservative if the accessed pointer may alias the allocation -
    // fall back to the generic handling below.
    if (getBestAAResults().alias(MemoryLocation(Inst), Loc) == NoAlias)
      return MRI_NoModRef;
  }

  // The semantics of memcpy intrinsics forbid overlap between their respective
  // operands, i.e., the source and destination of any given memcpy must
  // no-alias. If Loc must-aliases either one of these two locations, then it
  // necessarily no-aliases the other.
  if (auto *Inst = dyn_cast<MemCpyInst>(CS.getInstruction())) {
    AliasResult SrcAA, DestAA;

    if ((SrcAA = getBestAAResults().alias(MemoryLocation::getForSource(Inst),
                                          Loc)) == MustAlias)
      // Loc is exactly the memcpy source, thus disjoint from the memcpy dest.
      return MRI_Ref;
    if ((DestAA = getBestAAResults().alias(MemoryLocation::getForDest(Inst),
                                           Loc)) == MustAlias)
      // The converse case.
      return MRI_Mod;

    // It's also possible for Loc to alias both src and dest, or neither.
    ModRefInfo rv = MRI_NoModRef;
    if (SrcAA != NoAlias)
      rv = static_cast<ModRefInfo>(rv | MRI_Ref);
    if (DestAA != NoAlias)
      rv = static_cast<ModRefInfo>(rv | MRI_Mod);
    return rv;
  }

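  // An illustrative consequence of the rule above (added for exposition;
  // operand names are hypothetical): for a memcpy with destination %dst and
  // source %src, a location that must-aliases %src is only read (MRI_Ref)
  // and one that must-aliases %dst is only written (MRI_Mod), because the
  // two operands may not overlap.
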
  // While the assume intrinsic is marked as arbitrarily writing so that
  // proper control dependencies will be maintained, it never aliases any
  // particular memory location.
  if (isIntrinsicCall(CS, Intrinsic::assume))
    return MRI_NoModRef;

  // Like assumes, guard intrinsics are also marked as arbitrarily writing so
  // that proper control dependencies are maintained but they never modify any
  // particular memory location.
  //
  // *Unlike* assumes, guard intrinsics are modeled as reading memory since the
  // heap state at the point the guard is issued needs to be consistent in case
  // the guard invokes the "deopt" continuation.
  if (isIntrinsicCall(CS, Intrinsic::experimental_guard))
    return MRI_Ref;

  // Like assumes, invariant.start intrinsics were also marked as arbitrarily
  // writing so that proper control dependencies are maintained but they never
  // modify any particular memory location visible to the IR.
  // *Unlike* assumes (which are now modeled as NoModRef), the invariant.start
  // intrinsic is now modeled as reading memory. This prevents hoisting the
  // invariant.start intrinsic over stores. Consider:
  //   *ptr = 40;
  //   *ptr = 50;
  //   invariant_start(ptr)
  //   int val = *ptr;
  //   print(val);
  //
  // This cannot be transformed to:
  //
  //   *ptr = 40;
  //   invariant_start(ptr)
  //   *ptr = 50;
  //   int val = *ptr;
  //   print(val);
  //
  // The transformation would cause the second store to be ignored (based on
  // the rules of invariant.start) and print 40, while the first program
  // always prints 50.
  if (isIntrinsicCall(CS, Intrinsic::invariant_start))
    return MRI_Ref;

  // The AAResultBase base class has some smarts; let's use them.
  return AAResultBase::getModRefInfo(CS, Loc);
}

ModRefInfo BasicAAResult::getModRefInfo(ImmutableCallSite CS1,
                                        ImmutableCallSite CS2) {
  // While the assume intrinsic is marked as arbitrarily writing so that
  // proper control dependencies will be maintained, it never aliases any
  // particular memory location.
  if (isIntrinsicCall(CS1, Intrinsic::assume) ||
      isIntrinsicCall(CS2, Intrinsic::assume))
    return MRI_NoModRef;

  // Like assumes, guard intrinsics are also marked as arbitrarily writing so
  // that proper control dependencies are maintained but they never modify any
  // particular memory location.
  //
  // *Unlike* assumes, guard intrinsics are modeled as reading memory since the
  // heap state at the point the guard is issued needs to be consistent in case
  // the guard invokes the "deopt" continuation.

  // NB! This function is *not* commutative, so we special case two
  // possibilities for guard intrinsics.

  if (isIntrinsicCall(CS1, Intrinsic::experimental_guard))
    return getModRefBehavior(CS2) & MRI_Mod ? MRI_Ref : MRI_NoModRef;

  if (isIntrinsicCall(CS2, Intrinsic::experimental_guard))
    return getModRefBehavior(CS1) & MRI_Mod ? MRI_Mod : MRI_NoModRef;

  // The AAResultBase base class has some smarts; let's use them.
  return AAResultBase::getModRefInfo(CS1, CS2);
}

/// Provide ad-hoc rules to disambiguate accesses through two GEP operators,
/// both having the exact same pointer operand.
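///
/// A hedged sketch (added for exposition; %struct.S is hypothetical): given
///   %a = getelementptr %struct.S, %struct.S* %p, i64 %i, i32 0
///   %b = getelementptr %struct.S, %struct.S* %p, i64 %j, i32 1
/// the differing constant field indices can prove the accesses disjoint
/// even though %i and %j are unknown.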
NAKAMURA Takumi76bab1f2017-07-11 02:31:51 +0000943static AliasResult aliasSameBasePointerGEPs(const GEPOperator *GEP1,
944 uint64_t V1Size,
945 const GEPOperator *GEP2,
946 uint64_t V2Size,
947 const DataLayout &DL) {
Piotr Padlewski610c9662017-04-24 19:37:17 +0000948 assert(GEP1->getPointerOperand()->stripPointerCastsAndBarriers() ==
949 GEP2->getPointerOperand()->stripPointerCastsAndBarriers() &&
Sanjoy Dasf09c1e32017-04-18 22:00:54 +0000950 GEP1->getPointerOperandType() == GEP2->getPointerOperandType() &&
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000951 "Expected GEPs with the same pointer operand");
952
953 // Try to determine whether GEP1 and GEP2 index through arrays, into structs,
954 // such that the struct field accesses provably cannot alias.
955 // We also need at least two indices (the pointer, and the struct field).
Daniel Berlin73694bb2016-06-01 18:55:32 +0000956 if (GEP1->getNumIndices() != GEP2->getNumIndices() ||
957 GEP1->getNumIndices() < 2)
Chandler Carruthc3f49eb2015-06-22 02:16:51 +0000958 return MayAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000959
960 // If we don't know the size of the accesses through both GEPs, we can't
961 // determine whether the struct fields accessed can't alias.
Chandler Carruthecbd1682015-06-17 07:21:38 +0000962 if (V1Size == MemoryLocation::UnknownSize ||
963 V2Size == MemoryLocation::UnknownSize)
Chandler Carruthc3f49eb2015-06-22 02:16:51 +0000964 return MayAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000965
966 ConstantInt *C1 =
967 dyn_cast<ConstantInt>(GEP1->getOperand(GEP1->getNumOperands() - 1));
968 ConstantInt *C2 =
969 dyn_cast<ConstantInt>(GEP2->getOperand(GEP2->getNumOperands() - 1));
970
James Molloy5a4d8cd2015-10-22 13:28:18 +0000971 // If the last (struct) indices are constants and are equal, the other indices
972  // might also be dynamically equal, so the GEPs can alias.
Vedant Kumaree202942016-05-11 15:45:43 +0000973 if (C1 && C2 && C1->getSExtValue() == C2->getSExtValue())
Chandler Carruthc3f49eb2015-06-22 02:16:51 +0000974 return MayAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000975
976 // Find the last-indexed type of the GEP, i.e., the type you'd get if
977 // you stripped the last index.
978 // On the way, look at each indexed type. If there's something other
979 // than an array, different indices can lead to different final types.
980 SmallVector<Value *, 8> IntermediateIndices;
981
982 // Insert the first index; we don't need to check the type indexed
983 // through it as it only drops the pointer indirection.
984 assert(GEP1->getNumIndices() > 1 && "Not enough GEP indices to examine");
985 IntermediateIndices.push_back(GEP1->getOperand(1));
986
987 // Insert all the remaining indices but the last one.
988 // Also, check that they all index through arrays.
989 for (unsigned i = 1, e = GEP1->getNumIndices() - 1; i != e; ++i) {
990 if (!isa<ArrayType>(GetElementPtrInst::getIndexedType(
David Blaikied288fb82015-03-30 21:41:43 +0000991 GEP1->getSourceElementType(), IntermediateIndices)))
Chandler Carruthc3f49eb2015-06-22 02:16:51 +0000992 return MayAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000993 IntermediateIndices.push_back(GEP1->getOperand(i + 1));
994 }
995
James Molloy5a4d8cd2015-10-22 13:28:18 +0000996 auto *Ty = GetElementPtrInst::getIndexedType(
997 GEP1->getSourceElementType(), IntermediateIndices);
998 StructType *LastIndexedStruct = dyn_cast<StructType>(Ty);
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +0000999
James Molloy5a4d8cd2015-10-22 13:28:18 +00001000 if (isa<SequentialType>(Ty)) {
1001 // We know that:
1002 // - both GEPs begin indexing from the exact same pointer;
1003 // - the last indices in both GEPs are constants, indexing into a sequential
1004 // type (array or pointer);
1005 // - both GEPs only index through arrays prior to that.
1006 //
1007 // Because array indices greater than the number of elements are valid in
1008 // GEPs, unless we know the intermediate indices are identical between
1009 // GEP1 and GEP2 we cannot guarantee that the last indexed arrays don't
James Molloy05a896a2015-10-23 14:17:03 +00001010 // partially overlap. We also need to check that the loaded size matches
1011 // the element size, otherwise we could still have overlap.
1012 const uint64_t ElementSize =
1013 DL.getTypeStoreSize(cast<SequentialType>(Ty)->getElementType());
1014 if (V1Size != ElementSize || V2Size != ElementSize)
1015 return MayAlias;
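    // For instance (a sketch): with 2-byte accesses into [8 x i8], an access
    // at index 1 covers bytes 1-2 and an access at index 2 covers bytes 2-3,
    // so distinct last indices alone would not have implied disjointness.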
1016
James Molloy5a4d8cd2015-10-22 13:28:18 +00001017 for (unsigned i = 0, e = GEP1->getNumIndices() - 1; i != e; ++i)
1018 if (GEP1->getOperand(i + 1) != GEP2->getOperand(i + 1))
1019 return MayAlias;
James Molloy05a896a2015-10-23 14:17:03 +00001020
James Molloy5a4d8cd2015-10-22 13:28:18 +00001021    // Now we know that the array/pointer that GEP1 indexes into and the one
1022    // that GEP2 indexes into must either precisely overlap or be disjoint.
1023 // Because they cannot partially overlap and because fields in an array
1024 // cannot overlap, if we can prove the final indices are different between
1025 // GEP1 and GEP2, we can conclude GEP1 and GEP2 don't alias.
NAKAMURA Takumia089dd82017-07-11 02:31:54 +00001026
James Molloy5a4d8cd2015-10-22 13:28:18 +00001027 // If the last indices are constants, we've already checked they don't
1028 // equal each other so we can exit early.
1029 if (C1 && C2)
1030 return NoAlias;
Craig Topper58752542017-06-15 17:16:56 +00001031 {
1032 Value *GEP1LastIdx = GEP1->getOperand(GEP1->getNumOperands() - 1);
1033 Value *GEP2LastIdx = GEP2->getOperand(GEP2->getNumOperands() - 1);
NAKAMURA Takumi76bab1f2017-07-11 02:31:51 +00001034 if (isa<PHINode>(GEP1LastIdx) || isa<PHINode>(GEP2LastIdx)) {
Craig Topper58752542017-06-15 17:16:56 +00001035 // If one of the indices is a PHI node, be safe and only use
1036 // computeKnownBits so we don't make any assumptions about the
1037 // relationships between the two indices. This is important if we're
1038 // asking about values from different loop iterations. See PR32314.
1039 // TODO: We may be able to change the check so we only do this when
1040 // we definitely looked through a PHINode.
Craig Topperd3711ee2017-06-22 19:04:14 +00001041 if (GEP1LastIdx != GEP2LastIdx &&
1042 GEP1LastIdx->getType() == GEP2LastIdx->getType()) {
1043 KnownBits Known1 = computeKnownBits(GEP1LastIdx, DL);
1044 KnownBits Known2 = computeKnownBits(GEP2LastIdx, DL);
1045 if (Known1.Zero.intersects(Known2.One) ||
1046 Known1.One.intersects(Known2.Zero))
1047 return NoAlias;
1048 }
Craig Topper58752542017-06-15 17:16:56 +00001049 } else if (isKnownNonEqual(GEP1LastIdx, GEP2LastIdx, DL))
1050 return NoAlias;
1051 }
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001052 return MayAlias;
James Molloy5a4d8cd2015-10-22 13:28:18 +00001053 } else if (!LastIndexedStruct || !C1 || !C2) {
1054 return MayAlias;
1055 }
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +00001056
1057 // We know that:
1058 // - both GEPs begin indexing from the exact same pointer;
1059 // - the last indices in both GEPs are constants, indexing into a struct;
1060 // - said indices are different, hence, the pointed-to fields are different;
1061 // - both GEPs only index through arrays prior to that.
1062 //
1063 // This lets us determine that the struct that GEP1 indexes into and the
1064 // struct that GEP2 indexes into must either precisely overlap or be
1065 // completely disjoint. Because they cannot partially overlap, indexing into
1066 // different non-overlapping fields of the struct will never alias.
1067
1068 // Therefore, the only remaining thing needed to show that both GEPs can't
1069 // alias is that the fields are not overlapping.
1070 const StructLayout *SL = DL.getStructLayout(LastIndexedStruct);
1071 const uint64_t StructSize = SL->getSizeInBytes();
1072 const uint64_t V1Off = SL->getElementOffset(C1->getZExtValue());
1073 const uint64_t V2Off = SL->getElementOffset(C2->getZExtValue());
1074
1075 auto EltsDontOverlap = [StructSize](uint64_t V1Off, uint64_t V1Size,
1076 uint64_t V2Off, uint64_t V2Size) {
1077 return V1Off < V2Off && V1Off + V1Size <= V2Off &&
1078 ((V2Off + V2Size <= StructSize) ||
1079 (V2Off + V2Size - StructSize <= V1Off));
1080 };
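  // For example (a sketch): in struct { i32 f0; i32 f1; } with 4-byte
  // accesses, V1Off = 0 and V2Off = 4 give 0 < 4, 0 + 4 <= 4, and
  // 4 + 4 <= 8 == StructSize, so the two fields provably do not overlap.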
1081
1082 if (EltsDontOverlap(V1Off, V1Size, V2Off, V2Size) ||
1083 EltsDontOverlap(V2Off, V2Size, V1Off, V1Size))
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001084 return NoAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +00001085
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001086 return MayAlias;
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +00001087}
1088
Michael Kuperstein82069c42016-05-25 22:23:08 +00001089// If we have (a) a GEP and (b) a pointer based on an alloca, and the
1090// beginning of the object the GEP points into would have a negative offset
1091// with respect to the alloca, that means the GEP cannot alias pointer (b).
1092// Note that the pointer based on the alloca may not be a GEP. For
1093// example, it may be the alloca itself.
Michael Kupersteinae214912016-05-26 19:30:49 +00001094// The same applies if (b) is based on a GlobalVariable. Note that just being
1095// based on isIdentifiedObject() is not enough - we need an identified object
1096// that does not permit access to negative offsets. For example, a negative
1097// offset from a noalias argument or call can be inbounds w.r.t the actual
1098// underlying object.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001099//
1100// For example, consider:
1101//
1102//    struct foo { int f0; int f1; ... };
1103//    foo alloca;
1104//    foo *random = bar(&alloca);
1105//    int *f0 = &alloca.f0;
1106//    int *f1 = &random->f1;
1107//
1108// Which is lowered, approximately, to:
1109//
1110// %alloca = alloca %struct.foo
1111// %random = call %struct.foo* @random(%struct.foo* %alloca)
1112// %f0 = getelementptr inbounds %struct, %struct.foo* %alloca, i32 0, i32 0
1113// %f1 = getelementptr inbounds %struct, %struct.foo* %random, i32 0, i32 1
1114//
1115// Assume %f1 and %f0 alias. Then %f1 would point into the object allocated
1116// by %alloca. Since the %f1 GEP is inbounds, that means %random must also
1117// point into the same object. But since %f0 points to the beginning of %alloca,
1118// the highest %f1 can be is (%alloca + 3). This means %random cannot be higher
1119// than (%alloca - 1), and so is not inbounds, a contradiction.
1120bool BasicAAResult::isGEPBaseAtNegativeOffset(const GEPOperator *GEPOp,
Michael Kupersteinae214912016-05-26 19:30:49 +00001121 const DecomposedGEP &DecompGEP, const DecomposedGEP &DecompObject,
1122 uint64_t ObjectAccessSize) {
1123 // If the object access size is unknown, or the GEP isn't inbounds, bail.
1124 if (ObjectAccessSize == MemoryLocation::UnknownSize || !GEPOp->isInBounds())
Michael Kuperstein82069c42016-05-25 22:23:08 +00001125 return false;
1126
Michael Kupersteinae214912016-05-26 19:30:49 +00001127  // We need the object to be an alloca or a GlobalVariable, and want to know
1128 // the offset of the pointer from the object precisely, so no variable
1129 // indices are allowed.
1130 if (!(isa<AllocaInst>(DecompObject.Base) ||
1131 isa<GlobalVariable>(DecompObject.Base)) ||
1132 !DecompObject.VarIndices.empty())
Michael Kuperstein82069c42016-05-25 22:23:08 +00001133 return false;
1134
Michael Kupersteinae214912016-05-26 19:30:49 +00001135 int64_t ObjectBaseOffset = DecompObject.StructOffset +
1136 DecompObject.OtherOffset;
Michael Kuperstein82069c42016-05-25 22:23:08 +00001137
1138 // If the GEP has no variable indices, we know the precise offset
1139  // from the base and can use it. If the GEP has variable indices, we're in
1140 // a bit more trouble: we can't count on the constant offsets that come
1141 // from non-struct sources, since these can be "rewound" by a negative
1142 // variable offset. So use only offsets that came from structs.
1143 int64_t GEPBaseOffset = DecompGEP.StructOffset;
1144 if (DecompGEP.VarIndices.empty())
1145 GEPBaseOffset += DecompGEP.OtherOffset;
1146
Michael Kupersteinae214912016-05-26 19:30:49 +00001147 return (GEPBaseOffset >= ObjectBaseOffset + (int64_t)ObjectAccessSize);
Michael Kuperstein82069c42016-05-25 22:23:08 +00001148}
1149
Chandler Carruthc5d81122015-08-06 08:17:06 +00001150/// Provides a bunch of ad-hoc rules to disambiguate a GEP instruction against
1151/// another pointer.
Chris Lattnera99edbe2009-11-26 02:11:08 +00001152///
Chandler Carruthc5d81122015-08-06 08:17:06 +00001153/// We know that V1 is a GEP, but we don't know anything about V2.
1154/// UnderlyingV1 is GetUnderlyingObject(GEP1, DL), UnderlyingV2 is the same for
1155/// V2.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001156AliasResult BasicAAResult::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
1157 const AAMDNodes &V1AAInfo, const Value *V2,
1158 uint64_t V2Size, const AAMDNodes &V2AAInfo,
1159 const Value *UnderlyingV1,
1160 const Value *UnderlyingV2) {
Michael Kuperstein82069c42016-05-25 22:23:08 +00001161 DecomposedGEP DecompGEP1, DecompGEP2;
1162 bool GEP1MaxLookupReached =
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001163 DecomposeGEPExpression(GEP1, DecompGEP1, DL, &AC, DT);
Michael Kuperstein82069c42016-05-25 22:23:08 +00001164 bool GEP2MaxLookupReached =
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001165 DecomposeGEPExpression(V2, DecompGEP2, DL, &AC, DT);
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001166
Michael Kuperstein82069c42016-05-25 22:23:08 +00001167 int64_t GEP1BaseOffset = DecompGEP1.StructOffset + DecompGEP1.OtherOffset;
1168 int64_t GEP2BaseOffset = DecompGEP2.StructOffset + DecompGEP2.OtherOffset;
1169
1170 assert(DecompGEP1.Base == UnderlyingV1 && DecompGEP2.Base == UnderlyingV2 &&
1171 "DecomposeGEPExpression returned a result different from "
1172 "GetUnderlyingObject");
1173
1174 // If the GEP's offset relative to its base is such that the base would
1175 // fall below the start of the object underlying V2, then the GEP and V2
1176 // cannot alias.
1177 if (!GEP1MaxLookupReached && !GEP2MaxLookupReached &&
1178 isGEPBaseAtNegativeOffset(GEP1, DecompGEP1, DecompGEP2, V2Size))
1179 return NoAlias;
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001180  // If we have two GEP instructions with must-alias or non-aliasing base
1181  // pointers, figure out if the indices of the GEPs tell us anything about the
1182  // derived pointers.
Chris Lattnera99edbe2009-11-26 02:11:08 +00001183 if (const GEPOperator *GEP2 = dyn_cast<GEPOperator>(V2)) {
Michael Kuperstein82069c42016-05-25 22:23:08 +00001184 // Check for the GEP base being at a negative offset, this time in the other
1185 // direction.
1186 if (!GEP1MaxLookupReached && !GEP2MaxLookupReached &&
1187 isGEPBaseAtNegativeOffset(GEP2, DecompGEP2, DecompGEP1, V1Size))
1188 return NoAlias;
Arnold Schwaighoferaadf1042013-03-26 18:07:53 +00001189 // Do the base pointers alias?
Chandler Carruthecbd1682015-06-17 07:21:38 +00001190 AliasResult BaseAlias =
1191 aliasCheck(UnderlyingV1, MemoryLocation::UnknownSize, AAMDNodes(),
1192 UnderlyingV2, MemoryLocation::UnknownSize, AAMDNodes());
Arnold Schwaighoferaadf1042013-03-26 18:07:53 +00001193
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001194    // Check for GEPs of non-aliasing underlying pointers where the offsets are
1195 // identical.
Arnold Schwaighoferaadf1042013-03-26 18:07:53 +00001196 if ((BaseAlias == MayAlias) && V1Size == V2Size) {
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001197 // Do the base pointers alias assuming type and size.
Chandler Carruth903c5f92015-08-06 07:57:58 +00001198 AliasResult PreciseBaseAlias = aliasCheck(UnderlyingV1, V1Size, V1AAInfo,
1199 UnderlyingV2, V2Size, V2AAInfo);
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001200 if (PreciseBaseAlias == NoAlias) {
1201 // See if the computed offset from the common pointer tells us about the
1202 // relation of the resulting pointer.
Arnold Schwaighofer1a444482014-03-26 21:30:19 +00001203        // If the max search depth is reached, the result is undefined.
1204 if (GEP2MaxLookupReached || GEP1MaxLookupReached)
1205 return MayAlias;
1206
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001207 // Same offsets.
1208 if (GEP1BaseOffset == GEP2BaseOffset &&
Michael Kuperstein82069c42016-05-25 22:23:08 +00001209 DecompGEP1.VarIndices == DecompGEP2.VarIndices)
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001210 return NoAlias;
Arnold Schwaighofer76dca582012-09-06 14:31:51 +00001211 }
1212 }
Jakub Staszak07f383f2013-08-24 14:16:00 +00001213
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001214 // If we get a No or May, then return it immediately, no amount of analysis
1215 // will improve this situation.
Nuno Lopesc7d41102017-08-08 16:13:24 +00001216 if (BaseAlias != MustAlias) {
1217 assert(BaseAlias == NoAlias || BaseAlias == MayAlias);
Chandler Carruth903c5f92015-08-06 07:57:58 +00001218 return BaseAlias;
Nuno Lopesc7d41102017-08-08 16:13:24 +00001219 }
Jakub Staszak07f383f2013-08-24 14:16:00 +00001220
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001221 // Otherwise, we have a MustAlias. Since the base pointers alias each other
1222 // exactly, see if the computed offset from the common pointer tells us
1223 // about the relation of the resulting pointer.
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +00001224 // If we know the two GEPs are based off of the exact same pointer (and not
1225 // just the same underlying object), see if that tells us anything about
1226 // the resulting pointers.
Piotr Padlewski610c9662017-04-24 19:37:17 +00001227 if (GEP1->getPointerOperand()->stripPointerCastsAndBarriers() ==
1228 GEP2->getPointerOperand()->stripPointerCastsAndBarriers() &&
Sanjoy Dasf09c1e32017-04-18 22:00:54 +00001229 GEP1->getPointerOperandType() == GEP2->getPointerOperandType()) {
Chandler Carruth7b560d42015-09-09 17:55:00 +00001230 AliasResult R = aliasSameBasePointerGEPs(GEP1, V1Size, GEP2, V2Size, DL);
Ahmed Bougacha29efe3b2015-02-07 17:04:29 +00001231 // If we couldn't find anything interesting, don't abandon just yet.
1232 if (R != MayAlias)
1233 return R;
1234 }
1235
Sanjay Patel9613b292016-01-17 23:13:48 +00001236 // If the max search depth is reached, the result is undefined
Arnold Schwaighofer1a444482014-03-26 21:30:19 +00001237 if (GEP2MaxLookupReached || GEP1MaxLookupReached)
1238 return MayAlias;
Jakub Staszak07f383f2013-08-24 14:16:00 +00001239
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001240 // Subtract the GEP2 pointer from the GEP1 pointer to find out their
1241 // symbolic difference.
1242 GEP1BaseOffset -= GEP2BaseOffset;
Michael Kuperstein82069c42016-05-25 22:23:08 +00001243 GetIndexDifference(DecompGEP1.VarIndices, DecompGEP2.VarIndices);
Jakub Staszak07f383f2013-08-24 14:16:00 +00001244
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001245 } else {
1246 // Check to see if these two pointers are related by the getelementptr
1247 // instruction. If one pointer is a GEP with a non-zero index of the other
1248 // pointer, we know they cannot alias.
Chris Lattner5c1cfc22009-11-26 16:52:32 +00001249
1250 // If both accesses are unknown size, we can't do anything useful here.
Chandler Carruthecbd1682015-06-17 07:21:38 +00001251 if (V1Size == MemoryLocation::UnknownSize &&
1252 V2Size == MemoryLocation::UnknownSize)
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001253 return MayAlias;
Chris Lattner6ea17f77f2003-12-11 22:44:13 +00001254
Chandler Carruthecbd1682015-06-17 07:21:38 +00001255 AliasResult R = aliasCheck(UnderlyingV1, MemoryLocation::UnknownSize,
Mehdi Amini1726fc62017-01-27 16:12:22 +00001256 AAMDNodes(), V2, MemoryLocation::UnknownSize,
1257 V2AAInfo, nullptr, UnderlyingV2);
Nuno Lopes598d1632017-08-08 21:25:26 +00001258 if (R != MustAlias) {
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001259      // If V2 may alias the GEP base pointer, conservatively return MayAlias.
1260      // If V2 is known not to alias the GEP base pointer, then the two values
Mehdi Amini1726fc62017-01-27 16:12:22 +00001261 // cannot alias per GEP semantics: "Any memory access must be done through
1262 // a pointer value associated with an address range of the memory access,
1263      // otherwise the behavior is undefined."
Nuno Lopes598d1632017-08-08 21:25:26 +00001264 assert(R == NoAlias || R == MayAlias);
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001265 return R;
Nuno Lopes598d1632017-08-08 21:25:26 +00001266 }
Chris Lattner6ea17f77f2003-12-11 22:44:13 +00001267
Arnold Schwaighofer1a444482014-03-26 21:30:19 +00001268    // If the max search depth is reached, the result is undefined.
1269 if (GEP1MaxLookupReached)
1270 return MayAlias;
Chris Lattner6ea17f77f2003-12-11 22:44:13 +00001271 }
Jakub Staszak07f383f2013-08-24 14:16:00 +00001272
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001273  // In the two-GEP case, if there is no difference in the offsets of the
1274  // computed pointers, the resultant pointers are a must-alias. This
Sanjay Patel9613b292016-01-17 23:13:48 +00001275  // happens when we have two lexically identical GEPs (for example).
Chris Lattnerd6a2a992003-02-26 19:41:54 +00001276  //
Chris Lattner7a5b56a2009-11-26 02:17:34 +00001277  // In the other case, if we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2
1278  // must-aliases the GEP, the end result is a must-alias as well.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001279 if (GEP1BaseOffset == 0 && DecompGEP1.VarIndices.empty())
Evan Chengc1eed9d2009-10-14 06:41:49 +00001280 return MustAlias;
Evan Chengf1f3dd32009-10-13 18:42:04 +00001281
Eli Friedman3d1b3072011-09-08 02:23:31 +00001282 // If there is a constant difference between the pointers, but the difference
1283 // is less than the size of the associated memory object, then we know
1284 // that the objects are partially overlapping. If the difference is
1285 // greater, we know they do not overlap.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001286 if (GEP1BaseOffset != 0 && DecompGEP1.VarIndices.empty()) {
Eli Friedman3d1b3072011-09-08 02:23:31 +00001287 if (GEP1BaseOffset >= 0) {
Chandler Carruthecbd1682015-06-17 07:21:38 +00001288 if (V2Size != MemoryLocation::UnknownSize) {
Eli Friedman3d1b3072011-09-08 02:23:31 +00001289 if ((uint64_t)GEP1BaseOffset < V2Size)
1290 return PartialAlias;
1291 return NoAlias;
1292 }
1293 } else {
Arnold Schwaighofere3ac0992014-01-16 04:53:18 +00001294 // We have the situation where:
1295 // + +
1296 // | BaseOffset |
1297 // ---------------->|
1298 // |-->V1Size |-------> V2Size
1299 // GEP1 V2
1300 // We need to know that V2Size is not unknown, otherwise we might have
1301      // stripped a GEP with a negative index ("gep <ptr>, -1, ...").
Chandler Carruthecbd1682015-06-17 07:21:38 +00001302 if (V1Size != MemoryLocation::UnknownSize &&
1303 V2Size != MemoryLocation::UnknownSize) {
Eli Friedman3d1b3072011-09-08 02:23:31 +00001304 if (-(uint64_t)GEP1BaseOffset < V1Size)
1305 return PartialAlias;
1306 return NoAlias;
1307 }
1308 }
Dan Gohmanc4bf5ca2010-12-13 22:50:24 +00001309 }
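  // For instance (a sketch): if GEP1 is "gep i8, i8* %p, i64 6" and V2 is %p,
  // then with V2Size == 8 the offset 6 falls inside V2's access
  // (PartialAlias), while with V2Size == 4 the accesses are disjoint
  // (NoAlias).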
1310
Michael Kuperstein82069c42016-05-25 22:23:08 +00001311 if (!DecompGEP1.VarIndices.empty()) {
Eli Friedmanb78ac542011-09-08 02:37:07 +00001312 uint64_t Modulo = 0;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001313 bool AllPositive = true;
Michael Kuperstein82069c42016-05-25 22:23:08 +00001314 for (unsigned i = 0, e = DecompGEP1.VarIndices.size(); i != e; ++i) {
Quentin Colombet5989bc62015-08-31 22:32:47 +00001315
1316 // Try to distinguish something like &A[i][1] against &A[42][0].
1317 // Grab the least significant bit set in any of the scales. We
1318 // don't need std::abs here (even if the scale's negative) as we'll
1319 // be ^'ing Modulo with itself later.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001320 Modulo |= (uint64_t)DecompGEP1.VarIndices[i].Scale;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001321
1322 if (AllPositive) {
1323 // If the Value could change between cycles, then any reasoning about
1324 // the Value this cycle may not hold in the next cycle. We'll just
1325 // give up if we can't determine conditions that hold for every cycle:
Michael Kuperstein82069c42016-05-25 22:23:08 +00001326 const Value *V = DecompGEP1.VarIndices[i].V;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001327
Craig Topper1a36b7d2017-05-15 06:39:41 +00001328 KnownBits Known = computeKnownBits(V, DL, 0, &AC, nullptr, DT);
1329 bool SignKnownZero = Known.isNonNegative();
1330 bool SignKnownOne = Known.isNegative();
Quentin Colombet5989bc62015-08-31 22:32:47 +00001331
1332 // Zero-extension widens the variable, and so forces the sign
1333 // bit to zero.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001334 bool IsZExt = DecompGEP1.VarIndices[i].ZExtBits > 0 || isa<ZExtInst>(V);
Quentin Colombet5989bc62015-08-31 22:32:47 +00001335 SignKnownZero |= IsZExt;
1336 SignKnownOne &= !IsZExt;
1337
1338 // If the variable begins with a zero then we know it's
1339 // positive, regardless of whether the value is signed or
1340 // unsigned.
Michael Kuperstein82069c42016-05-25 22:23:08 +00001341 int64_t Scale = DecompGEP1.VarIndices[i].Scale;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001342 AllPositive =
1343 (SignKnownZero && Scale >= 0) || (SignKnownOne && Scale < 0);
1344 }
1345 }
1346
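    // x ^ (x & (x - 1)) clears all but the least significant set bit (e.g.
    // 0b1100 -> 0b0100), so every variable term is a multiple of Modulo and
    // the difference of the two addresses is therefore known modulo Modulo.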
Eli Friedmanb78ac542011-09-08 02:37:07 +00001347 Modulo = Modulo ^ (Modulo & (Modulo - 1));
Eli Friedman3d1b3072011-09-08 02:23:31 +00001348
Eli Friedmanb78ac542011-09-08 02:37:07 +00001349 // We can compute the difference between the two addresses
1350 // mod Modulo. Check whether that difference guarantees that the
1351 // two locations do not alias.
1352 uint64_t ModOffset = (uint64_t)GEP1BaseOffset & (Modulo - 1);
Chandler Carruthecbd1682015-06-17 07:21:38 +00001353 if (V1Size != MemoryLocation::UnknownSize &&
1354 V2Size != MemoryLocation::UnknownSize && ModOffset >= V2Size &&
1355 V1Size <= Modulo - ModOffset)
Eli Friedmanb78ac542011-09-08 02:37:07 +00001356 return NoAlias;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001357
1358 // If we know all the variables are positive, then GEP1 >= GEP1BasePtr.
1359 // If GEP1BasePtr > V2 (GEP1BaseOffset > 0) then we know the pointers
1360 // don't alias if V2Size can fit in the gap between V2 and GEP1BasePtr.
1361 if (AllPositive && GEP1BaseOffset > 0 && V2Size <= (uint64_t)GEP1BaseOffset)
1362 return NoAlias;
1363
Michael Kuperstein82069c42016-05-25 22:23:08 +00001364 if (constantOffsetHeuristic(DecompGEP1.VarIndices, V1Size, V2Size,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001365 GEP1BaseOffset, &AC, DT))
Quentin Colombet5989bc62015-08-31 22:32:47 +00001366 return NoAlias;
Eli Friedmanb78ac542011-09-08 02:37:07 +00001367 }
Eli Friedman3d1b3072011-09-08 02:23:31 +00001368
Dan Gohmanadf80ae2011-06-04 06:50:18 +00001369 // Statically, we can see that the base objects are the same, but the
1370 // pointers have dynamic offsets which we can't resolve. And none of our
1371 // little tricks above worked.
Michael Kruse47f85602017-06-21 18:25:37 +00001372 return MayAlias;
Evan Chengf1f3dd32009-10-13 18:42:04 +00001373}
1374
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001375static AliasResult MergeAliasResults(AliasResult A, AliasResult B) {
Dan Gohman4e7e7952011-06-03 20:17:36 +00001376 // If the results agree, take it.
1377 if (A == B)
1378 return A;
1379 // A mix of PartialAlias and MustAlias is PartialAlias.
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001380 if ((A == PartialAlias && B == MustAlias) ||
1381 (B == PartialAlias && A == MustAlias))
1382 return PartialAlias;
Dan Gohman4e7e7952011-06-03 20:17:36 +00001383 // Otherwise, we don't know anything.
Chandler Carruthc3f49eb2015-06-22 02:16:51 +00001384 return MayAlias;
Dan Gohman4e7e7952011-06-03 20:17:36 +00001385}
1386
Chandler Carruthc5d81122015-08-06 08:17:06 +00001387/// Provides a bunch of ad-hoc rules to disambiguate a Select instruction
1388/// against another.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001389AliasResult BasicAAResult::aliasSelect(const SelectInst *SI, uint64_t SISize,
1390 const AAMDNodes &SIAAInfo,
1391 const Value *V2, uint64_t V2Size,
Ehsan Amiri17e17012016-08-12 16:05:03 +00001392 const AAMDNodes &V2AAInfo,
1393 const Value *UnderV2) {
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001394 // If the values are Selects with the same condition, we can do a more precise
1395 // check: just check for aliases between the values on corresponding arms.
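  // For example (a sketch):
  //   %s1 = select i1 %c, i32* %a, i32* %b
  //   %s2 = select i1 %c, i32* %x, i32* %y
  // alias(%s1, %s2) merges alias(%a, %x) and alias(%b, %y); the mixed pairs
  // (%a, %y) and (%b, %x) can never be selected together.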
1396 if (const SelectInst *SI2 = dyn_cast<SelectInst>(V2))
1397 if (SI->getCondition() == SI2->getCondition()) {
Chandler Carruth903c5f92015-08-06 07:57:58 +00001398 AliasResult Alias = aliasCheck(SI->getTrueValue(), SISize, SIAAInfo,
1399 SI2->getTrueValue(), V2Size, V2AAInfo);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001400 if (Alias == MayAlias)
1401 return MayAlias;
1402 AliasResult ThisAlias =
Chandler Carruth903c5f92015-08-06 07:57:58 +00001403 aliasCheck(SI->getFalseValue(), SISize, SIAAInfo,
1404 SI2->getFalseValue(), V2Size, V2AAInfo);
Dan Gohman4e7e7952011-06-03 20:17:36 +00001405 return MergeAliasResults(ThisAlias, Alias);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001406 }
1407
1408  // If both arms of the Select node NoAlias or MustAlias V2, then we return
1409  // NoAlias / MustAlias. Otherwise, we return MayAlias.
1410 AliasResult Alias =
Ehsan Amiri17e17012016-08-12 16:05:03 +00001411 aliasCheck(V2, V2Size, V2AAInfo, SI->getTrueValue(),
1412 SISize, SIAAInfo, UnderV2);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001413 if (Alias == MayAlias)
1414 return MayAlias;
Dan Gohman7c34ece2010-06-28 21:16:52 +00001415
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001416 AliasResult ThisAlias =
Ehsan Amiri17e17012016-08-12 16:05:03 +00001417 aliasCheck(V2, V2Size, V2AAInfo, SI->getFalseValue(), SISize, SIAAInfo,
1418 UnderV2);
Dan Gohman4e7e7952011-06-03 20:17:36 +00001419 return MergeAliasResults(ThisAlias, Alias);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001420}
1421
Chandler Carruthc5d81122015-08-06 08:17:06 +00001422/// Provide a bunch of ad-hoc rules to disambiguate a PHI instruction against
1423/// another.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001424AliasResult BasicAAResult::aliasPHI(const PHINode *PN, uint64_t PNSize,
1425 const AAMDNodes &PNAAInfo, const Value *V2,
Ehsan Amiri17e17012016-08-12 16:05:03 +00001426 uint64_t V2Size, const AAMDNodes &V2AAInfo,
1427 const Value *UnderV2) {
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001428 // Track phi nodes we have visited. We use this information when we determine
1429 // value equivalence.
1430 VisitedPhiBBs.insert(PN->getParent());
1431
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001432 // If the values are PHIs in the same block, we can do a more precise
1433  // and more efficient check: just check for aliases between the values
1434 // on corresponding edges.
1435 if (const PHINode *PN2 = dyn_cast<PHINode>(V2))
1436 if (PN2->getParent() == PN->getParent()) {
Chandler Carruthac80dc72015-06-17 07:18:54 +00001437 LocPair Locs(MemoryLocation(PN, PNSize, PNAAInfo),
1438 MemoryLocation(V2, V2Size, V2AAInfo));
Arnold Schwaighofer8dc34cf2012-09-06 14:41:53 +00001439 if (PN > V2)
1440 std::swap(Locs.first, Locs.second);
Arnold Schwaighoferedd62b12012-12-10 23:02:41 +00001441      // Analyze the PHIs' inputs under the assumption that the PHIs are
1442 // NoAlias.
1443 // If the PHIs are May/MustAlias there must be (recursively) an input
1444 // operand from outside the PHIs' cycle that is MayAlias/MustAlias or
1445 // there must be an operation on the PHIs within the PHIs' value cycle
1446 // that causes a MayAlias.
1447 // Pretend the phis do not alias.
1448 AliasResult Alias = NoAlias;
1449 assert(AliasCache.count(Locs) &&
1450 "There must exist an entry for the phi node");
1451 AliasResult OrigAliasResult = AliasCache[Locs];
1452 AliasCache[Locs] = NoAlias;
Arnold Schwaighofer8dc34cf2012-09-06 14:41:53 +00001453
Hal Finkela6f86fc2012-11-17 02:33:15 +00001454 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001455 AliasResult ThisAlias =
Chandler Carruth903c5f92015-08-06 07:57:58 +00001456 aliasCheck(PN->getIncomingValue(i), PNSize, PNAAInfo,
1457 PN2->getIncomingValueForBlock(PN->getIncomingBlock(i)),
1458 V2Size, V2AAInfo);
Dan Gohman4e7e7952011-06-03 20:17:36 +00001459 Alias = MergeAliasResults(ThisAlias, Alias);
1460 if (Alias == MayAlias)
1461 break;
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001462 }
Arnold Schwaighofer8dc34cf2012-09-06 14:41:53 +00001463
1464 // Reset if speculation failed.
Arnold Schwaighoferedd62b12012-12-10 23:02:41 +00001465 if (Alias != NoAlias)
Arnold Schwaighofer8dc34cf2012-09-06 14:41:53 +00001466 AliasCache[Locs] = OrigAliasResult;
1467
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001468 return Alias;
1469 }
1470
Chandler Carruth903c5f92015-08-06 07:57:58 +00001471 SmallPtrSet<Value *, 4> UniqueSrc;
1472 SmallVector<Value *, 4> V1Srcs;
Tobias Edler von Kochd8ce16b2015-07-15 19:32:22 +00001473 bool isRecursive = false;
Pete Cooper833f34d2015-05-12 20:05:31 +00001474 for (Value *PV1 : PN->incoming_values()) {
Evan Chengc10e88d2009-10-13 22:02:20 +00001475 if (isa<PHINode>(PV1))
1476      // If any of the sources is itself a PHI, return MayAlias conservatively
Evan Chengc1eed9d2009-10-14 06:41:49 +00001477      // to avoid compile-time explosion. The worst possible case is when both
1478      // sides are PHI nodes, in which case this is O(m x n) time, where 'm'
1479      // and 'n' are the numbers of PHI sources.
Evan Chengc10e88d2009-10-13 22:02:20 +00001480 return MayAlias;
Tobias Edler von Kochd8ce16b2015-07-15 19:32:22 +00001481
1482 if (EnableRecPhiAnalysis)
1483 if (GEPOperator *PV1GEP = dyn_cast<GEPOperator>(PV1)) {
1484 // Check whether the incoming value is a GEP that advances the pointer
1485 // result of this PHI node (e.g. in a loop). If this is the case, we
1486 // would recurse and always get a MayAlias. Handle this case specially
1487 // below.
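        // For example (a sketch):
        //   %p      = phi i8* [ %base, %entry ], [ %p.next, %loop ]
        //   %p.next = getelementptr inbounds i8, i8* %p, i64 1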
1488 if (PV1GEP->getPointerOperand() == PN && PV1GEP->getNumIndices() == 1 &&
1489 isa<ConstantInt>(PV1GEP->idx_begin())) {
1490 isRecursive = true;
1491 continue;
1492 }
1493 }
1494
David Blaikie70573dc2014-11-19 07:49:26 +00001495 if (UniqueSrc.insert(PV1).second)
Evan Chengc10e88d2009-10-13 22:02:20 +00001496 V1Srcs.push_back(PV1);
1497 }
1498
Tobias Edler von Kochd8ce16b2015-07-15 19:32:22 +00001499 // If this PHI node is recursive, set the size of the accessed memory to
1500 // unknown to represent all the possible values the GEP could advance the
1501 // pointer to.
1502 if (isRecursive)
1503 PNSize = MemoryLocation::UnknownSize;
1504
Chandler Carruth903c5f92015-08-06 07:57:58 +00001505 AliasResult Alias =
Ehsan Amiri17e17012016-08-12 16:05:03 +00001506 aliasCheck(V2, V2Size, V2AAInfo, V1Srcs[0],
1507 PNSize, PNAAInfo, UnderV2);
Tobias Edler von Kochd8ce16b2015-07-15 19:32:22 +00001508
Evan Chengf92f5552009-10-14 05:22:03 +00001509 // Early exit if the check of the first PHI source against V2 is MayAlias.
1510  // Merging any other result with MayAlias still yields MayAlias.
1511 if (Alias == MayAlias)
1512 return MayAlias;
1513
Evan Chengc10e88d2009-10-13 22:02:20 +00001514  // If all sources of the PHI node NoAlias or MustAlias V2, then we return
1515  // NoAlias / MustAlias. Otherwise, we return MayAlias.
Evan Chengc10e88d2009-10-13 22:02:20 +00001516 for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
1517 Value *V = V1Srcs[i];
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001518
Chandler Carruth903c5f92015-08-06 07:57:58 +00001519 AliasResult ThisAlias =
Ehsan Amiri17e17012016-08-12 16:05:03 +00001520 aliasCheck(V2, V2Size, V2AAInfo, V, PNSize, PNAAInfo, UnderV2);
Dan Gohman4e7e7952011-06-03 20:17:36 +00001521 Alias = MergeAliasResults(ThisAlias, Alias);
1522 if (Alias == MayAlias)
1523 break;
Evan Chengc10e88d2009-10-13 22:02:20 +00001524 }
1525
1526 return Alias;
1527}
1528
David Majnemer0345b0f2015-11-17 08:15:08 +00001529/// Provides a bunch of ad-hoc rules to disambiguate in common cases, such as
Chandler Carruthc5d81122015-08-06 08:17:06 +00001530/// array references.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001531AliasResult BasicAAResult::aliasCheck(const Value *V1, uint64_t V1Size,
1532 AAMDNodes V1AAInfo, const Value *V2,
Ehsan Amiri17e17012016-08-12 16:05:03 +00001533 uint64_t V2Size, AAMDNodes V2AAInfo,
1534 const Value *O1, const Value *O2) {
Dan Gohmancb45bd92010-04-08 18:11:50 +00001535 // If either of the memory references is empty, it doesn't matter what the
1536 // pointer values are.
1537 if (V1Size == 0 || V2Size == 0)
1538 return NoAlias;
1539
Evan Chengf1f3dd32009-10-13 18:42:04 +00001540 // Strip off any casts if they exist.
Piotr Padlewski610c9662017-04-24 19:37:17 +00001541 V1 = V1->stripPointerCastsAndBarriers();
1542 V2 = V2->stripPointerCastsAndBarriers();
Evan Chengf1f3dd32009-10-13 18:42:04 +00001543
Daniel Berlin3459d6e2015-05-05 18:10:49 +00001544 // If V1 or V2 is undef, the result is NoAlias because we can always pick a
1545 // value for undef that aliases nothing in the program.
1546 if (isa<UndefValue>(V1) || isa<UndefValue>(V2))
1547 return NoAlias;
1548
Evan Chengf1f3dd32009-10-13 18:42:04 +00001549 // Are we checking for alias of the same value?
Sanjay Patel9613b292016-01-17 23:13:48 +00001550 // Because we look 'through' phi nodes, we could look at "Value" pointers from
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +00001551 // different iterations. We must therefore make sure that this is not the
1552 // case. The function isValueEqualInPotentialCycles ensures that this cannot
1553 // happen by looking at the visited phi nodes and making sure they cannot
1554 // reach the value.
1555 if (isValueEqualInPotentialCycles(V1, V2))
1556 return MustAlias;
Evan Chengf1f3dd32009-10-13 18:42:04 +00001557
Duncan Sands19d0b472010-02-16 11:11:14 +00001558 if (!V1->getType()->isPointerTy() || !V2->getType()->isPointerTy())
Chandler Carruth903c5f92015-08-06 07:57:58 +00001559 return NoAlias; // Scalars cannot alias each other
Evan Chengf1f3dd32009-10-13 18:42:04 +00001560
1561 // Figure out what objects these things are pointing to if we can.
Ehsan Amiri17e17012016-08-12 16:05:03 +00001562 if (O1 == nullptr)
1563 O1 = GetUnderlyingObject(V1, DL, MaxLookupSearchDepth);
1564
1565 if (O2 == nullptr)
1566 O2 = GetUnderlyingObject(V2, DL, MaxLookupSearchDepth);
Evan Chengf1f3dd32009-10-13 18:42:04 +00001567
Dan Gohmanccb45842009-11-09 19:29:11 +00001568 // Null values in the default address space don't point to any object, so they
1569 // don't alias any other pointer.
1570 if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O1))
1571 if (CPN->getType()->getAddressSpace() == 0)
1572 return NoAlias;
1573 if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O2))
1574 if (CPN->getType()->getAddressSpace() == 0)
1575 return NoAlias;
1576
Evan Chengf1f3dd32009-10-13 18:42:04 +00001577 if (O1 != O2) {
Sanjay Patel9613b292016-01-17 23:13:48 +00001578 // If V1/V2 point to two different objects, we know that we have no alias.
Dan Gohman00ef9322010-07-07 14:27:09 +00001579 if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
Evan Chengf1f3dd32009-10-13 18:42:04 +00001580 return NoAlias;
Nick Lewyckyc53e2ec2009-11-14 06:15:14 +00001581
1582 // Constant pointers can't alias with non-const isIdentifiedObject objects.
Dan Gohman00ef9322010-07-07 14:27:09 +00001583 if ((isa<Constant>(O1) && isIdentifiedObject(O2) && !isa<Constant>(O2)) ||
1584 (isa<Constant>(O2) && isIdentifiedObject(O1) && !isa<Constant>(O1)))
Nick Lewyckyc53e2ec2009-11-14 06:15:14 +00001585 return NoAlias;
1586
Michael Kupersteinf3e663a2013-05-28 08:17:48 +00001587 // Function arguments can't alias with things that are known to be
1588    // unambiguously identified at the function level.
1589 if ((isa<Argument>(O1) && isIdentifiedFunctionLocal(O2)) ||
1590 (isa<Argument>(O2) && isIdentifiedFunctionLocal(O1)))
Dan Gohman84f90a32010-07-01 20:08:40 +00001591 return NoAlias;
Evan Chengf1f3dd32009-10-13 18:42:04 +00001592
1593 // Most objects can't alias null.
Dan Gohman00ef9322010-07-07 14:27:09 +00001594 if ((isa<ConstantPointerNull>(O2) && isKnownNonNull(O1)) ||
1595 (isa<ConstantPointerNull>(O1) && isKnownNonNull(O2)))
Evan Chengf1f3dd32009-10-13 18:42:04 +00001596 return NoAlias;
Jakub Staszak07f383f2013-08-24 14:16:00 +00001597
Dan Gohman5b0a8a82010-07-07 14:30:04 +00001598 // If one pointer is the result of a call/invoke or load and the other is a
1599 // non-escaping local object within the same function, then we know the
1600 // object couldn't escape to a point where the call could return it.
1601 //
1602 // Note that if the pointers are in different functions, there are a
1603 // variety of complications. A call with a nocapture argument may still
1604    // temporarily store the nocapture argument's value in a temporary memory
1605 // location if that memory location doesn't escape. Or it may pass a
1606 // nocapture value to other functions as long as they don't capture it.
1607 if (isEscapeSource(O1) && isNonEscapingLocalObject(O2))
1608 return NoAlias;
1609 if (isEscapeSource(O2) && isNonEscapingLocalObject(O1))
1610 return NoAlias;
1611 }
1612
Evan Chengf1f3dd32009-10-13 18:42:04 +00001613 // If the size of one access is larger than the entire object on the other
1614 // side, then we know such behavior is undefined and can assume no alias.
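  // For instance (a sketch): a 16-byte access cannot lie entirely within an
  // 8-byte alloca, so if the object on one side is smaller than the access
  // size on the other, the two pointers cannot reference the same object.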
Chandler Carruth7b560d42015-09-09 17:55:00 +00001615 if ((V1Size != MemoryLocation::UnknownSize &&
1616 isObjectSmallerThan(O2, V1Size, DL, TLI)) ||
1617 (V2Size != MemoryLocation::UnknownSize &&
1618 isObjectSmallerThan(O1, V2Size, DL, TLI)))
1619 return NoAlias;
Jakub Staszak07f383f2013-08-24 14:16:00 +00001620
Dan Gohmanfb02cec2011-06-04 00:31:50 +00001621 // Check the cache before climbing up use-def chains. This also terminates
1622 // otherwise infinitely recursive queries.
Chandler Carruthac80dc72015-06-17 07:18:54 +00001623 LocPair Locs(MemoryLocation(V1, V1Size, V1AAInfo),
1624 MemoryLocation(V2, V2Size, V2AAInfo));
Dan Gohmanfb02cec2011-06-04 00:31:50 +00001625 if (V1 > V2)
1626 std::swap(Locs.first, Locs.second);
1627 std::pair<AliasCacheTy::iterator, bool> Pair =
Chandler Carruth903c5f92015-08-06 07:57:58 +00001628 AliasCache.insert(std::make_pair(Locs, MayAlias));
Dan Gohmanfb02cec2011-06-04 00:31:50 +00001629 if (!Pair.second)
1630 return Pair.first->second;
1631
Chris Lattner89288992009-11-26 02:13:03 +00001632 // FIXME: This isn't aggressively handling alias(GEP, PHI) for example: if the
1633 // GEP can't simplify, we don't even look at the PHI cases.
Chris Lattnerb2647b92009-10-17 23:48:54 +00001634 if (!isa<GEPOperator>(V1) && isa<GEPOperator>(V2)) {
Chris Lattnerd6a2a992003-02-26 19:41:54 +00001635 std::swap(V1, V2);
1636 std::swap(V1Size, V2Size);
Chris Lattner5341c962009-11-26 02:14:59 +00001637 std::swap(O1, O2);
Hal Finkelcc39b672014-07-24 12:16:19 +00001638 std::swap(V1AAInfo, V2AAInfo);
Chris Lattnerd6a2a992003-02-26 19:41:54 +00001639 }
Dan Gohman02538ac2010-10-18 18:04:47 +00001640 if (const GEPOperator *GV1 = dyn_cast<GEPOperator>(V1)) {
Chandler Carruth903c5f92015-08-06 07:57:58 +00001641 AliasResult Result =
1642 aliasGEP(GV1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O1, O2);
1643 if (Result != MayAlias)
1644 return AliasCache[Locs] = Result;
Dan Gohman02538ac2010-10-18 18:04:47 +00001645 }
Evan Chengc10e88d2009-10-13 22:02:20 +00001646
1647 if (isa<PHINode>(V2) && !isa<PHINode>(V1)) {
1648 std::swap(V1, V2);
Ehsan Amiri17e17012016-08-12 16:05:03 +00001649 std::swap(O1, O2);
Evan Chengc10e88d2009-10-13 22:02:20 +00001650 std::swap(V1Size, V2Size);
Hal Finkelcc39b672014-07-24 12:16:19 +00001651 std::swap(V1AAInfo, V2AAInfo);
Evan Chengc10e88d2009-10-13 22:02:20 +00001652 }
Dan Gohman02538ac2010-10-18 18:04:47 +00001653 if (const PHINode *PN = dyn_cast<PHINode>(V1)) {
Ehsan Amiri17e17012016-08-12 16:05:03 +00001654 AliasResult Result = aliasPHI(PN, V1Size, V1AAInfo,
1655 V2, V2Size, V2AAInfo, O2);
Chandler Carruth903c5f92015-08-06 07:57:58 +00001656 if (Result != MayAlias)
1657 return AliasCache[Locs] = Result;
Dan Gohman02538ac2010-10-18 18:04:47 +00001658 }
Misha Brukman01808ca2005-04-21 21:13:18 +00001659
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001660 if (isa<SelectInst>(V2) && !isa<SelectInst>(V1)) {
1661 std::swap(V1, V2);
Ehsan Amiri17e17012016-08-12 16:05:03 +00001662 std::swap(O1, O2);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001663 std::swap(V1Size, V2Size);
Hal Finkelcc39b672014-07-24 12:16:19 +00001664 std::swap(V1AAInfo, V2AAInfo);
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001665 }
Dan Gohman02538ac2010-10-18 18:04:47 +00001666 if (const SelectInst *S1 = dyn_cast<SelectInst>(V1)) {
Chandler Carruth903c5f92015-08-06 07:57:58 +00001667 AliasResult Result =
Ehsan Amiri17e17012016-08-12 16:05:03 +00001668 aliasSelect(S1, V1Size, V1AAInfo, V2, V2Size, V2AAInfo, O2);
Chandler Carruth903c5f92015-08-06 07:57:58 +00001669 if (Result != MayAlias)
1670 return AliasCache[Locs] = Result;
Dan Gohman02538ac2010-10-18 18:04:47 +00001671 }
Dan Gohman3b7ba5f2009-10-26 21:55:43 +00001672
Dan Gohman44da55b2011-01-18 21:16:06 +00001673 // If both pointers are pointing into the same object and one of them
Sanjay Patel9613b292016-01-17 23:13:48 +00001674 // accesses the entire object, then the accesses must overlap in some way.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001675 if (O1 == O2)
Chandler Carruthecbd1682015-06-17 07:21:38 +00001676 if ((V1Size != MemoryLocation::UnknownSize &&
Chandler Carruth7b560d42015-09-09 17:55:00 +00001677 isObjectSize(O1, V1Size, DL, TLI)) ||
Chandler Carruthecbd1682015-06-17 07:21:38 +00001678 (V2Size != MemoryLocation::UnknownSize &&
Chandler Carruth7b560d42015-09-09 17:55:00 +00001679 isObjectSize(O2, V2Size, DL, TLI)))
Dan Gohmanfb02cec2011-06-04 00:31:50 +00001680 return AliasCache[Locs] = PartialAlias;
Dan Gohman44da55b2011-01-18 21:16:06 +00001681
Chandler Carruth7b560d42015-09-09 17:55:00 +00001682 // Recurse back into the best AA results we have, potentially with refined
1683 // memory locations. We have already ensured that BasicAA has a MayAlias
1684 // cache result for these, so any recursion back into BasicAA won't loop.
1685 AliasResult Result = getBestAAResults().alias(Locs.first, Locs.second);
Dan Gohmanfb02cec2011-06-04 00:31:50 +00001686 return AliasCache[Locs] = Result;
Chris Lattnerd6a2a992003-02-26 19:41:54 +00001687}
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001688
Chandler Carruthc5d81122015-08-06 08:17:06 +00001689/// Check whether two Values can be considered equivalent.
1690///
1691/// In addition to pointer equivalence of \p V and \p V2 this checks whether
1692/// they cannot be part of a cycle in the value graph by looking at all
1693/// visited phi nodes and making sure that the phis cannot reach the value. We
1694/// have to do this because we are looking through phi nodes (that is, we say
1695/// noalias(V, phi(VA, VB)) if noalias(V, VA) and noalias(V, VB)).
Chandler Carruth7b560d42015-09-09 17:55:00 +00001696bool BasicAAResult::isValueEqualInPotentialCycles(const Value *V,
1697 const Value *V2) {
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001698 if (V != V2)
1699 return false;
1700
1701 const Instruction *Inst = dyn_cast<Instruction>(V);
1702 if (!Inst)
1703 return true;
1704
Daniel Berlin9e77de22015-03-20 18:05:49 +00001705 if (VisitedPhiBBs.empty())
1706 return true;
1707
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +00001708 if (VisitedPhiBBs.size() > MaxNumPhiBBsValueReachabilityCheck)
1709 return false;
1710
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +00001711 // Make sure that the visited phis cannot reach the Value. This ensures that
1712 // the Values cannot come from different iterations of a potential cycle the
1713 // phi nodes could be involved in.
Craig Topper46276792014-08-24 23:23:06 +00001714 for (auto *P : VisitedPhiBBs)
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001715 if (isPotentiallyReachable(&P->front(), Inst, DT, LI))
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001716 return false;
1717
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +00001718 return true;
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001719}
1720
Chandler Carruthc5d81122015-08-06 08:17:06 +00001721/// Computes the symbolic difference between two de-composed GEPs.
1722///
1723/// Dest and Src are the variable indices from two decomposed GetElementPtr
1724/// instructions GEP1 and GEP2 which have common base pointers.
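/// For example (a sketch): Dest = {4*%i} and Src = {4*%i, 2*%j} leaves
/// Dest = {-2*%j} after the subtraction.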
Chandler Carruth7b560d42015-09-09 17:55:00 +00001725void BasicAAResult::GetIndexDifference(
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001726 SmallVectorImpl<VariableGEPIndex> &Dest,
1727 const SmallVectorImpl<VariableGEPIndex> &Src) {
1728 if (Src.empty())
1729 return;
1730
1731 for (unsigned i = 0, e = Src.size(); i != e; ++i) {
1732 const Value *V = Src[i].V;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001733 unsigned ZExtBits = Src[i].ZExtBits, SExtBits = Src[i].SExtBits;
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001734 int64_t Scale = Src[i].Scale;
1735
1736 // Find V in Dest. This is N^2, but pointer indices almost never have more
1737    // than a few variable indices.
1738 for (unsigned j = 0, e = Dest.size(); j != e; ++j) {
Arnold Schwaighofer833a82e2014-01-03 05:47:03 +00001739 if (!isValueEqualInPotentialCycles(Dest[j].V, V) ||
Quentin Colombet5989bc62015-08-31 22:32:47 +00001740 Dest[j].ZExtBits != ZExtBits || Dest[j].SExtBits != SExtBits)
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001741 continue;
1742
1743 // If we found it, subtract off Scale V's from the entry in Dest. If it
1744 // goes to zero, remove the entry.
1745 if (Dest[j].Scale != Scale)
1746 Dest[j].Scale -= Scale;
1747 else
1748 Dest.erase(Dest.begin() + j);
1749 Scale = 0;
1750 break;
1751 }
1752
1753 // If we didn't consume this entry, add it to the end of the Dest list.
1754 if (Scale) {
Quentin Colombet5989bc62015-08-31 22:32:47 +00001755 VariableGEPIndex Entry = {V, ZExtBits, SExtBits, -Scale};
Arnold Schwaighofer0d10a9d2014-01-02 03:31:36 +00001756 Dest.push_back(Entry);
1757 }
1758 }
1759}
Quentin Colombet5989bc62015-08-31 22:32:47 +00001760
Chandler Carruth7b560d42015-09-09 17:55:00 +00001761bool BasicAAResult::constantOffsetHeuristic(
Quentin Colombet5989bc62015-08-31 22:32:47 +00001762 const SmallVectorImpl<VariableGEPIndex> &VarIndices, uint64_t V1Size,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001763 uint64_t V2Size, int64_t BaseOffset, AssumptionCache *AC,
Chandler Carruth7b560d42015-09-09 17:55:00 +00001764 DominatorTree *DT) {
Quentin Colombet5989bc62015-08-31 22:32:47 +00001765 if (VarIndices.size() != 2 || V1Size == MemoryLocation::UnknownSize ||
Chandler Carruth7b560d42015-09-09 17:55:00 +00001766 V2Size == MemoryLocation::UnknownSize)
Quentin Colombet5989bc62015-08-31 22:32:47 +00001767 return false;
1768
1769 const VariableGEPIndex &Var0 = VarIndices[0], &Var1 = VarIndices[1];
1770
1771 if (Var0.ZExtBits != Var1.ZExtBits || Var0.SExtBits != Var1.SExtBits ||
1772 Var0.Scale != -Var1.Scale)
1773 return false;
1774
1775 unsigned Width = Var1.V->getType()->getIntegerBitWidth();
1776
1777 // We'll strip off the Extensions of Var0 and Var1 and do another round
1778  // of GetLinearExpression decomposition. For example, if Var0
1779  // is zext(%x + 1) we should get V0 == %x and V0Offset == 1.
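  // This lets us handle (a sketch) index pairs such as zext(%x + 1) and
  // zext(%x): after stripping the extensions both decompose to %x, so the
  // two indices differ by a known constant.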
1780
1781 APInt V0Scale(Width, 0), V0Offset(Width, 0), V1Scale(Width, 0),
1782 V1Offset(Width, 0);
1783 bool NSW = true, NUW = true;
1784 unsigned V0ZExtBits = 0, V0SExtBits = 0, V1ZExtBits = 0, V1SExtBits = 0;
1785 const Value *V0 = GetLinearExpression(Var0.V, V0Scale, V0Offset, V0ZExtBits,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001786 V0SExtBits, DL, 0, AC, DT, NSW, NUW);
Richard Trieu7a083812016-02-18 22:09:30 +00001787 NSW = true;
1788 NUW = true;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001789 const Value *V1 = GetLinearExpression(Var1.V, V1Scale, V1Offset, V1ZExtBits,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001790 V1SExtBits, DL, 0, AC, DT, NSW, NUW);
Quentin Colombet5989bc62015-08-31 22:32:47 +00001791
1792 if (V0Scale != V1Scale || V0ZExtBits != V1ZExtBits ||
1793 V0SExtBits != V1SExtBits || !isValueEqualInPotentialCycles(V0, V1))
1794 return false;
1795
1796 // We have a hit - Var0 and Var1 only differ by a constant offset!
1797
1798  // If we've been sext'ed then zext'd, the maximum difference between Var0 and
1799 // Var1 is possible to calculate, but we're just interested in the absolute
Benjamin Kramer557b6012015-10-24 11:38:01 +00001800 // minimum difference between the two. The minimum distance may occur due to
Quentin Colombet5989bc62015-08-31 22:32:47 +00001801 // wrapping; consider "add i3 %i, 5": if %i == 7 then 7 + 5 mod 8 == 4, and so
1802 // the minimum distance between %i and %i + 5 is 3.
Benjamin Kramer557b6012015-10-24 11:38:01 +00001803 APInt MinDiff = V0Offset - V1Offset, Wrapped = -MinDiff;
Quentin Colombet5989bc62015-08-31 22:32:47 +00001804 MinDiff = APIntOps::umin(MinDiff, Wrapped);
1805 uint64_t MinDiffBytes = MinDiff.getZExtValue() * std::abs(Var0.Scale);
1806
1807 // We can't definitely say whether GEP1 is before or after V2 due to wrapping
1808 // arithmetic (i.e. for some values of GEP1 and V2 GEP1 < V2, and for other
1809 // values GEP1 > V2). We'll therefore only declare NoAlias if both V1Size and
1810 // V2Size can fit in the MinDiffBytes gap.
1811 return V1Size + std::abs(BaseOffset) <= MinDiffBytes &&
1812 V2Size + std::abs(BaseOffset) <= MinDiffBytes;
1813}
Chandler Carruth7b560d42015-09-09 17:55:00 +00001814
1815//===----------------------------------------------------------------------===//
1816// BasicAliasAnalysis Pass
1817//===----------------------------------------------------------------------===//
1818
Chandler Carruthdab4eae2016-11-23 17:53:26 +00001819AnalysisKey BasicAA::Key;
Chandler Carruthb4faf132016-03-11 10:22:49 +00001820
Sean Silva36e0d012016-08-09 00:28:15 +00001821BasicAAResult BasicAA::run(Function &F, FunctionAnalysisManager &AM) {
Chandler Carruth7b560d42015-09-09 17:55:00 +00001822 return BasicAAResult(F.getParent()->getDataLayout(),
Chandler Carruthb47f8012016-03-11 11:05:24 +00001823 AM.getResult<TargetLibraryAnalysis>(F),
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001824 AM.getResult<AssumptionAnalysis>(F),
Chandler Carruth5bfbc3f2016-03-11 13:53:18 +00001825 &AM.getResult<DominatorTreeAnalysis>(F),
Chandler Carruthb47f8012016-03-11 11:05:24 +00001826 AM.getCachedResult<LoopAnalysis>(F));
Chandler Carruth7b560d42015-09-09 17:55:00 +00001827}
1828
Keno Fischer277bfae2015-10-26 21:22:58 +00001829BasicAAWrapperPass::BasicAAWrapperPass() : FunctionPass(ID) {
1830 initializeBasicAAWrapperPassPass(*PassRegistry::getPassRegistry());
1831}
1832
Chandler Carruth7b560d42015-09-09 17:55:00 +00001833char BasicAAWrapperPass::ID = 0;
Eugene Zelenko530851c2017-08-11 21:30:02 +00001834
Chandler Carruth7b560d42015-09-09 17:55:00 +00001835void BasicAAWrapperPass::anchor() {}
1836
1837INITIALIZE_PASS_BEGIN(BasicAAWrapperPass, "basicaa",
1838 "Basic Alias Analysis (stateless AA impl)", true, true)
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001839INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
Chandler Carruth5bfbc3f2016-03-11 13:53:18 +00001840INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
Chandler Carruth7b560d42015-09-09 17:55:00 +00001841INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
1842INITIALIZE_PASS_END(BasicAAWrapperPass, "basicaa",
1843 "Basic Alias Analysis (stateless AA impl)", true, true)
1844
1845FunctionPass *llvm::createBasicAAWrapperPass() {
1846 return new BasicAAWrapperPass();
1847}
1848
1849bool BasicAAWrapperPass::runOnFunction(Function &F) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001850 auto &ACT = getAnalysis<AssumptionCacheTracker>();
Chandler Carruth7b560d42015-09-09 17:55:00 +00001851 auto &TLIWP = getAnalysis<TargetLibraryInfoWrapperPass>();
Chandler Carruth5bfbc3f2016-03-11 13:53:18 +00001852 auto &DTWP = getAnalysis<DominatorTreeWrapperPass>();
Chandler Carruth7b560d42015-09-09 17:55:00 +00001853 auto *LIWP = getAnalysisIfAvailable<LoopInfoWrapperPass>();
1854
1855 Result.reset(new BasicAAResult(F.getParent()->getDataLayout(), TLIWP.getTLI(),
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001856 ACT.getAssumptionCache(F), &DTWP.getDomTree(),
Chandler Carruth7b560d42015-09-09 17:55:00 +00001857 LIWP ? &LIWP->getLoopInfo() : nullptr));
1858
1859 return false;
1860}
1861
1862void BasicAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
1863 AU.setPreservesAll();
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001864 AU.addRequired<AssumptionCacheTracker>();
Chandler Carruth5bfbc3f2016-03-11 13:53:18 +00001865 AU.addRequired<DominatorTreeWrapperPass>();
Chandler Carruth7b560d42015-09-09 17:55:00 +00001866 AU.addRequired<TargetLibraryInfoWrapperPass>();
1867}
1868
1869BasicAAResult llvm::createLegacyPMBasicAAResult(Pass &P, Function &F) {
1870 return BasicAAResult(
1871 F.getParent()->getDataLayout(),
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001872 P.getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(),
1873 P.getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F));
Chandler Carruth7b560d42015-09-09 17:55:00 +00001874}