//===- EarlyCSE.cpp - Simple and fast CSE pass ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs a simple dominator tree walk that eliminates trivially
// redundant instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/EarlyCSE.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/ScopedHashTable.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/RecyclingAllocator.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/MemorySSA.h"
#include "llvm/Transforms/Utils/MemorySSAUpdater.h"
#include <deque>
using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "early-cse"

STATISTIC(NumSimplify, "Number of instructions simplified or DCE'd");
STATISTIC(NumCSE, "Number of instructions CSE'd");
STATISTIC(NumCSECVP, "Number of compare instructions CVP'd");
STATISTIC(NumCSELoad, "Number of load instructions CSE'd");
STATISTIC(NumCSECall, "Number of call instructions CSE'd");
STATISTIC(NumDSE, "Number of trivial dead stores removed");

//===----------------------------------------------------------------------===//
// SimpleValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available values in the scoped hash table.
struct SimpleValue {
  Instruction *Inst;

  SimpleValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // This can only handle non-void readnone functions.
    if (CallInst *CI = dyn_cast<CallInst>(Inst))
      return CI->doesNotAccessMemory() && !CI->getType()->isVoidTy();
    return isa<CastInst>(Inst) || isa<BinaryOperator>(Inst) ||
           isa<GetElementPtrInst>(Inst) || isa<CmpInst>(Inst) ||
           isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
           isa<InsertElementInst>(Inst) || isa<ShuffleVectorInst>(Inst) ||
           isa<ExtractValueInst>(Inst) || isa<InsertValueInst>(Inst);
  }
};
}

namespace llvm {
template <> struct DenseMapInfo<SimpleValue> {
  static inline SimpleValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline SimpleValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(SimpleValue Val);
  static bool isEqual(SimpleValue LHS, SimpleValue RHS);
};
}

unsigned DenseMapInfo<SimpleValue>::getHashValue(SimpleValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash in all of the operands as pointers.
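  // For commutative binary operators the two operands are put into a canonical
  // order (by pointer value) before hashing, so that for example "add %X, %Y"
  // and "add %Y, %X" hash to the same value (isEqual below accepts the
  // commuted form as equal).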
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Inst)) {
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);
    if (BinOp->isCommutative() && BinOp->getOperand(0) > BinOp->getOperand(1))
      std::swap(LHS, RHS);

    return hash_combine(BinOp->getOpcode(), LHS, RHS);
  }

  if (CmpInst *CI = dyn_cast<CmpInst>(Inst)) {
    Value *LHS = CI->getOperand(0);
    Value *RHS = CI->getOperand(1);
    CmpInst::Predicate Pred = CI->getPredicate();
    if (Inst->getOperand(0) > Inst->getOperand(1)) {
      std::swap(LHS, RHS);
      Pred = CI->getSwappedPredicate();
    }
    return hash_combine(Inst->getOpcode(), Pred, LHS, RHS);
  }

  if (CastInst *CI = dyn_cast<CastInst>(Inst))
    return hash_combine(CI->getOpcode(), CI->getType(), CI->getOperand(0));

  if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Inst))
    return hash_combine(EVI->getOpcode(), EVI->getOperand(0),
                        hash_combine_range(EVI->idx_begin(), EVI->idx_end()));

  if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(Inst))
    return hash_combine(IVI->getOpcode(), IVI->getOperand(0),
                        IVI->getOperand(1),
                        hash_combine_range(IVI->idx_begin(), IVI->idx_end()));

  assert((isa<CallInst>(Inst) || isa<BinaryOperator>(Inst) ||
          isa<GetElementPtrInst>(Inst) || isa<SelectInst>(Inst) ||
          isa<ExtractElementInst>(Inst) || isa<InsertElementInst>(Inst) ||
          isa<ShuffleVectorInst>(Inst)) &&
         "Invalid/unknown instruction");

  // Mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<SimpleValue>::isEqual(SimpleValue LHS, SimpleValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;

  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;

  if (LHSI->getOpcode() != RHSI->getOpcode())
    return false;
  if (LHSI->isIdenticalToWhenDefined(RHSI))
    return true;

  // If we're not strictly identical, we still might be a commutable instruction
  if (BinaryOperator *LHSBinOp = dyn_cast<BinaryOperator>(LHSI)) {
    if (!LHSBinOp->isCommutative())
      return false;

    assert(isa<BinaryOperator>(RHSI) &&
           "same opcode, but different instruction type?");
    BinaryOperator *RHSBinOp = cast<BinaryOperator>(RHSI);

    // Commuted equality
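    // e.g. "add %X, %Y" is treated as equal to "add %Y, %X".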
    return LHSBinOp->getOperand(0) == RHSBinOp->getOperand(1) &&
           LHSBinOp->getOperand(1) == RHSBinOp->getOperand(0);
  }
  if (CmpInst *LHSCmp = dyn_cast<CmpInst>(LHSI)) {
    assert(isa<CmpInst>(RHSI) &&
           "same opcode, but different instruction type?");
    CmpInst *RHSCmp = cast<CmpInst>(RHSI);
    // Commuted equality
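    // e.g. "icmp slt %X, %Y" is treated as equal to "icmp sgt %Y, %X".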
    return LHSCmp->getOperand(0) == RHSCmp->getOperand(1) &&
           LHSCmp->getOperand(1) == RHSCmp->getOperand(0) &&
           LHSCmp->getSwappedPredicate() == RHSCmp->getPredicate();
  }

  return false;
}

//===----------------------------------------------------------------------===//
// CallValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available call values in the scoped hash
/// table.
struct CallValue {
  Instruction *Inst;

  CallValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // Don't value number anything that returns void.
    if (Inst->getType()->isVoidTy())
      return false;

    CallInst *CI = dyn_cast<CallInst>(Inst);
    if (!CI || !CI->onlyReadsMemory())
      return false;
    return true;
  }
};
}

namespace llvm {
template <> struct DenseMapInfo<CallValue> {
  static inline CallValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline CallValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(CallValue Val);
  static bool isEqual(CallValue LHS, CallValue RHS);
};
}

unsigned DenseMapInfo<CallValue>::getHashValue(CallValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash all of the operands as pointers and mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<CallValue>::isEqual(CallValue LHS, CallValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;
  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;
  return LHSI->isIdenticalTo(RHSI);
}

//===----------------------------------------------------------------------===//
// EarlyCSE implementation
//===----------------------------------------------------------------------===//

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSE {
public:
  const TargetLibraryInfo &TLI;
  const TargetTransformInfo &TTI;
  DominatorTree &DT;
  AssumptionCache &AC;
  MemorySSA *MSSA;
  std::unique_ptr<MemorySSAUpdater> MSSAUpdater;
  typedef RecyclingAllocator<
      BumpPtrAllocator, ScopedHashTableVal<SimpleValue, Value *>> AllocatorTy;
  typedef ScopedHashTable<SimpleValue, Value *, DenseMapInfo<SimpleValue>,
                          AllocatorTy> ScopedHTType;

  /// \brief A scoped hash table of the current values of all of our simple
  /// scalar expressions.
  ///
  /// As we walk down the domtree, we look to see if instructions are in this:
  /// if so, we replace them with what we find, otherwise we insert them so
  /// that dominated values can succeed in their lookup.
  ScopedHTType AvailableValues;

  /// A scoped hash table of the current values of previously encountered
  /// memory locations.
  ///
  /// This allows us to get efficient access to dominating loads or stores when
  /// we have a fully redundant load. In addition to the most recent load, we
  /// keep track of a generation count of the read, which is compared against
  /// the current generation count. The current generation count is incremented
  /// after every possibly writing memory operation, which ensures that we only
  /// CSE loads with other loads that have no intervening store. Ordering
  /// events (such as fences or atomic instructions) increment the generation
  /// count as well; essentially, we model these as writes to all possible
  /// locations. Note that atomic and/or volatile loads and stores can be
  /// present in the table; it is the responsibility of the consumer to inspect
  /// the atomicity/volatility if needed.
  struct LoadValue {
    Instruction *DefInst;
    unsigned Generation;
    int MatchingId;
    bool IsAtomic;
    bool IsInvariant;
    LoadValue()
        : DefInst(nullptr), Generation(0), MatchingId(-1), IsAtomic(false),
          IsInvariant(false) {}
    LoadValue(Instruction *Inst, unsigned Generation, unsigned MatchingId,
              bool IsAtomic, bool IsInvariant)
        : DefInst(Inst), Generation(Generation), MatchingId(MatchingId),
          IsAtomic(IsAtomic), IsInvariant(IsInvariant) {}
  };
  typedef RecyclingAllocator<BumpPtrAllocator,
                             ScopedHashTableVal<Value *, LoadValue>>
      LoadMapAllocator;
  typedef ScopedHashTable<Value *, LoadValue, DenseMapInfo<Value *>,
                          LoadMapAllocator> LoadHTType;
  LoadHTType AvailableLoads;

  /// \brief A scoped hash table of the current values of read-only call
  /// values.
  ///
  /// It uses the same generation count as loads.
  typedef ScopedHashTable<CallValue, std::pair<Instruction *, unsigned>>
      CallHTType;
  CallHTType AvailableCalls;

  /// \brief This is the current generation of the memory value.
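  ///
  /// For example (illustrative IR):
  ///   %v1 = load i32, i32* %p   ; recorded in AvailableLoads at generation G
  ///   call void @f()            ; may write memory, so the generation becomes G+1
  ///   %v2 = load i32, i32* %p   ; seen at generation G+1, so %v1 is reused only
  ///                             ; if MemorySSA can prove @f does not clobber %p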
  unsigned CurrentGeneration;

  /// \brief Set up the EarlyCSE runner for a particular function.
  EarlyCSE(const TargetLibraryInfo &TLI, const TargetTransformInfo &TTI,
           DominatorTree &DT, AssumptionCache &AC, MemorySSA *MSSA)
      : TLI(TLI), TTI(TTI), DT(DT), AC(AC), MSSA(MSSA),
        MSSAUpdater(make_unique<MemorySSAUpdater>(MSSA)), CurrentGeneration(0) {
  }

  bool run();

private:
  // Almost a POD, but needs to call the constructors for the scoped hash
  // tables so that a new scope gets pushed on. These are RAII so that the
  // scope gets popped when the NodeScope is destroyed.
  class NodeScope {
  public:
    NodeScope(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls)
        : Scope(AvailableValues), LoadScope(AvailableLoads),
          CallScope(AvailableCalls) {}

  private:
    NodeScope(const NodeScope &) = delete;
    void operator=(const NodeScope &) = delete;

    ScopedHTType::ScopeTy Scope;
    LoadHTType::ScopeTy LoadScope;
    CallHTType::ScopeTy CallScope;
  };

  // Contains all the needed information to create a stack for doing a depth
  // first traversal of the tree. This includes scopes for values, loads, and
  // calls as well as the generation. There is a child iterator so that the
  // children do not need to be stored separately.
  class StackNode {
  public:
    StackNode(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls, unsigned cg, DomTreeNode *n,
              DomTreeNode::iterator child, DomTreeNode::iterator end)
        : CurrentGeneration(cg), ChildGeneration(cg), Node(n), ChildIter(child),
          EndIter(end), Scopes(AvailableValues, AvailableLoads, AvailableCalls),
          Processed(false) {}

    // Accessors.
    unsigned currentGeneration() { return CurrentGeneration; }
    unsigned childGeneration() { return ChildGeneration; }
    void childGeneration(unsigned generation) { ChildGeneration = generation; }
    DomTreeNode *node() { return Node; }
    DomTreeNode::iterator childIter() { return ChildIter; }
    DomTreeNode *nextChild() {
      DomTreeNode *child = *ChildIter;
      ++ChildIter;
      return child;
    }
    DomTreeNode::iterator end() { return EndIter; }
    bool isProcessed() { return Processed; }
    void process() { Processed = true; }

  private:
    StackNode(const StackNode &) = delete;
    void operator=(const StackNode &) = delete;

    // Members.
    unsigned CurrentGeneration;
    unsigned ChildGeneration;
    DomTreeNode *Node;
    DomTreeNode::iterator ChildIter;
    DomTreeNode::iterator EndIter;
    NodeScope Scopes;
    bool Processed;
  };

  /// \brief Wrapper class to handle memory instructions, including loads,
  /// stores and intrinsic loads and stores defined by the target.
  class ParseMemoryInst {
  public:
    ParseMemoryInst(Instruction *Inst, const TargetTransformInfo &TTI)
      : IsTargetMemInst(false), Inst(Inst) {
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
        if (TTI.getTgtMemIntrinsic(II, Info) && Info.NumMemRefs == 1)
          IsTargetMemInst = true;
    }
    bool isLoad() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return isa<LoadInst>(Inst);
    }
    bool isStore() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return isa<StoreInst>(Inst);
    }
    bool isAtomic() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      return Inst->isAtomic();
    }
    bool isUnordered() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return true;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isUnordered();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isUnordered();
      }
      // Conservative answer
      return !Inst->isAtomic();
    }

    bool isVolatile() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isVolatile();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isVolatile();
      }
      // Conservative answer
      return true;
    }

    bool isInvariantLoad() const {
      if (auto *LI = dyn_cast<LoadInst>(Inst))
        return LI->getMetadata(LLVMContext::MD_invariant_load) != nullptr;
      return false;
    }

    bool isMatchingMemLoc(const ParseMemoryInst &Inst) const {
      return (getPointerOperand() == Inst.getPointerOperand() &&
              getMatchingId() == Inst.getMatchingId());
    }
    bool isValid() const { return getPointerOperand() != nullptr; }

    // For regular (non-intrinsic) loads/stores, this is set to -1. For
    // intrinsic loads/stores, the id is retrieved from the corresponding
    // field in the MemIntrinsicInfo structure. That field contains
    // non-negative values only.
    int getMatchingId() const {
      if (IsTargetMemInst) return Info.MatchingId;
      return -1;
    }
    Value *getPointerOperand() const {
      if (IsTargetMemInst) return Info.PtrVal;
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->getPointerOperand();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->getPointerOperand();
      }
      return nullptr;
    }
    bool mayReadFromMemory() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return Inst->mayReadFromMemory();
    }
    bool mayWriteToMemory() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return Inst->mayWriteToMemory();
    }

  private:
    bool IsTargetMemInst;
    MemIntrinsicInfo Info;
    Instruction *Inst;
  };

  bool processNode(DomTreeNode *Node);

  Value *getOrCreateResult(Value *Inst, Type *ExpectedType) const {
    if (auto *LI = dyn_cast<LoadInst>(Inst))
      return LI;
    if (auto *SI = dyn_cast<StoreInst>(Inst))
      return SI->getValueOperand();
    assert(isa<IntrinsicInst>(Inst) && "Instruction not supported");
    return TTI.getOrCreateResultFromMemIntrinsic(cast<IntrinsicInst>(Inst),
                                                 ExpectedType);
  }

  bool isSameMemGeneration(unsigned EarlierGeneration, unsigned LaterGeneration,
                           Instruction *EarlierInst, Instruction *LaterInst);

  void removeMSSA(Instruction *Inst) {
    if (!MSSA)
      return;
    // Removing a store here can leave MemorySSA in an unoptimized state by
    // creating MemoryPhis that have identical arguments and by creating
    // MemoryUses whose defining access is not an actual clobber. We handle the
    // phi case eagerly here. The non-optimized MemoryUse case is lazily
    // updated by MemorySSA getClobberingMemoryAccess.
    if (MemoryAccess *MA = MSSA->getMemoryAccess(Inst)) {
      // Optimize MemoryPhi nodes that may become redundant by having all the
      // same input values once MA is removed.
      SmallVector<MemoryPhi *, 4> PhisToCheck;
      SmallVector<MemoryAccess *, 8> WorkQueue;
      WorkQueue.push_back(MA);
      // Process MemoryPhi nodes in FIFO order using an ever-growing vector
      // since we shouldn't be processing that many phis and this will avoid
      // an allocation in almost all cases.
      for (unsigned I = 0; I < WorkQueue.size(); ++I) {
        MemoryAccess *WI = WorkQueue[I];

        for (auto *U : WI->users())
          if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U))
            PhisToCheck.push_back(MP);

        MSSAUpdater->removeMemoryAccess(WI);

        for (MemoryPhi *MP : PhisToCheck) {
          MemoryAccess *FirstIn = MP->getIncomingValue(0);
          if (all_of(MP->incoming_values(),
                     [=](Use &In) { return In == FirstIn; }))
            WorkQueue.push_back(MP);
        }
        PhisToCheck.clear();
      }
    }
  }
};
}

/// Determine if the memory referenced by LaterInst is from the same heap
/// version as EarlierInst.
/// This is currently called in two scenarios:
///
///   load p
///   ...
///   load p
///
/// and
///
///   x = load p
///   ...
///   store x, p
///
/// in both cases we want to verify that there are no possible writes to the
/// memory referenced by p between the earlier and later instruction.
bool EarlyCSE::isSameMemGeneration(unsigned EarlierGeneration,
                                   unsigned LaterGeneration,
                                   Instruction *EarlierInst,
                                   Instruction *LaterInst) {
  // Check the simple memory generation tracking first.
  if (EarlierGeneration == LaterGeneration)
    return true;

  if (!MSSA)
    return false;

  // Since we know LaterDef dominates LaterInst and EarlierInst dominates
  // LaterInst, if LaterDef dominates EarlierInst then it can't occur between
  // EarlierInst and LaterInst and neither can any other write that potentially
  // clobbers LaterInst.
  MemoryAccess *LaterDef =
      MSSA->getWalker()->getClobberingMemoryAccess(LaterInst);
  return MSSA->dominates(LaterDef, MSSA->getMemoryAccess(EarlierInst));
}

bool EarlyCSE::processNode(DomTreeNode *Node) {
  bool Changed = false;
  BasicBlock *BB = Node->getBlock();

  // If this block has a single predecessor, then the predecessor is the parent
  // of the domtree node and all of the live out memory values are still current
  // in this block. If this block has multiple predecessors, then they could
  // have invalidated the live-out memory values of our parent value. For now,
  // just be conservative and invalidate memory if this block has multiple
  // predecessors.
  if (!BB->getSinglePredecessor())
    ++CurrentGeneration;

  // If this node has a single predecessor which ends in a conditional branch,
  // we can infer the value of the branch condition given that we took this
  // path. We need the single predecessor to ensure there's not another path
  // which reaches this block where the condition might hold a different
  // value. Since we're adding this to the scoped hash table (like any other
  // def), it will have been popped if we encounter a future merge block.
  if (BasicBlock *Pred = BB->getSinglePredecessor()) {
    auto *BI = dyn_cast<BranchInst>(Pred->getTerminator());
    if (BI && BI->isConditional()) {
      auto *CondInst = dyn_cast<Instruction>(BI->getCondition());
      if (CondInst && SimpleValue::canHandle(CondInst)) {
        assert(BI->getSuccessor(0) == BB || BI->getSuccessor(1) == BB);
        auto *TorF = (BI->getSuccessor(0) == BB)
                         ? ConstantInt::getTrue(BB->getContext())
                         : ConstantInt::getFalse(BB->getContext());
        AvailableValues.insert(CondInst, TorF);
        DEBUG(dbgs() << "EarlyCSE CVP: Add conditional value for '"
                     << CondInst->getName() << "' as " << *TorF << " in "
                     << BB->getName() << "\n");
        // Replace all dominated uses with the known value.
        if (unsigned Count = replaceDominatedUsesWith(
                CondInst, TorF, DT, BasicBlockEdge(Pred, BB))) {
          Changed = true;
          NumCSECVP = NumCSECVP + Count;
        }
      }
    }
  }

  /// LastStore - Keep track of the last non-volatile store that we saw... for
  /// as long as there is no instruction that reads memory. If we see a store
  /// to the same location, we delete the dead store. This zaps trivial dead
  /// stores which can occur in bitfield code among other things.
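  ///
  /// For example (illustrative IR), in:
  ///   store i32 1, i32* %p
  ///   store i32 2, i32* %p
  /// the first store is dead and is deleted when the second store is seen.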
  Instruction *LastStore = nullptr;

  const DataLayout &DL = BB->getModule()->getDataLayout();

  // See if any instructions in the block can be eliminated. If so, do it. If
  // not, add them to AvailableValues.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E;) {
    Instruction *Inst = &*I++;

    // Dead instructions should just be removed.
    if (isInstructionTriviallyDead(Inst, &TLI)) {
      DEBUG(dbgs() << "EarlyCSE DCE: " << *Inst << '\n');
      removeMSSA(Inst);
      Inst->eraseFromParent();
      Changed = true;
      ++NumSimplify;
      continue;
    }

    // Skip assume intrinsics, they don't really have side effects (although
    // they're marked as such to ensure preservation of control dependencies),
    // and this pass will not disturb any of the assumption's control
    // dependencies.
    if (match(Inst, m_Intrinsic<Intrinsic::assume>())) {
      DEBUG(dbgs() << "EarlyCSE skipping assumption: " << *Inst << '\n');
      continue;
    }

    // Skip invariant.start intrinsics since they only read memory, and we can
    // forward values across them. Also, we don't need to consume the last
    // store since the semantics of invariant.start allow us to perform DSE of
    // the last store, if there was a store following invariant.start. Consider:
    //
    //   store 30, i8* p
    //   invariant.start(p)
    //   store 40, i8* p
    // We can DSE the store to 30, since the store 40 to invariant location p
    // causes undefined behaviour.
    if (match(Inst, m_Intrinsic<Intrinsic::invariant_start>()))
      continue;

    if (match(Inst, m_Intrinsic<Intrinsic::experimental_guard>())) {
      if (auto *CondI =
              dyn_cast<Instruction>(cast<CallInst>(Inst)->getArgOperand(0))) {
        // The condition we're guarding on here is true for all dominated
        // locations.
        if (SimpleValue::canHandle(CondI))
          AvailableValues.insert(CondI, ConstantInt::getTrue(BB->getContext()));
      }

      // Guard intrinsics read all memory, but don't write any memory.
      // Accordingly, don't update the generation but consume the last store
      // (to avoid an incorrect DSE).
      LastStore = nullptr;
      continue;
    }

    // If the instruction can be simplified (e.g. X+0 = X) then replace it with
    // its simpler value.
    if (Value *V = SimplifyInstruction(Inst, DL, &TLI, &DT, &AC)) {
      DEBUG(dbgs() << "EarlyCSE Simplify: " << *Inst << "  to: " << *V << '\n');
      bool Killed = false;
      if (!Inst->use_empty()) {
        Inst->replaceAllUsesWith(V);
        Changed = true;
      }
      if (isInstructionTriviallyDead(Inst, &TLI)) {
        removeMSSA(Inst);
        Inst->eraseFromParent();
        Changed = true;
        Killed = true;
      }
      if (Changed)
        ++NumSimplify;
      if (Killed)
        continue;
    }

    // If this is a simple instruction that we can value number, process it.
    if (SimpleValue::canHandle(Inst)) {
      // See if the instruction has an available value. If so, use it.
      if (Value *V = AvailableValues.lookup(Inst)) {
        DEBUG(dbgs() << "EarlyCSE CSE: " << *Inst << "  to: " << *V << '\n');
        if (auto *I = dyn_cast<Instruction>(V))
          I->andIRFlags(Inst);
        Inst->replaceAllUsesWith(V);
        removeMSSA(Inst);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSE;
        continue;
      }

      // Otherwise, just remember that this value is available.
      AvailableValues.insert(Inst, Inst);
      continue;
    }

    ParseMemoryInst MemInst(Inst, TTI);
    // If this is a non-volatile load, process it.
    if (MemInst.isValid() && MemInst.isLoad()) {
      // (conservatively) we can't peek past the ordering implied by this
      // operation, but we can add this load to our set of available values
      if (MemInst.isVolatile() || !MemInst.isUnordered()) {
        LastStore = nullptr;
        ++CurrentGeneration;
      }

      // If we have an available version of this load, and if it is the right
      // generation or the load is known to be from an invariant location,
      // replace this instruction.
      //
      // If either the dominating load or the current load is invariant, then
      // we can assume the current load loads the same value as the dominating
      // load.
      LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand());
      if (InVal.DefInst != nullptr &&
          InVal.MatchingId == MemInst.getMatchingId() &&
          // We don't yet handle removing loads with ordering of any kind.
          !MemInst.isVolatile() && MemInst.isUnordered() &&
          // We can't replace an atomic load with one which isn't also atomic.
          InVal.IsAtomic >= MemInst.isAtomic() &&
          (InVal.IsInvariant || MemInst.isInvariantLoad() ||
           isSameMemGeneration(InVal.Generation, CurrentGeneration,
                               InVal.DefInst, Inst))) {
        Value *Op = getOrCreateResult(InVal.DefInst, Inst->getType());
        if (Op != nullptr) {
          DEBUG(dbgs() << "EarlyCSE CSE LOAD: " << *Inst
                       << "  to: " << *InVal.DefInst << '\n');
          if (!Inst->use_empty())
            Inst->replaceAllUsesWith(Op);
          removeMSSA(Inst);
          Inst->eraseFromParent();
          Changed = true;
          ++NumCSELoad;
          continue;
        }
      }

      // Otherwise, remember that we have this instruction.
      AvailableLoads.insert(
          MemInst.getPointerOperand(),
          LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
                    MemInst.isAtomic(), MemInst.isInvariantLoad()));
      LastStore = nullptr;
      continue;
    }

    // If this instruction may read from memory or throw (and potentially read
    // from memory in the exception handler), forget LastStore. Load/store
    // intrinsics will indicate both a read and a write to memory. The target
    // may override this (e.g. so that a store intrinsic does not read from
    // memory, and thus will be treated the same as a regular store for
    // commoning purposes).
    if ((Inst->mayReadFromMemory() || Inst->mayThrow()) &&
        !(MemInst.isValid() && !MemInst.mayReadFromMemory()))
      LastStore = nullptr;

    // If this is a read-only call, process it.
    if (CallValue::canHandle(Inst)) {
      // If we have an available version of this call, and if it is the right
      // generation, replace this instruction.
      std::pair<Instruction *, unsigned> InVal = AvailableCalls.lookup(Inst);
      if (InVal.first != nullptr &&
          isSameMemGeneration(InVal.second, CurrentGeneration, InVal.first,
                              Inst)) {
        DEBUG(dbgs() << "EarlyCSE CSE CALL: " << *Inst
                     << "  to: " << *InVal.first << '\n');
        if (!Inst->use_empty())
          Inst->replaceAllUsesWith(InVal.first);
        removeMSSA(Inst);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSECall;
        continue;
      }

      // Otherwise, remember that we have this instruction.
      AvailableCalls.insert(
          Inst, std::pair<Instruction *, unsigned>(Inst, CurrentGeneration));
      continue;
    }

    // A release fence requires that all stores complete before it, but does
    // not prevent the reordering of following loads 'before' the fence. As a
    // result, we don't need to consider it as writing to memory and don't need
    // to advance the generation. We do need to prevent DSE across the fence,
    // but that's handled above.
    if (FenceInst *FI = dyn_cast<FenceInst>(Inst))
      if (FI->getOrdering() == AtomicOrdering::Release) {
        assert(Inst->mayReadFromMemory() && "relied on to prevent DSE above");
        continue;
      }

    // Write-back DSE - If we write back the same value we just loaded from
    // the same location and haven't passed any intervening writes or ordering
    // operations, we can remove the write. The primary benefit is in allowing
    // the available load table to remain valid and value forward past where
    // the store originally was.
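    //
    // For example (illustrative IR), in:
    //   %v = load i32, i32* %p
    //   store i32 %v, i32* %p
    // the store writes back the value just loaded and can be removed.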
    if (MemInst.isValid() && MemInst.isStore()) {
      LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand());
      if (InVal.DefInst &&
          InVal.DefInst == getOrCreateResult(Inst, InVal.DefInst->getType()) &&
          InVal.MatchingId == MemInst.getMatchingId() &&
          // We don't yet handle removing stores with ordering of any kind.
          !MemInst.isVolatile() && MemInst.isUnordered() &&
          isSameMemGeneration(InVal.Generation, CurrentGeneration,
                              InVal.DefInst, Inst)) {
        // It is okay to have a LastStore to a different pointer here if
        // MemorySSA tells us that the load and store are from the same memory
        // generation. In that case, LastStore should keep its present value
        // since we're removing the current store.
        assert((!LastStore ||
                ParseMemoryInst(LastStore, TTI).getPointerOperand() ==
                    MemInst.getPointerOperand() ||
                MSSA) &&
               "can't have an intervening store if not using MemorySSA!");
        DEBUG(dbgs() << "EarlyCSE DSE (writeback): " << *Inst << '\n');
        removeMSSA(Inst);
        Inst->eraseFromParent();
        Changed = true;
        ++NumDSE;
        // We can avoid incrementing the generation count since we were able
        // to eliminate this store.
        continue;
      }
    }

    // Okay, this isn't something we can CSE at all. Check to see if it is
    // something that could modify memory. If so, our available memory values
    // cannot be used so bump the generation count.
    if (Inst->mayWriteToMemory()) {
      ++CurrentGeneration;

      if (MemInst.isValid() && MemInst.isStore()) {
        // We do a trivial form of DSE if there are two stores to the same
        // location with no intervening loads. Delete the earlier store.
        // At the moment, we don't remove ordered stores, but do remove
        // unordered atomic stores. There's no special requirement (for
        // unordered atomics) about removing atomic stores only in favor of
        // other atomic stores since we're going to execute the non-atomic
        // one anyway and the atomic one might never have become visible.
        if (LastStore) {
          ParseMemoryInst LastStoreMemInst(LastStore, TTI);
          assert(LastStoreMemInst.isUnordered() &&
                 !LastStoreMemInst.isVolatile() &&
                 "Violated invariant");
          if (LastStoreMemInst.isMatchingMemLoc(MemInst)) {
            DEBUG(dbgs() << "EarlyCSE DEAD STORE: " << *LastStore
                         << "  due to: " << *Inst << '\n');
            removeMSSA(LastStore);
            LastStore->eraseFromParent();
            Changed = true;
            ++NumDSE;
            LastStore = nullptr;
          }
          // fallthrough - we can exploit information about this store
        }

        // Okay, we just invalidated anything we knew about loaded values. Try
        // to salvage *something* by remembering that the stored value is a live
        // version of the pointer. It is safe to forward from volatile stores
        // to non-volatile loads, so we don't have to check for volatility of
        // the store.
        AvailableLoads.insert(
            MemInst.getPointerOperand(),
            LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
                      MemInst.isAtomic(), /*IsInvariant=*/false));

        // Remember that this was the last unordered store we saw for DSE. We
        // don't yet handle DSE on ordered or volatile stores since we don't
        // have a good way to model the ordering requirement for following
        // passes once the store is removed. We could insert a fence, but
        // since fences are slightly stronger than stores in their ordering,
        // it's not clear this is a profitable transform. Another option would
        // be to merge the ordering with that of the post dominating store.
        if (MemInst.isUnordered() && !MemInst.isVolatile())
          LastStore = Inst;
        else
          LastStore = nullptr;
      }
    }
  }

  return Changed;
}

bool EarlyCSE::run() {
  // Note, deque is being used here because there are significant performance
  // gains over vector when the container becomes very large due to the
  // specific access patterns. For more information see the mailing list
  // discussion on this:
  // http://lists.llvm.org/pipermail/llvm-commits/Week-of-Mon-20120116/135228.html
  std::deque<StackNode *> nodesToProcess;

  bool Changed = false;

  // Process the root node.
  nodesToProcess.push_back(new StackNode(
      AvailableValues, AvailableLoads, AvailableCalls, CurrentGeneration,
      DT.getRootNode(), DT.getRootNode()->begin(), DT.getRootNode()->end()));

  // Save the current generation.
  unsigned LiveOutGeneration = CurrentGeneration;

  // Process the stack.
  while (!nodesToProcess.empty()) {
    // Grab the first item off the stack. Set the current generation, remove
    // the node from the stack, and process it.
    StackNode *NodeToProcess = nodesToProcess.back();

    // Initialize class members.
    CurrentGeneration = NodeToProcess->currentGeneration();

    // Check if the node needs to be processed.
    if (!NodeToProcess->isProcessed()) {
      // Process the node.
      Changed |= processNode(NodeToProcess->node());
      NodeToProcess->childGeneration(CurrentGeneration);
      NodeToProcess->process();
    } else if (NodeToProcess->childIter() != NodeToProcess->end()) {
      // Push the next child onto the stack.
      DomTreeNode *child = NodeToProcess->nextChild();
      nodesToProcess.push_back(
          new StackNode(AvailableValues, AvailableLoads, AvailableCalls,
                        NodeToProcess->childGeneration(), child, child->begin(),
                        child->end()));
    } else {
      // It has been processed, and there are no more children to process,
      // so delete it and pop it off the stack.
      delete NodeToProcess;
      nodesToProcess.pop_back();
    }
  } // while (!nodes...)

  // Reset the current generation.
  CurrentGeneration = LiveOutGeneration;

  return Changed;
}

PreservedAnalyses EarlyCSEPass::run(Function &F,
                                    FunctionAnalysisManager &AM) {
  auto &TLI = AM.getResult<TargetLibraryAnalysis>(F);
  auto &TTI = AM.getResult<TargetIRAnalysis>(F);
  auto &DT = AM.getResult<DominatorTreeAnalysis>(F);
  auto &AC = AM.getResult<AssumptionAnalysis>(F);
  auto *MSSA =
      UseMemorySSA ? &AM.getResult<MemorySSAAnalysis>(F).getMSSA() : nullptr;

  EarlyCSE CSE(TLI, TTI, DT, AC, MSSA);

  if (!CSE.run())
    return PreservedAnalyses::all();

  PreservedAnalyses PA;
  PA.preserveSet<CFGAnalyses>();
  PA.preserve<GlobalsAA>();
  if (UseMemorySSA)
    PA.preserve<MemorySSAAnalysis>();
  return PA;
}

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
template<bool UseMemorySSA>
class EarlyCSELegacyCommonPass : public FunctionPass {
public:
  static char ID;

  EarlyCSELegacyCommonPass() : FunctionPass(ID) {
    if (UseMemorySSA)
      initializeEarlyCSEMemSSALegacyPassPass(*PassRegistry::getPassRegistry());
    else
      initializeEarlyCSELegacyPassPass(*PassRegistry::getPassRegistry());
  }

  bool runOnFunction(Function &F) override {
    if (skipFunction(F))
      return false;

    auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
    auto &TTI = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
    auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
    auto *MSSA =
        UseMemorySSA ? &getAnalysis<MemorySSAWrapperPass>().getMSSA() : nullptr;

    EarlyCSE CSE(TLI, TTI, DT, AC, MSSA);

    return CSE.run();
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    if (UseMemorySSA) {
      AU.addRequired<MemorySSAWrapperPass>();
      AU.addPreserved<MemorySSAWrapperPass>();
    }
    AU.addPreserved<GlobalsAAWrapperPass>();
    AU.setPreservesCFG();
  }
};
}

using EarlyCSELegacyPass = EarlyCSELegacyCommonPass</*UseMemorySSA=*/false>;

template<>
char EarlyCSELegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(EarlyCSELegacyPass, "early-cse", "Early CSE", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(EarlyCSELegacyPass, "early-cse", "Early CSE", false, false)

using EarlyCSEMemSSALegacyPass =
    EarlyCSELegacyCommonPass</*UseMemorySSA=*/true>;

template<>
char EarlyCSEMemSSALegacyPass::ID = 0;

FunctionPass *llvm::createEarlyCSEPass(bool UseMemorySSA) {
  if (UseMemorySSA)
    return new EarlyCSEMemSSALegacyPass();
  else
    return new EarlyCSELegacyPass();
}

INITIALIZE_PASS_BEGIN(EarlyCSEMemSSALegacyPass, "early-cse-memssa",
                      "Early CSE w/ MemorySSA", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(MemorySSAWrapperPass)
INITIALIZE_PASS_END(EarlyCSEMemSSALegacyPass, "early-cse-memssa",
                    "Early CSE w/ MemorySSA", false, false)