//===- EarlyCSE.cpp - Simple and fast CSE pass ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs a simple dominator tree walk that eliminates trivially
// redundant instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/EarlyCSE.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/ScopedHashTable.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/RecyclingAllocator.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/Local.h"
#include <deque>
using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "early-cse"

STATISTIC(NumSimplify, "Number of instructions simplified or DCE'd");
STATISTIC(NumCSE,      "Number of instructions CSE'd");
STATISTIC(NumCSELoad,  "Number of load instructions CSE'd");
STATISTIC(NumCSECall,  "Number of call instructions CSE'd");
STATISTIC(NumDSE,      "Number of trivial dead stores removed");

//===----------------------------------------------------------------------===//
// SimpleValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available values in the scoped hash table.
struct SimpleValue {
  Instruction *Inst;

  SimpleValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // This can only handle non-void readnone functions.
    if (CallInst *CI = dyn_cast<CallInst>(Inst))
      return CI->doesNotAccessMemory() && !CI->getType()->isVoidTy();
    return isa<CastInst>(Inst) || isa<BinaryOperator>(Inst) ||
           isa<GetElementPtrInst>(Inst) || isa<CmpInst>(Inst) ||
           isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
           isa<InsertElementInst>(Inst) || isa<ShuffleVectorInst>(Inst) ||
           isa<ExtractValueInst>(Inst) || isa<InsertValueInst>(Inst);
  }
};
}

namespace llvm {
template <> struct DenseMapInfo<SimpleValue> {
  static inline SimpleValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline SimpleValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(SimpleValue Val);
  static bool isEqual(SimpleValue LHS, SimpleValue RHS);
};
}

unsigned DenseMapInfo<SimpleValue>::getHashValue(SimpleValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash in all of the operands as pointers.
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Inst)) {
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);
    if (BinOp->isCommutative() && BinOp->getOperand(0) > BinOp->getOperand(1))
      std::swap(LHS, RHS);

    if (isa<OverflowingBinaryOperator>(BinOp)) {
      // Hash the overflow behavior
      unsigned Overflow =
          BinOp->hasNoSignedWrap() * OverflowingBinaryOperator::NoSignedWrap |
          BinOp->hasNoUnsignedWrap() *
              OverflowingBinaryOperator::NoUnsignedWrap;
      return hash_combine(BinOp->getOpcode(), Overflow, LHS, RHS);
    }

    return hash_combine(BinOp->getOpcode(), LHS, RHS);
  }

  if (CmpInst *CI = dyn_cast<CmpInst>(Inst)) {
    Value *LHS = CI->getOperand(0);
    Value *RHS = CI->getOperand(1);
    CmpInst::Predicate Pred = CI->getPredicate();
    if (Inst->getOperand(0) > Inst->getOperand(1)) {
      std::swap(LHS, RHS);
      Pred = CI->getSwappedPredicate();
    }
    return hash_combine(Inst->getOpcode(), Pred, LHS, RHS);
  }

  if (CastInst *CI = dyn_cast<CastInst>(Inst))
    return hash_combine(CI->getOpcode(), CI->getType(), CI->getOperand(0));

  if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Inst))
    return hash_combine(EVI->getOpcode(), EVI->getOperand(0),
                        hash_combine_range(EVI->idx_begin(), EVI->idx_end()));

  if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(Inst))
    return hash_combine(IVI->getOpcode(), IVI->getOperand(0),
                        IVI->getOperand(1),
                        hash_combine_range(IVI->idx_begin(), IVI->idx_end()));

  assert((isa<CallInst>(Inst) || isa<BinaryOperator>(Inst) ||
          isa<GetElementPtrInst>(Inst) || isa<SelectInst>(Inst) ||
          isa<ExtractElementInst>(Inst) || isa<InsertElementInst>(Inst) ||
          isa<ShuffleVectorInst>(Inst)) &&
         "Invalid/unknown instruction");

  // Mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}
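
// Note: for commutative binary operators, getHashValue orders the two
// operands by pointer value before hashing, so e.g. 'add %x, %y' and
// 'add %y, %x' (hypothetical IR) hash identically; isEqual below then
// confirms the commuted match.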

bool DenseMapInfo<SimpleValue>::isEqual(SimpleValue LHS, SimpleValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;

  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;

  if (LHSI->getOpcode() != RHSI->getOpcode())
    return false;
  if (LHSI->isIdenticalTo(RHSI))
    return true;

  // If we're not strictly identical, the instructions may still be equal
  // modulo commutation.
  if (BinaryOperator *LHSBinOp = dyn_cast<BinaryOperator>(LHSI)) {
    if (!LHSBinOp->isCommutative())
      return false;

    assert(isa<BinaryOperator>(RHSI) &&
           "same opcode, but different instruction type?");
    BinaryOperator *RHSBinOp = cast<BinaryOperator>(RHSI);

    // Check overflow attributes
    if (isa<OverflowingBinaryOperator>(LHSBinOp)) {
      assert(isa<OverflowingBinaryOperator>(RHSBinOp) &&
             "same opcode, but different operator type?");
      if (LHSBinOp->hasNoUnsignedWrap() != RHSBinOp->hasNoUnsignedWrap() ||
          LHSBinOp->hasNoSignedWrap() != RHSBinOp->hasNoSignedWrap())
        return false;
    }

    // Commuted equality
    return LHSBinOp->getOperand(0) == RHSBinOp->getOperand(1) &&
           LHSBinOp->getOperand(1) == RHSBinOp->getOperand(0);
  }
  if (CmpInst *LHSCmp = dyn_cast<CmpInst>(LHSI)) {
    assert(isa<CmpInst>(RHSI) &&
           "same opcode, but different instruction type?");
    CmpInst *RHSCmp = cast<CmpInst>(RHSI);
    // Commuted equality
    return LHSCmp->getOperand(0) == RHSCmp->getOperand(1) &&
           LHSCmp->getOperand(1) == RHSCmp->getOperand(0) &&
           LHSCmp->getSwappedPredicate() == RHSCmp->getPredicate();
  }

  return false;
}
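
// For example (sketch): 'icmp ult %a, %b' and 'icmp ugt %b, %a' compare the
// same operands under swapped predicates, so isEqual treats them as equal
// and the second compare can be CSE'd to the first.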

//===----------------------------------------------------------------------===//
// CallValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available call values in the scoped hash
/// table.
struct CallValue {
  Instruction *Inst;

  CallValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // Don't value number anything that returns void.
    if (Inst->getType()->isVoidTy())
      return false;

    CallInst *CI = dyn_cast<CallInst>(Inst);
    if (!CI || !CI->onlyReadsMemory())
      return false;
    return true;
  }
};
}

namespace llvm {
template <> struct DenseMapInfo<CallValue> {
  static inline CallValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline CallValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(CallValue Val);
  static bool isEqual(CallValue LHS, CallValue RHS);
};
}

unsigned DenseMapInfo<CallValue>::getHashValue(CallValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash all of the operands as pointers and mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<CallValue>::isEqual(CallValue LHS, CallValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;
  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;
  return LHSI->isIdenticalTo(RHSI);
}

//===----------------------------------------------------------------------===//
// EarlyCSE implementation
//===----------------------------------------------------------------------===//

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSE {
public:
  const TargetLibraryInfo &TLI;
  const TargetTransformInfo &TTI;
  DominatorTree &DT;
  AssumptionCache &AC;
  typedef RecyclingAllocator<
      BumpPtrAllocator, ScopedHashTableVal<SimpleValue, Value *>> AllocatorTy;
  typedef ScopedHashTable<SimpleValue, Value *, DenseMapInfo<SimpleValue>,
                          AllocatorTy> ScopedHTType;

  /// \brief A scoped hash table of the current values of all of our simple
  /// scalar expressions.
  ///
  /// As we walk down the domtree, we look to see if instructions are in this:
  /// if so, we replace them with what we find, otherwise we insert them so
  /// that dominated values can succeed in their lookup.
  ScopedHTType AvailableValues;
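
  // Sketch of the intended scoping: an 'add i32 %x, %y' recorded while
  // visiting a block is visible in all blocks it dominates (a repeated add
  // there is replaced), and the entry is popped once the DFS leaves that
  // block's subtree, so non-dominated blocks never observe it.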

  /// A scoped hash table of the current values of previously encountered
  /// memory locations.
  ///
  /// This allows us to get efficient access to dominating loads or stores when
  /// we have a fully redundant load. In addition to the most recent load, we
  /// keep track of a generation count of the read, which is compared against
  /// the current generation count. The current generation count is incremented
  /// after every possibly writing memory operation, which ensures that we only
  /// CSE loads with other loads that have no intervening store. Ordering
  /// events (such as fences or atomic instructions) increment the generation
  /// count as well; essentially, we model these as writes to all possible
  /// locations. Note that atomic and/or volatile loads and stores can be
  /// present in the table; it is the responsibility of the consumer to inspect
  /// the atomicity/volatility if needed.
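  ///
  /// A minimal illustration (hypothetical IR) of the generation counter:
  ///
  ///   %v1 = load i32, i32* %p    ; recorded at generation G
  ///   call void @clobber()       ; may write memory -> generation G+1
  ///   %v2 = load i32, i32* %p    ; generation mismatch, not CSE'd
  ///
  /// Without the intervening call, %v2 would simply be replaced by %v1.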
  struct LoadValue {
    Value *Data;
    unsigned Generation;
    int MatchingId;
    bool IsAtomic;
    LoadValue()
        : Data(nullptr), Generation(0), MatchingId(-1), IsAtomic(false) {}
    LoadValue(Value *Data, unsigned Generation, unsigned MatchingId,
              bool IsAtomic)
        : Data(Data), Generation(Generation), MatchingId(MatchingId),
          IsAtomic(IsAtomic) {}
  };
  typedef RecyclingAllocator<BumpPtrAllocator,
                             ScopedHashTableVal<Value *, LoadValue>>
      LoadMapAllocator;
  typedef ScopedHashTable<Value *, LoadValue, DenseMapInfo<Value *>,
                          LoadMapAllocator> LoadHTType;
  LoadHTType AvailableLoads;

  /// \brief A scoped hash table of the current values of read-only call
  /// values.
  ///
  /// It uses the same generation count as loads.
  typedef ScopedHashTable<CallValue, std::pair<Value *, unsigned>> CallHTType;
  CallHTType AvailableCalls;
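
  // For instance (sketch, assuming @f is readonly and returns non-void):
  //
  //   %r1 = call i32 @f(i32 %x)
  //   %r2 = call i32 @f(i32 %x)   ; same generation -> replaced by %r1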

  /// \brief This is the current generation of the memory value.
  unsigned CurrentGeneration;

  /// \brief Set up the EarlyCSE runner for a particular function.
  EarlyCSE(const TargetLibraryInfo &TLI, const TargetTransformInfo &TTI,
           DominatorTree &DT, AssumptionCache &AC)
      : TLI(TLI), TTI(TTI), DT(DT), AC(AC), CurrentGeneration(0) {}

  bool run();

private:
  // Almost a POD, but needs to call the constructors for the scoped hash
  // tables so that a new scope gets pushed on. These are RAII so that the
  // scope gets popped when the NodeScope is destroyed.
  class NodeScope {
  public:
    NodeScope(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls)
        : Scope(AvailableValues), LoadScope(AvailableLoads),
          CallScope(AvailableCalls) {}

  private:
    NodeScope(const NodeScope &) = delete;
    void operator=(const NodeScope &) = delete;

    ScopedHTType::ScopeTy Scope;
    LoadHTType::ScopeTy LoadScope;
    CallHTType::ScopeTy CallScope;
  };

  // Contains all the needed information to create a stack for doing a depth
  // first traversal of the tree. This includes scopes for values, loads, and
  // calls as well as the generation. There is a child iterator so that the
  // children do not need to be stored separately.
  class StackNode {
  public:
    StackNode(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls, unsigned cg, DomTreeNode *n,
              DomTreeNode::iterator child, DomTreeNode::iterator end)
        : CurrentGeneration(cg), ChildGeneration(cg), Node(n), ChildIter(child),
          EndIter(end), Scopes(AvailableValues, AvailableLoads, AvailableCalls),
          Processed(false) {}

    // Accessors.
    unsigned currentGeneration() { return CurrentGeneration; }
    unsigned childGeneration() { return ChildGeneration; }
    void childGeneration(unsigned generation) { ChildGeneration = generation; }
    DomTreeNode *node() { return Node; }
    DomTreeNode::iterator childIter() { return ChildIter; }
    DomTreeNode *nextChild() {
      DomTreeNode *child = *ChildIter;
      ++ChildIter;
      return child;
    }
    DomTreeNode::iterator end() { return EndIter; }
    bool isProcessed() { return Processed; }
    void process() { Processed = true; }

  private:
    StackNode(const StackNode &) = delete;
    void operator=(const StackNode &) = delete;

    // Members.
    unsigned CurrentGeneration;
    unsigned ChildGeneration;
    DomTreeNode *Node;
    DomTreeNode::iterator ChildIter;
    DomTreeNode::iterator EndIter;
    NodeScope Scopes;
    bool Processed;
  };

  /// \brief Wrapper class to handle memory instructions, including loads,
  /// stores and intrinsic loads and stores defined by the target.
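  ///
  /// For example, a target can describe one of its memory intrinsics via
  /// TTI.getTgtMemIntrinsic as touching a single location; EarlyCSE can then
  /// common it with ordinary loads and stores of the same pointer.
  /// (Illustrative only; which intrinsics qualify is up to the target.)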
  class ParseMemoryInst {
  public:
    ParseMemoryInst(Instruction *Inst, const TargetTransformInfo &TTI)
        : IsTargetMemInst(false), Inst(Inst) {
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
        if (TTI.getTgtMemIntrinsic(II, Info) && Info.NumMemRefs == 1)
          IsTargetMemInst = true;
    }
    bool isLoad() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return isa<LoadInst>(Inst);
    }
    bool isStore() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return isa<StoreInst>(Inst);
    }
    bool isSimple() const {
      if (IsTargetMemInst) return Info.IsSimple;
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isSimple();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isSimple();
      }
      return Inst->isAtomic();
    }
    bool isAtomic() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      return Inst->isAtomic();
    }
    bool isUnordered() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return true;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isUnordered();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isUnordered();
      }
      // Conservative answer
      return !Inst->isAtomic();
    }

    bool isVolatile() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isVolatile();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isVolatile();
      }
      // Conservative answer
      return true;
    }

    bool isMatchingMemLoc(const ParseMemoryInst &Inst) const {
      return (getPointerOperand() == Inst.getPointerOperand() &&
              getMatchingId() == Inst.getMatchingId());
    }
    bool isValid() const { return getPointerOperand() != nullptr; }

    // For regular (non-intrinsic) loads/stores, this is set to -1. For
    // intrinsic loads/stores, the id is retrieved from the corresponding
    // field in the MemIntrinsicInfo structure. That field contains
    // non-negative values only.
    int getMatchingId() const {
      if (IsTargetMemInst) return Info.MatchingId;
      return -1;
    }
    Value *getPointerOperand() const {
      if (IsTargetMemInst) return Info.PtrVal;
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->getPointerOperand();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->getPointerOperand();
      }
      return nullptr;
    }
    bool mayReadFromMemory() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return Inst->mayReadFromMemory();
    }
    bool mayWriteToMemory() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return Inst->mayWriteToMemory();
    }

  private:
    bool IsTargetMemInst;
    MemIntrinsicInfo Info;
    Instruction *Inst;
  };

  bool processNode(DomTreeNode *Node);

  Value *getOrCreateResult(Value *Inst, Type *ExpectedType) const {
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      return LI;
    else if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
      return SI->getValueOperand();
    assert(isa<IntrinsicInst>(Inst) && "Instruction not supported");
    return TTI.getOrCreateResultFromMemIntrinsic(cast<IntrinsicInst>(Inst),
                                                 ExpectedType);
  }
};
}

bool EarlyCSE::processNode(DomTreeNode *Node) {
  BasicBlock *BB = Node->getBlock();

  // If this block has a single predecessor, then the predecessor is the parent
  // of the domtree node and all of the live-out memory values are still current
  // in this block. If this block has multiple predecessors, then they could
  // have invalidated the live-out memory values of our parent value. For now,
  // just be conservative and invalidate memory if this block has multiple
  // predecessors.
  if (!BB->getSinglePredecessor())
    ++CurrentGeneration;

  // If this node has a single predecessor which ends in a conditional branch,
  // we can infer the value of the branch condition given that we took this
  // path. We need the single predecessor to ensure there's not another path
  // which reaches this block where the condition might hold a different
  // value. Since we're adding this to the scoped hash table (like any other
  // def), it will have been popped if we encounter a future merge block.
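  //
  // For instance (sketch):
  //
  //   %cmp = icmp eq i32 %a, %b
  //   br i1 %cmp, label %taken, label %untaken
  //
  // While visiting %taken (reachable only via this edge), %cmp is mapped to
  // 'true', so dominated uses of %cmp are replaced outright.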
  if (BasicBlock *Pred = BB->getSinglePredecessor())
    if (auto *BI = dyn_cast<BranchInst>(Pred->getTerminator()))
      if (BI->isConditional())
        if (auto *CondInst = dyn_cast<Instruction>(BI->getCondition()))
          if (SimpleValue::canHandle(CondInst)) {
            assert(BI->getSuccessor(0) == BB || BI->getSuccessor(1) == BB);
            auto *ConditionalConstant = (BI->getSuccessor(0) == BB) ?
              ConstantInt::getTrue(BB->getContext()) :
              ConstantInt::getFalse(BB->getContext());
            AvailableValues.insert(CondInst, ConditionalConstant);
            DEBUG(dbgs() << "EarlyCSE CVP: Add conditional value for '"
                         << CondInst->getName() << "' as " << *ConditionalConstant
                         << " in " << BB->getName() << "\n");
            // Replace all dominated uses with the known value
            replaceDominatedUsesWith(CondInst, ConditionalConstant, DT,
                                     BasicBlockEdge(Pred, BB));
          }

  /// LastStore - Keep track of the last non-volatile store that we saw... for
  /// as long as there is no instruction that reads memory. If we see a store
  /// to the same location, we delete the dead store. This zaps trivial dead
  /// stores which can occur in bitfield code among other things.
  Instruction *LastStore = nullptr;
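  // Sketch of the trivial DSE this enables (hypothetical IR):
  //
  //   store i32 1, i32* %p   ; becomes LastStore, deleted when the
  //   store i32 2, i32* %p   ; second store arrives with no read in between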

  bool Changed = false;
  const DataLayout &DL = BB->getModule()->getDataLayout();

  // See if any instructions in the block can be eliminated. If so, do it. If
  // not, add them to AvailableValues.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E;) {
    Instruction *Inst = &*I++;

    // Dead instructions should just be removed.
    if (isInstructionTriviallyDead(Inst, &TLI)) {
      DEBUG(dbgs() << "EarlyCSE DCE: " << *Inst << '\n');
      Inst->eraseFromParent();
      Changed = true;
      ++NumSimplify;
      continue;
    }

    // Skip assume intrinsics, they don't really have side effects (although
    // they're marked as such to ensure preservation of control dependencies),
    // and this pass will not disturb any of the assumption's control
    // dependencies.
    if (match(Inst, m_Intrinsic<Intrinsic::assume>())) {
      DEBUG(dbgs() << "EarlyCSE skipping assumption: " << *Inst << '\n');
      continue;
    }

    // If the instruction can be simplified (e.g. X+0 = X) then replace it with
    // its simpler value.
    if (Value *V = SimplifyInstruction(Inst, DL, &TLI, &DT, &AC)) {
      DEBUG(dbgs() << "EarlyCSE Simplify: " << *Inst << " to: " << *V << '\n');
      Inst->replaceAllUsesWith(V);
      Inst->eraseFromParent();
      Changed = true;
      ++NumSimplify;
      continue;
    }

    // If this is a simple instruction that we can value number, process it.
    if (SimpleValue::canHandle(Inst)) {
      // See if the instruction has an available value. If so, use it.
      if (Value *V = AvailableValues.lookup(Inst)) {
        DEBUG(dbgs() << "EarlyCSE CSE: " << *Inst << " to: " << *V << '\n');
        Inst->replaceAllUsesWith(V);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSE;
        continue;
      }

      // Otherwise, just remember that this value is available.
      AvailableValues.insert(Inst, Inst);
      continue;
    }

    ParseMemoryInst MemInst(Inst, TTI);
    // If this is a load, process it.
    if (MemInst.isValid() && MemInst.isLoad()) {
      // Conservatively, we can't peek past the ordering implied by this
      // operation, but we can still add this load to our set of available
      // values.
      if (MemInst.isVolatile() || !MemInst.isUnordered()) {
        LastStore = nullptr;
        ++CurrentGeneration;
      }

      // If we have an available version of this load, and if it is the right
      // generation, replace this instruction.
      LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand());
      if (InVal.Data != nullptr && InVal.Generation == CurrentGeneration &&
          InVal.MatchingId == MemInst.getMatchingId() &&
          // We don't yet handle removing loads with ordering of any kind.
          !MemInst.isVolatile() && MemInst.isUnordered() &&
          // We can't replace an atomic load with one which isn't also atomic.
          InVal.IsAtomic >= MemInst.isAtomic()) {
        Value *Op = getOrCreateResult(InVal.Data, Inst->getType());
        if (Op != nullptr) {
          DEBUG(dbgs() << "EarlyCSE CSE LOAD: " << *Inst
                       << " to: " << *InVal.Data << '\n');
          if (!Inst->use_empty())
            Inst->replaceAllUsesWith(Op);
          Inst->eraseFromParent();
          Changed = true;
          ++NumCSELoad;
          continue;
        }
      }

      // Otherwise, remember that we have this instruction.
      AvailableLoads.insert(
          MemInst.getPointerOperand(),
          LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
                    MemInst.isAtomic()));
      LastStore = nullptr;
      continue;
    }

    // If this instruction may read from memory, forget LastStore.
    // Load/store intrinsics will indicate both a read and a write to
    // memory. The target may override this (e.g. so that a store intrinsic
    // does not read from memory, and thus will be treated the same as a
    // regular store for commoning purposes).
    if (Inst->mayReadFromMemory() &&
        !(MemInst.isValid() && !MemInst.mayReadFromMemory()))
      LastStore = nullptr;

    // If this is a read-only call, process it.
    if (CallValue::canHandle(Inst)) {
      // If we have an available version of this call, and if it is the right
      // generation, replace this instruction.
      std::pair<Value *, unsigned> InVal = AvailableCalls.lookup(Inst);
      if (InVal.first != nullptr && InVal.second == CurrentGeneration) {
        DEBUG(dbgs() << "EarlyCSE CSE CALL: " << *Inst
                     << " to: " << *InVal.first << '\n');
        if (!Inst->use_empty())
          Inst->replaceAllUsesWith(InVal.first);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSECall;
        continue;
      }

      // Otherwise, remember that we have this instruction.
      AvailableCalls.insert(
          Inst, std::pair<Value *, unsigned>(Inst, CurrentGeneration));
      continue;
    }

    // A release fence requires that all stores complete before it, but does
    // not prevent the reordering of following loads 'before' the fence. As a
    // result, we don't need to consider it as writing to memory and don't need
    // to advance the generation. We do need to prevent DSE across the fence,
    // but that's handled above.
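    //
    // Sketch: in
    //
    //   store i32 1, i32* %p
    //   fence release
    //   %v = load i32, i32* %p
    //
    // the load can still be forwarded from the store because the generation
    // is left unchanged here, while LastStore was already cleared above.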
    if (FenceInst *FI = dyn_cast<FenceInst>(Inst))
      if (FI->getOrdering() == Release) {
        assert(Inst->mayReadFromMemory() && "relied on to prevent DSE above");
        continue;
      }

    // Okay, this isn't something we can CSE at all. Check to see if it is
    // something that could modify memory. If so, our available memory values
    // cannot be used so bump the generation count.
    if (Inst->mayWriteToMemory()) {
      ++CurrentGeneration;

      if (MemInst.isValid() && MemInst.isStore()) {
        // We do a trivial form of DSE if there are two stores to the same
        // location with no intervening loads. Delete the earlier store. Note
        // that we can delete an earlier simple store even if the following
        // one is an ordered/volatile/atomic store.
        if (LastStore) {
          ParseMemoryInst LastStoreMemInst(LastStore, TTI);
          assert(LastStoreMemInst.isSimple() && "Violated invariant");
          if (LastStoreMemInst.isMatchingMemLoc(MemInst)) {
            DEBUG(dbgs() << "EarlyCSE DEAD STORE: " << *LastStore
                         << " due to: " << *Inst << '\n');
            LastStore->eraseFromParent();
            Changed = true;
            ++NumDSE;
            LastStore = nullptr;
          }
          // fallthrough - we can exploit information about this store
        }

        // Okay, we just invalidated anything we knew about loaded values. Try
        // to salvage *something* by remembering that the stored value is a live
        // version of the pointer. It is safe to forward from volatile stores
        // to non-volatile loads, so we don't have to check for volatility of
        // the store.
        AvailableLoads.insert(
            MemInst.getPointerOperand(),
            LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
                      MemInst.isAtomic()));

        // Remember that this was the last normal store we saw for DSE.
        // Note that we can't delete an earlier atomic or volatile store in
        // favor of a later one which isn't. We could in principle remove an
        // earlier unordered store if the later one is also unordered.
        if (MemInst.isSimple())
          LastStore = Inst;
        else
          LastStore = nullptr;
      }
    }
  }

  return Changed;
}

bool EarlyCSE::run() {
  // Note, deque is being used here because there are significant performance
  // gains over vector when the container becomes very large due to the
  // specific access patterns. For more information see the mailing list
  // discussion on this:
  // http://lists.llvm.org/pipermail/llvm-commits/Week-of-Mon-20120116/135228.html
  std::deque<StackNode *> nodesToProcess;

  bool Changed = false;

  // Process the root node.
  nodesToProcess.push_back(new StackNode(
      AvailableValues, AvailableLoads, AvailableCalls, CurrentGeneration,
      DT.getRootNode(), DT.getRootNode()->begin(), DT.getRootNode()->end()));

  // Save the current generation.
  unsigned LiveOutGeneration = CurrentGeneration;

  // Process the stack.
  while (!nodesToProcess.empty()) {
    // Grab the first item off the stack. Set the current generation, remove
    // the node from the stack, and process it.
    StackNode *NodeToProcess = nodesToProcess.back();

    // Initialize class members.
    CurrentGeneration = NodeToProcess->currentGeneration();

    // Check if the node needs to be processed.
    if (!NodeToProcess->isProcessed()) {
      // Process the node.
      Changed |= processNode(NodeToProcess->node());
      NodeToProcess->childGeneration(CurrentGeneration);
      NodeToProcess->process();
    } else if (NodeToProcess->childIter() != NodeToProcess->end()) {
      // Push the next child onto the stack.
      DomTreeNode *child = NodeToProcess->nextChild();
      nodesToProcess.push_back(
          new StackNode(AvailableValues, AvailableLoads, AvailableCalls,
                        NodeToProcess->childGeneration(), child, child->begin(),
                        child->end()));
    } else {
      // It has been processed, and there are no more children to process,
      // so delete it and pop it off the stack.
      delete NodeToProcess;
      nodesToProcess.pop_back();
    }
  } // while (!nodes...)

  // Reset the current generation.
  CurrentGeneration = LiveOutGeneration;

  return Changed;
}

PreservedAnalyses EarlyCSEPass::run(Function &F,
                                    AnalysisManager<Function> *AM) {
  auto &TLI = AM->getResult<TargetLibraryAnalysis>(F);
  auto &TTI = AM->getResult<TargetIRAnalysis>(F);
  auto &DT = AM->getResult<DominatorTreeAnalysis>(F);
  auto &AC = AM->getResult<AssumptionAnalysis>(F);

  EarlyCSE CSE(TLI, TTI, DT, AC);

  if (!CSE.run())
    return PreservedAnalyses::all();

  // CSE preserves the dominator tree because it doesn't mutate the CFG.
  // FIXME: Bundle this with other CFG-preservation.
  PreservedAnalyses PA;
  PA.preserve<DominatorTreeAnalysis>();
  return PA;
}

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSELegacyPass : public FunctionPass {
public:
  static char ID;

  EarlyCSELegacyPass() : FunctionPass(ID) {
    initializeEarlyCSELegacyPassPass(*PassRegistry::getPassRegistry());
  }

  bool runOnFunction(Function &F) override {
    if (skipOptnoneFunction(F))
      return false;

    auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
    auto &TTI = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
    auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);

    EarlyCSE CSE(TLI, TTI, DT, AC);

    return CSE.run();
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.addPreserved<GlobalsAAWrapperPass>();
    AU.setPreservesCFG();
  }
};
}

char EarlyCSELegacyPass::ID = 0;

FunctionPass *llvm::createEarlyCSEPass() { return new EarlyCSELegacyPass(); }

INITIALIZE_PASS_BEGIN(EarlyCSELegacyPass, "early-cse", "Early CSE", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(EarlyCSELegacyPass, "early-cse", "Early CSE", false, false)