//===- EarlyCSE.cpp - Simple and fast CSE pass ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs a simple dominator tree walk that eliminates trivially
// redundant instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/EarlyCSE.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/ScopedHashTable.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/RecyclingAllocator.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/Local.h"
#include <deque>
using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "early-cse"

STATISTIC(NumSimplify, "Number of instructions simplified or DCE'd");
STATISTIC(NumCSE, "Number of instructions CSE'd");
STATISTIC(NumCSECVP, "Number of compare instructions CVP'd");
STATISTIC(NumCSELoad, "Number of load instructions CSE'd");
STATISTIC(NumCSECall, "Number of call instructions CSE'd");
STATISTIC(NumDSE, "Number of trivial dead stores removed");

//===----------------------------------------------------------------------===//
// SimpleValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available values in the scoped hash table.
struct SimpleValue {
  Instruction *Inst;

  SimpleValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // This can only handle non-void readnone functions.
    if (CallInst *CI = dyn_cast<CallInst>(Inst))
      return CI->doesNotAccessMemory() && !CI->getType()->isVoidTy();
    return isa<CastInst>(Inst) || isa<BinaryOperator>(Inst) ||
           isa<GetElementPtrInst>(Inst) || isa<CmpInst>(Inst) ||
           isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
           isa<InsertElementInst>(Inst) || isa<ShuffleVectorInst>(Inst) ||
           isa<ExtractValueInst>(Inst) || isa<InsertValueInst>(Inst);
  }
};
}
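
// As an illustrative example (IR sketch), both of the following are
// candidates for SimpleValue-based CSE:
//   %sum = add i32 %a, %b
//   %cmp = icmp eq i32 %x, %y
// Loads and memory-accessing calls are deliberately excluded here; their
// results depend on memory state and are handled by the separate load/call
// tables below.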

namespace llvm {
template <> struct DenseMapInfo<SimpleValue> {
  static inline SimpleValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline SimpleValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(SimpleValue Val);
  static bool isEqual(SimpleValue LHS, SimpleValue RHS);
};
}

unsigned DenseMapInfo<SimpleValue>::getHashValue(SimpleValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash in all of the operands as pointers.
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Inst)) {
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);
    if (BinOp->isCommutative() && BinOp->getOperand(0) > BinOp->getOperand(1))
      std::swap(LHS, RHS);

    return hash_combine(BinOp->getOpcode(), LHS, RHS);
  }

  if (CmpInst *CI = dyn_cast<CmpInst>(Inst)) {
    Value *LHS = CI->getOperand(0);
    Value *RHS = CI->getOperand(1);
    CmpInst::Predicate Pred = CI->getPredicate();
    if (Inst->getOperand(0) > Inst->getOperand(1)) {
      std::swap(LHS, RHS);
      Pred = CI->getSwappedPredicate();
    }
    return hash_combine(Inst->getOpcode(), Pred, LHS, RHS);
  }

  if (CastInst *CI = dyn_cast<CastInst>(Inst))
    return hash_combine(CI->getOpcode(), CI->getType(), CI->getOperand(0));

  if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Inst))
    return hash_combine(EVI->getOpcode(), EVI->getOperand(0),
                        hash_combine_range(EVI->idx_begin(), EVI->idx_end()));

  if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(Inst))
    return hash_combine(IVI->getOpcode(), IVI->getOperand(0),
                        IVI->getOperand(1),
                        hash_combine_range(IVI->idx_begin(), IVI->idx_end()));

  assert((isa<CallInst>(Inst) || isa<BinaryOperator>(Inst) ||
          isa<GetElementPtrInst>(Inst) || isa<SelectInst>(Inst) ||
          isa<ExtractElementInst>(Inst) || isa<InsertElementInst>(Inst) ||
          isa<ShuffleVectorInst>(Inst)) &&
         "Invalid/unknown instruction");

  // Mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}
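
// Illustrative sketch: because commutative operands are ordered by address
// before hashing, the two IR instructions
//   %x = add i32 %a, %b
//   %y = add i32 %b, %a
// receive the same hash value, letting the scoped hash table treat them as
// one expression (the concrete hash values themselves are arbitrary).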

bool DenseMapInfo<SimpleValue>::isEqual(SimpleValue LHS, SimpleValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;

  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;

  if (LHSI->getOpcode() != RHSI->getOpcode())
    return false;
  if (LHSI->isIdenticalToWhenDefined(RHSI))
    return true;

  // If we're not strictly identical, we still might be a commutable instruction
  if (BinaryOperator *LHSBinOp = dyn_cast<BinaryOperator>(LHSI)) {
    if (!LHSBinOp->isCommutative())
      return false;

    assert(isa<BinaryOperator>(RHSI) &&
           "same opcode, but different instruction type?");
    BinaryOperator *RHSBinOp = cast<BinaryOperator>(RHSI);

    // Commuted equality
    return LHSBinOp->getOperand(0) == RHSBinOp->getOperand(1) &&
           LHSBinOp->getOperand(1) == RHSBinOp->getOperand(0);
  }
  if (CmpInst *LHSCmp = dyn_cast<CmpInst>(LHSI)) {
    assert(isa<CmpInst>(RHSI) &&
           "same opcode, but different instruction type?");
    CmpInst *RHSCmp = cast<CmpInst>(RHSI);
    // Commuted equality
    return LHSCmp->getOperand(0) == RHSCmp->getOperand(1) &&
           LHSCmp->getOperand(1) == RHSCmp->getOperand(0) &&
           LHSCmp->getSwappedPredicate() == RHSCmp->getPredicate();
  }

  return false;
}
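
// For example, the commuted-compare case above makes
//   %c1 = icmp slt i32 %a, %b
//   %c2 = icmp sgt i32 %b, %a
// compare equal (an IR sketch): swapping both the operands and the predicate
// yields an identical comparison.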

//===----------------------------------------------------------------------===//
// CallValue
//===----------------------------------------------------------------------===//

namespace {
/// \brief Struct representing the available call values in the scoped hash
/// table.
struct CallValue {
  Instruction *Inst;

  CallValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // Don't value number anything that returns void.
    if (Inst->getType()->isVoidTy())
      return false;

    CallInst *CI = dyn_cast<CallInst>(Inst);
    if (!CI || !CI->onlyReadsMemory())
      return false;
    return true;
  }
};
}
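
// Illustrative example (IR sketch): two calls to a read-only function with
// identical arguments,
//   %r1 = call i32 @f(i32 %x)    ; @f only reads memory
//   %r2 = call i32 @f(i32 %x)
// can be commoned so %r2 reuses %r1, provided no intervening instruction may
// have written memory (checked via the generation count below).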

namespace llvm {
template <> struct DenseMapInfo<CallValue> {
  static inline CallValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }
  static inline CallValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }
  static unsigned getHashValue(CallValue Val);
  static bool isEqual(CallValue LHS, CallValue RHS);
};
}

unsigned DenseMapInfo<CallValue>::getHashValue(CallValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash all of the operands as pointers and mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<CallValue>::isEqual(CallValue LHS, CallValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;
  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;
  return LHSI->isIdenticalTo(RHSI);
}

//===----------------------------------------------------------------------===//
// EarlyCSE implementation
//===----------------------------------------------------------------------===//

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSE {
public:
  const TargetLibraryInfo &TLI;
  const TargetTransformInfo &TTI;
  DominatorTree &DT;
  AssumptionCache &AC;
  typedef RecyclingAllocator<
      BumpPtrAllocator, ScopedHashTableVal<SimpleValue, Value *>> AllocatorTy;
  typedef ScopedHashTable<SimpleValue, Value *, DenseMapInfo<SimpleValue>,
                          AllocatorTy> ScopedHTType;

  /// \brief A scoped hash table of the current values of all of our simple
  /// scalar expressions.
  ///
  /// As we walk down the domtree, we look to see if instructions are in this:
  /// if so, we replace them with what we find, otherwise we insert them so
  /// that dominated values can succeed in their lookup.
  ScopedHTType AvailableValues;

  /// A scoped hash table of the current values of previously encountered
  /// memory locations.
  ///
  /// This allows us to get efficient access to dominating loads or stores when
  /// we have a fully redundant load. In addition to the most recent load, we
  /// keep track of a generation count of the read, which is compared against
  /// the current generation count. The current generation count is incremented
  /// after every possibly writing memory operation, which ensures that we only
  /// CSE loads with other loads that have no intervening store. Ordering
  /// events (such as fences or atomic instructions) increment the generation
  /// count as well; essentially, we model these as writes to all possible
  /// locations. Note that atomic and/or volatile loads and stores can be
  /// present in the table; it is the responsibility of the consumer to inspect
  /// the atomicity/volatility if needed.
  struct LoadValue {
    Instruction *DefInst;
    unsigned Generation;
    int MatchingId;
    bool IsAtomic;
    bool IsInvariant;
    LoadValue()
        : DefInst(nullptr), Generation(0), MatchingId(-1), IsAtomic(false),
          IsInvariant(false) {}
    LoadValue(Instruction *Inst, unsigned Generation, unsigned MatchingId,
              bool IsAtomic, bool IsInvariant)
        : DefInst(Inst), Generation(Generation), MatchingId(MatchingId),
          IsAtomic(IsAtomic), IsInvariant(IsInvariant) {}
  };
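
  // Illustrative sketch of the generation mechanism:
  //   %v1 = load i32, i32* %p    ; recorded at generation G
  //   store i32 0, i32* %q       ; may write memory: generation becomes G+1
  //   %v2 = load i32, i32* %p    ; lookup finds generation G != G+1
  // Here %v2 is not CSE'd to %v1 because the intervening store bumped the
  // generation; without that store, %v2 would be replaced by %v1.
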
  typedef RecyclingAllocator<BumpPtrAllocator,
                             ScopedHashTableVal<Value *, LoadValue>>
      LoadMapAllocator;
  typedef ScopedHashTable<Value *, LoadValue, DenseMapInfo<Value *>,
                          LoadMapAllocator> LoadHTType;
  LoadHTType AvailableLoads;

  /// \brief A scoped hash table of the current values of read-only call
  /// values.
  ///
  /// It uses the same generation count as loads.
  typedef ScopedHashTable<CallValue, std::pair<Instruction *, unsigned>>
      CallHTType;
  CallHTType AvailableCalls;

  /// \brief This is the current generation of the memory value.
  unsigned CurrentGeneration;

  /// \brief Set up the EarlyCSE runner for a particular function.
  EarlyCSE(const TargetLibraryInfo &TLI, const TargetTransformInfo &TTI,
           DominatorTree &DT, AssumptionCache &AC)
      : TLI(TLI), TTI(TTI), DT(DT), AC(AC), CurrentGeneration(0) {}

  bool run();

private:
  // Almost a POD, but needs to call the constructors for the scoped hash
  // tables so that a new scope gets pushed on. These are RAII so that the
  // scope gets popped when the NodeScope is destroyed.
  class NodeScope {
  public:
    NodeScope(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls)
        : Scope(AvailableValues), LoadScope(AvailableLoads),
          CallScope(AvailableCalls) {}

  private:
    NodeScope(const NodeScope &) = delete;
    void operator=(const NodeScope &) = delete;

    ScopedHTType::ScopeTy Scope;
    LoadHTType::ScopeTy LoadScope;
    CallHTType::ScopeTy CallScope;
  };

  // Contains all the needed information to create a stack for doing a depth
  // first traversal of the tree. This includes scopes for values, loads, and
  // calls as well as the generation. There is a child iterator so that the
  // children do not need to be stored separately.
  class StackNode {
  public:
    StackNode(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              CallHTType &AvailableCalls, unsigned cg, DomTreeNode *n,
              DomTreeNode::iterator child, DomTreeNode::iterator end)
        : CurrentGeneration(cg), ChildGeneration(cg), Node(n), ChildIter(child),
          EndIter(end), Scopes(AvailableValues, AvailableLoads, AvailableCalls),
          Processed(false) {}

    // Accessors.
    unsigned currentGeneration() { return CurrentGeneration; }
    unsigned childGeneration() { return ChildGeneration; }
    void childGeneration(unsigned generation) { ChildGeneration = generation; }
    DomTreeNode *node() { return Node; }
    DomTreeNode::iterator childIter() { return ChildIter; }
    DomTreeNode *nextChild() {
      DomTreeNode *child = *ChildIter;
      ++ChildIter;
      return child;
    }
    DomTreeNode::iterator end() { return EndIter; }
    bool isProcessed() { return Processed; }
    void process() { Processed = true; }

  private:
    StackNode(const StackNode &) = delete;
    void operator=(const StackNode &) = delete;

    // Members.
    unsigned CurrentGeneration;
    unsigned ChildGeneration;
    DomTreeNode *Node;
    DomTreeNode::iterator ChildIter;
    DomTreeNode::iterator EndIter;
    NodeScope Scopes;
    bool Processed;
  };

  /// \brief Wrapper class to handle memory instructions, including loads,
  /// stores and intrinsic loads and stores defined by the target.
  class ParseMemoryInst {
  public:
    ParseMemoryInst(Instruction *Inst, const TargetTransformInfo &TTI)
        : IsTargetMemInst(false), Inst(Inst) {
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
        if (TTI.getTgtMemIntrinsic(II, Info) && Info.NumMemRefs == 1)
          IsTargetMemInst = true;
    }
    bool isLoad() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return isa<LoadInst>(Inst);
    }
    bool isStore() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return isa<StoreInst>(Inst);
    }
    bool isAtomic() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      return Inst->isAtomic();
    }
    bool isUnordered() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return true;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isUnordered();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isUnordered();
      }
      // Conservative answer
      return !Inst->isAtomic();
    }

    bool isVolatile() const {
      if (IsTargetMemInst) {
        assert(Info.IsSimple && "need to refine IsSimple in TTI");
        return false;
      }
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isVolatile();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isVolatile();
      }
      // Conservative answer
      return true;
    }

    bool isInvariantLoad() const {
      if (auto *LI = dyn_cast<LoadInst>(Inst))
        return LI->getMetadata(LLVMContext::MD_invariant_load) != nullptr;
      return false;
    }

    bool isMatchingMemLoc(const ParseMemoryInst &Inst) const {
      return (getPointerOperand() == Inst.getPointerOperand() &&
              getMatchingId() == Inst.getMatchingId());
    }
    bool isValid() const { return getPointerOperand() != nullptr; }

    // For regular (non-intrinsic) loads/stores, this is set to -1. For
    // intrinsic loads/stores, the id is retrieved from the corresponding
    // field in the MemIntrinsicInfo structure. That field contains
    // non-negative values only.
    int getMatchingId() const {
      if (IsTargetMemInst) return Info.MatchingId;
      return -1;
    }
    Value *getPointerOperand() const {
      if (IsTargetMemInst) return Info.PtrVal;
      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->getPointerOperand();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->getPointerOperand();
      }
      return nullptr;
    }
    bool mayReadFromMemory() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return Inst->mayReadFromMemory();
    }
    bool mayWriteToMemory() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return Inst->mayWriteToMemory();
    }

  private:
    bool IsTargetMemInst;
    MemIntrinsicInfo Info;
    Instruction *Inst;
  };

  bool processNode(DomTreeNode *Node);

  Value *getOrCreateResult(Value *Inst, Type *ExpectedType) const {
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      return LI;
    else if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
      return SI->getValueOperand();
    assert(isa<IntrinsicInst>(Inst) && "Instruction not supported");
    return TTI.getOrCreateResultFromMemIntrinsic(cast<IntrinsicInst>(Inst),
                                                 ExpectedType);
  }
};
}

bool EarlyCSE::processNode(DomTreeNode *Node) {
  bool Changed = false;
  BasicBlock *BB = Node->getBlock();

  // If this block has a single predecessor, then the predecessor is the parent
  // of the domtree node and all of the live-out memory values are still
  // current in this block. If this block has multiple predecessors, then they
  // could have invalidated the live-out memory values of our parent value.
  // For now, just be conservative and invalidate memory if this block has
  // multiple predecessors.
  if (!BB->getSinglePredecessor())
    ++CurrentGeneration;
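
  // E.g. in a diamond CFG (A -> B, A -> C, B/C -> D), when we reach the merge
  // block D we cannot tell whether B or C wrote memory, so the generation
  // bump above conservatively invalidates all remembered loads and calls
  // (an illustrative sketch of the reasoning).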

  // If this node has a single predecessor which ends in a conditional branch,
  // we can infer the value of the branch condition given that we took this
  // path. We need the single predecessor to ensure there's not another path
  // which reaches this block where the condition might hold a different
  // value. Since we're adding this to the scoped hash table (like any other
  // def), it will have been popped if we encounter a future merge block.
  if (BasicBlock *Pred = BB->getSinglePredecessor())
    if (auto *BI = dyn_cast<BranchInst>(Pred->getTerminator()))
      if (BI->isConditional())
        if (auto *CondInst = dyn_cast<Instruction>(BI->getCondition()))
          if (SimpleValue::canHandle(CondInst)) {
            assert(BI->getSuccessor(0) == BB || BI->getSuccessor(1) == BB);
            auto *ConditionalConstant = (BI->getSuccessor(0) == BB) ?
                ConstantInt::getTrue(BB->getContext()) :
                ConstantInt::getFalse(BB->getContext());
            AvailableValues.insert(CondInst, ConditionalConstant);
            DEBUG(dbgs() << "EarlyCSE CVP: Add conditional value for '"
                         << CondInst->getName() << "' as " << *ConditionalConstant
                         << " in " << BB->getName() << "\n");
            // Replace all dominated uses with the known value.
            if (unsigned Count =
                    replaceDominatedUsesWith(CondInst, ConditionalConstant, DT,
                                             BasicBlockEdge(Pred, BB))) {
              Changed = true;
              NumCSECVP = NumCSECVP + Count;
            }
          }
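
  // Illustrative example: given a predecessor ending in
  //   br i1 %cond, label %taken, label %other
  // within %taken (when it is reached only along that edge), %cond is
  // recorded as 'true', so recomputations and dominated uses of %cond fold
  // to the known constant.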

  /// LastStore - Keep track of the last non-volatile store that we saw... for
  /// as long as there is no instruction that reads memory. If we see a store
  /// to the same location, we delete the dead store. This zaps trivial dead
  /// stores which can occur in bitfield code among other things.
  Instruction *LastStore = nullptr;

  const DataLayout &DL = BB->getModule()->getDataLayout();

  // See if any instructions in the block can be eliminated. If so, do it. If
  // not, add them to AvailableValues.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E;) {
    Instruction *Inst = &*I++;

    // Dead instructions should just be removed.
    if (isInstructionTriviallyDead(Inst, &TLI)) {
      DEBUG(dbgs() << "EarlyCSE DCE: " << *Inst << '\n');
      Inst->eraseFromParent();
      Changed = true;
      ++NumSimplify;
      continue;
    }

    // Skip assume intrinsics, they don't really have side effects (although
    // they're marked as such to ensure preservation of control dependencies),
    // and this pass will not disturb any of the assumption's control
    // dependencies.
    if (match(Inst, m_Intrinsic<Intrinsic::assume>())) {
      DEBUG(dbgs() << "EarlyCSE skipping assumption: " << *Inst << '\n');
      continue;
    }

    if (match(Inst, m_Intrinsic<Intrinsic::experimental_guard>())) {
      if (auto *CondI =
              dyn_cast<Instruction>(cast<CallInst>(Inst)->getArgOperand(0))) {
        // The condition we're guarding on here is true for all dominated
        // locations.
        if (SimpleValue::canHandle(CondI))
          AvailableValues.insert(CondI, ConstantInt::getTrue(BB->getContext()));
      }

      // Guard intrinsics read all memory, but don't write any memory.
      // Accordingly, don't update the generation but consume the last store (to
      // avoid an incorrect DSE).
      LastStore = nullptr;
      continue;
    }
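
    // Illustrative sketch: after
    //   call void (i1, ...) @llvm.experimental.guard(i1 %c) [ "deopt"() ]
    // execution continues only if %c held, so a dominated recomputation of %c
    // can be folded to 'true' via the table insertion above.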

    // If the instruction can be simplified (e.g. X+0 = X) then replace it with
    // its simpler value.
    if (Value *V = SimplifyInstruction(Inst, DL, &TLI, &DT, &AC)) {
      DEBUG(dbgs() << "EarlyCSE Simplify: " << *Inst << " to: " << *V << '\n');
      Inst->replaceAllUsesWith(V);
      Inst->eraseFromParent();
      Changed = true;
      ++NumSimplify;
      continue;
    }

    // If this is a simple instruction that we can value number, process it.
    if (SimpleValue::canHandle(Inst)) {
      // See if the instruction has an available value. If so, use it.
      if (Value *V = AvailableValues.lookup(Inst)) {
        DEBUG(dbgs() << "EarlyCSE CSE: " << *Inst << " to: " << *V << '\n');
        if (auto *I = dyn_cast<Instruction>(V))
          I->andIRFlags(Inst);
        Inst->replaceAllUsesWith(V);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSE;
        continue;
      }

      // Otherwise, just remember that this value is available.
      AvailableValues.insert(Inst, Inst);
      continue;
    }

    ParseMemoryInst MemInst(Inst, TTI);
    // If this is a non-volatile load, process it.
    if (MemInst.isValid() && MemInst.isLoad()) {
      // (Conservatively) we can't peek past the ordering implied by this
      // operation, but we can add this load to our set of available values.
      if (MemInst.isVolatile() || !MemInst.isUnordered()) {
        LastStore = nullptr;
        ++CurrentGeneration;
      }

      // If we have an available version of this load, and if it is the right
      // generation or the load is known to be from an invariant location,
      // replace this instruction.
      //
      // A dominating invariant load implies that the location loaded from is
      // unchanging beginning at the point of the invariant load, so the load
      // we're CSE'ing _away_ does not need to be invariant, only the available
      // load we're CSE'ing _to_ does.
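      //
      // Illustrative sketch:
      //   %v1 = load i32, i32* %p, !invariant.load !0
      //   store i32 0, i32* %q     ; bumps the generation
      //   %v2 = load i32, i32* %p
      // Here %v2 may still be replaced by %v1 despite the generation
      // mismatch, because the available (dominating) load was invariant.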
      LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand());
      if (InVal.DefInst != nullptr &&
          (InVal.Generation == CurrentGeneration || InVal.IsInvariant) &&
          InVal.MatchingId == MemInst.getMatchingId() &&
          // We don't yet handle removing loads with ordering of any kind.
          !MemInst.isVolatile() && MemInst.isUnordered() &&
          // We can't replace an atomic load with one which isn't also atomic.
          InVal.IsAtomic >= MemInst.isAtomic()) {
        Value *Op = getOrCreateResult(InVal.DefInst, Inst->getType());
        if (Op != nullptr) {
          DEBUG(dbgs() << "EarlyCSE CSE LOAD: " << *Inst
                       << " to: " << *InVal.DefInst << '\n');
          if (!Inst->use_empty())
            Inst->replaceAllUsesWith(Op);
          Inst->eraseFromParent();
          Changed = true;
          ++NumCSELoad;
          continue;
        }
      }

      // Otherwise, remember that we have this instruction.
      AvailableLoads.insert(
          MemInst.getPointerOperand(),
          LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
                    MemInst.isAtomic(), MemInst.isInvariantLoad()));
      LastStore = nullptr;
      continue;
    }

    // If this instruction may read from memory, forget LastStore.
    // Load/store intrinsics will indicate both a read and a write to
    // memory. The target may override this (e.g. so that a store intrinsic
    // does not read from memory, and thus will be treated the same as a
    // regular store for commoning purposes).
    if (Inst->mayReadFromMemory() &&
        !(MemInst.isValid() && !MemInst.mayReadFromMemory()))
      LastStore = nullptr;

    // If this is a read-only call, process it.
    if (CallValue::canHandle(Inst)) {
      // If we have an available version of this call, and if it is the right
      // generation, replace this instruction.
      std::pair<Instruction *, unsigned> InVal = AvailableCalls.lookup(Inst);
      if (InVal.first != nullptr && InVal.second == CurrentGeneration) {
        DEBUG(dbgs() << "EarlyCSE CSE CALL: " << *Inst
                     << " to: " << *InVal.first << '\n');
        if (!Inst->use_empty())
          Inst->replaceAllUsesWith(InVal.first);
        Inst->eraseFromParent();
        Changed = true;
        ++NumCSECall;
        continue;
      }

      // Otherwise, remember that we have this instruction.
      AvailableCalls.insert(
          Inst, std::pair<Instruction *, unsigned>(Inst, CurrentGeneration));
      continue;
    }

    // A release fence requires that all stores complete before it, but does
    // not prevent the reordering of following loads 'before' the fence. As a
    // result, we don't need to consider it as writing to memory and don't need
    // to advance the generation. We do need to prevent DSE across the fence,
    // but that's handled above.
    if (FenceInst *FI = dyn_cast<FenceInst>(Inst))
      if (FI->getOrdering() == AtomicOrdering::Release) {
        assert(Inst->mayReadFromMemory() && "relied on to prevent DSE above");
        continue;
      }
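
    // E.g. a 'fence release' between two loads of the same pointer does not
    // stop the second load from being CSE'd to the first (a sketch of the
    // reasoning above), whereas any other fence falls through to the
    // mayWriteToMemory() check below and bumps the generation.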

    // Write-back DSE - If we write back the same value we just loaded from
    // the same location and haven't passed any intervening writes or ordering
    // operations, we can remove the write. The primary benefit is in allowing
    // the available load table to remain valid and value forward past where
    // the store originally was.
    if (MemInst.isValid() && MemInst.isStore()) {
      LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand());
      if (InVal.DefInst &&
          InVal.DefInst == getOrCreateResult(Inst, InVal.DefInst->getType()) &&
          InVal.Generation == CurrentGeneration &&
          InVal.MatchingId == MemInst.getMatchingId() &&
          // We don't yet handle removing stores with ordering of any kind.
          !MemInst.isVolatile() && MemInst.isUnordered()) {
        assert((!LastStore ||
                ParseMemoryInst(LastStore, TTI).getPointerOperand() ==
                    MemInst.getPointerOperand()) &&
               "can't have an intervening store!");
        DEBUG(dbgs() << "EarlyCSE DSE (writeback): " << *Inst << '\n');
        Inst->eraseFromParent();
        Changed = true;
        ++NumDSE;
        // We can avoid incrementing the generation count since we were able
        // to eliminate this store.
        continue;
      }
    }
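
    // Illustrative example of the write-back case:
    //   %v = load i32, i32* %p
    //   store i32 %v, i32* %p    ; removable: stores back the loaded value
    // The store is deleted and, since memory is unchanged, the generation
    // need not be incremented.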

    // Okay, this isn't something we can CSE at all. Check to see if it is
    // something that could modify memory. If so, our available memory values
    // cannot be used so bump the generation count.
    if (Inst->mayWriteToMemory()) {
      ++CurrentGeneration;

      if (MemInst.isValid() && MemInst.isStore()) {
        // We do a trivial form of DSE if there are two stores to the same
        // location with no intervening loads. Delete the earlier store.
        // At the moment, we don't remove ordered stores, but do remove
        // unordered atomic stores. There's no special requirement (for
        // unordered atomics) about removing atomic stores only in favor of
        // other atomic stores since we're going to execute the non-atomic
        // one anyway and the atomic one might never have become visible.
        if (LastStore) {
          ParseMemoryInst LastStoreMemInst(LastStore, TTI);
          assert(LastStoreMemInst.isUnordered() &&
                 !LastStoreMemInst.isVolatile() &&
                 "Violated invariant");
          if (LastStoreMemInst.isMatchingMemLoc(MemInst)) {
            DEBUG(dbgs() << "EarlyCSE DEAD STORE: " << *LastStore
                         << " due to: " << *Inst << '\n');
            LastStore->eraseFromParent();
            Changed = true;
            ++NumDSE;
            LastStore = nullptr;
          }
          // fallthrough - we can exploit information about this store
        }
Chris Lattner9e5e9ed2011-01-03 04:17:24 +0000758 // Okay, we just invalidated anything we knew about loaded values. Try
759 // to salvage *something* by remembering that the stored value is a live
760 // version of the pointer. It is safe to forward from volatile stores
761 // to non-volatile loads, so we don't have to check for volatility of
762 // the store.
Arnaud A. de Grandmaisona6178a12015-10-07 07:41:29 +0000763 AvailableLoads.insert(
Philip Reames9e5e2d62015-12-07 22:41:23 +0000764 MemInst.getPointerOperand(),
Philip Reames8fc2cbf2015-12-08 21:45:41 +0000765 LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(),
Sanjoy Das07c65212016-06-16 20:47:57 +0000766 MemInst.isAtomic(), false));
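
        // Illustrative sketch of the store-to-load forwarding this enables:
        //   store i32 %x, i32* %p
        //   %v = load i32, i32* %p   ; CSE'd to %x by the load handling above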

        // Remember that this was the last unordered store we saw for DSE. We
        // don't yet handle DSE on ordered or volatile stores since we don't
        // have a good way to model the ordering requirement for following
        // passes once the store is removed. We could insert a fence, but
        // since fences are slightly stronger than stores in their ordering,
        // it's not clear this is a profitable transform. Another option would
        // be to merge the ordering with that of the post dominating store.
        if (MemInst.isUnordered() && !MemInst.isVolatile())
          LastStore = Inst;
        else
          LastStore = nullptr;
      }
    }
  }

  return Changed;
}

bool EarlyCSE::run() {
  // Note, deque is being used here because there are significant performance
  // gains over vector when the container becomes very large due to the
  // specific access patterns. For more information see the mailing list
  // discussion on this:
  // http://lists.llvm.org/pipermail/llvm-commits/Week-of-Mon-20120116/135228.html
  std::deque<StackNode *> nodesToProcess;

  bool Changed = false;

  // Process the root node.
  nodesToProcess.push_back(new StackNode(
      AvailableValues, AvailableLoads, AvailableCalls, CurrentGeneration,
      DT.getRootNode(), DT.getRootNode()->begin(), DT.getRootNode()->end()));

  // Save the current generation.
  unsigned LiveOutGeneration = CurrentGeneration;

  // Process the stack.
  while (!nodesToProcess.empty()) {
    // Grab the first item off the stack. Set the current generation, remove
    // the node from the stack, and process it.
    StackNode *NodeToProcess = nodesToProcess.back();

    // Initialize class members.
    CurrentGeneration = NodeToProcess->currentGeneration();

    // Check if the node needs to be processed.
    if (!NodeToProcess->isProcessed()) {
      // Process the node.
      Changed |= processNode(NodeToProcess->node());
      NodeToProcess->childGeneration(CurrentGeneration);
      NodeToProcess->process();
    } else if (NodeToProcess->childIter() != NodeToProcess->end()) {
      // Push the next child onto the stack.
      DomTreeNode *child = NodeToProcess->nextChild();
      nodesToProcess.push_back(
          new StackNode(AvailableValues, AvailableLoads, AvailableCalls,
                        NodeToProcess->childGeneration(), child, child->begin(),
                        child->end()));
    } else {
      // It has been processed, and there are no more children to process,
      // so delete it and pop it off the stack.
      delete NodeToProcess;
      nodesToProcess.pop_back();
    }
  } // while (!nodes...)

  // Reset the current generation.
  CurrentGeneration = LiveOutGeneration;

  return Changed;
}

PreservedAnalyses EarlyCSEPass::run(Function &F,
                                    AnalysisManager<Function> &AM) {
  auto &TLI = AM.getResult<TargetLibraryAnalysis>(F);
  auto &TTI = AM.getResult<TargetIRAnalysis>(F);
  auto &DT = AM.getResult<DominatorTreeAnalysis>(F);
  auto &AC = AM.getResult<AssumptionAnalysis>(F);

  EarlyCSE CSE(TLI, TTI, DT, AC);

  if (!CSE.run())
    return PreservedAnalyses::all();

  // CSE preserves the dominator tree because it doesn't mutate the CFG.
  // FIXME: Bundle this with other CFG-preservation.
  PreservedAnalyses PA;
  PA.preserve<DominatorTreeAnalysis>();
  PA.preserve<GlobalsAA>();
  return PA;
}

namespace {
/// \brief A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSELegacyPass : public FunctionPass {
public:
  static char ID;

  EarlyCSELegacyPass() : FunctionPass(ID) {
    initializeEarlyCSELegacyPassPass(*PassRegistry::getPassRegistry());
  }

  bool runOnFunction(Function &F) override {
    if (skipFunction(F))
      return false;

    auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
    auto &TTI = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
    auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);

    EarlyCSE CSE(TLI, TTI, DT, AC);

    return CSE.run();
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.addPreserved<GlobalsAAWrapperPass>();
    AU.setPreservesCFG();
  }
};
}

char EarlyCSELegacyPass::ID = 0;

FunctionPass *llvm::createEarlyCSEPass() { return new EarlyCSELegacyPass(); }

INITIALIZE_PASS_BEGIN(EarlyCSELegacyPass, "early-cse", "Early CSE", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(EarlyCSELegacyPass, "early-cse", "Early CSE", false, false)