//===- EarlyCSE.cpp - Simple and fast CSE pass ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs a simple dominator tree walk that eliminates trivially
// redundant instructions.
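//
// For example (an illustrative IR snippet, not taken from an actual test), in
//
//   %a = add i32 %x, %y
//   %b = add i32 %x, %y
//
// the second add is trivially redundant, so uses of %b are rewritten to %a
// and %b is erased.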
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/EarlyCSE.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/ScopedHashTable.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/Utils/Local.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/RecyclingAllocator.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include <cassert>
#include <deque>
#include <memory>
#include <utility>
using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "early-cse"

STATISTIC(NumSimplify, "Number of instructions simplified or DCE'd");
STATISTIC(NumCSE,      "Number of instructions CSE'd");
STATISTIC(NumCSECVP,   "Number of compare instructions CVP'd");
STATISTIC(NumCSELoad,  "Number of load instructions CSE'd");
STATISTIC(NumCSECall,  "Number of call instructions CSE'd");
STATISTIC(NumDSE,      "Number of trivial dead stores removed");

DEBUG_COUNTER(CSECounter, "early-cse",
              "Controls which instructions are removed");
//===----------------------------------------------------------------------===//
// SimpleValue
//===----------------------------------------------------------------------===//

namespace {

/// Struct representing the available values in the scoped hash table.
struct SimpleValue {
  Instruction *Inst;

  SimpleValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // This can only handle non-void readnone functions.
    if (CallInst *CI = dyn_cast<CallInst>(Inst))
      return CI->doesNotAccessMemory() && !CI->getType()->isVoidTy();
    return isa<CastInst>(Inst) || isa<BinaryOperator>(Inst) ||
           isa<GetElementPtrInst>(Inst) || isa<CmpInst>(Inst) ||
           isa<SelectInst>(Inst) || isa<ExtractElementInst>(Inst) ||
           isa<InsertElementInst>(Inst) || isa<ShuffleVectorInst>(Inst) ||
           isa<ExtractValueInst>(Inst) || isa<InsertValueInst>(Inst);
  }
};

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<SimpleValue> {
  static inline SimpleValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }

  static inline SimpleValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static unsigned getHashValue(SimpleValue Val);
  static bool isEqual(SimpleValue LHS, SimpleValue RHS);
};

} // end namespace llvm

unsigned DenseMapInfo<SimpleValue>::getHashValue(SimpleValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash in all of the operands as pointers.
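  // Commutative operations order their operands canonically (by pointer
  // value) before hashing, so e.g. "add %a, %b" and "add %b, %a" produce
  // the same hash and can then be found equal by isEqual below.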
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Inst)) {
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);
    if (BinOp->isCommutative() && BinOp->getOperand(0) > BinOp->getOperand(1))
      std::swap(LHS, RHS);

    return hash_combine(BinOp->getOpcode(), LHS, RHS);
  }

  if (CmpInst *CI = dyn_cast<CmpInst>(Inst)) {
    Value *LHS = CI->getOperand(0);
    Value *RHS = CI->getOperand(1);
    CmpInst::Predicate Pred = CI->getPredicate();
    if (Inst->getOperand(0) > Inst->getOperand(1)) {
      std::swap(LHS, RHS);
      Pred = CI->getSwappedPredicate();
    }
    return hash_combine(Inst->getOpcode(), Pred, LHS, RHS);
  }

  // Hash min/max/abs (cmp + select) to allow for commuted operands.
  // Min/max may also have a non-canonical compare predicate (e.g., the
  // compare for smin may use 'sgt' rather than 'slt'), and non-canonical
  // operands in the compare.
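  // For example (illustrative), both of these compute a signed minimum and
  // should hash and compare equal:
  //   %c1 = icmp slt i32 %a, %b
  //   %m1 = select i1 %c1, i32 %a, i32 %b
  //   %c2 = icmp sgt i32 %b, %a
  //   %m2 = select i1 %c2, i32 %a, i32 %b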
  Value *A, *B;
  SelectPatternFlavor SPF = matchSelectPattern(Inst, A, B).Flavor;
  // TODO: We should also detect FP min/max.
  if (SPF == SPF_SMIN || SPF == SPF_SMAX ||
      SPF == SPF_UMIN || SPF == SPF_UMAX) {
    if (A > B)
      std::swap(A, B);
    return hash_combine(Inst->getOpcode(), SPF, A, B);
  }
  if (SPF == SPF_ABS || SPF == SPF_NABS) {
    // ABS/NABS always puts the input in A and its negation in B.
    return hash_combine(Inst->getOpcode(), SPF, A, B);
  }

  if (CastInst *CI = dyn_cast<CastInst>(Inst))
    return hash_combine(CI->getOpcode(), CI->getType(), CI->getOperand(0));

  if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Inst))
    return hash_combine(EVI->getOpcode(), EVI->getOperand(0),
                        hash_combine_range(EVI->idx_begin(), EVI->idx_end()));

  if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(Inst))
    return hash_combine(IVI->getOpcode(), IVI->getOperand(0),
                        IVI->getOperand(1),
                        hash_combine_range(IVI->idx_begin(), IVI->idx_end()));

  assert((isa<CallInst>(Inst) || isa<BinaryOperator>(Inst) ||
          isa<GetElementPtrInst>(Inst) || isa<SelectInst>(Inst) ||
          isa<ExtractElementInst>(Inst) || isa<InsertElementInst>(Inst) ||
          isa<ShuffleVectorInst>(Inst)) &&
         "Invalid/unknown instruction");

  // Mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<SimpleValue>::isEqual(SimpleValue LHS, SimpleValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;

  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;

  if (LHSI->getOpcode() != RHSI->getOpcode())
    return false;
  if (LHSI->isIdenticalToWhenDefined(RHSI))
    return true;

  // If we're not strictly identical, we might still be equivalent commutable
  // instructions.
  if (BinaryOperator *LHSBinOp = dyn_cast<BinaryOperator>(LHSI)) {
    if (!LHSBinOp->isCommutative())
      return false;

    assert(isa<BinaryOperator>(RHSI) &&
           "same opcode, but different instruction type?");
    BinaryOperator *RHSBinOp = cast<BinaryOperator>(RHSI);

    // Commuted equality
    return LHSBinOp->getOperand(0) == RHSBinOp->getOperand(1) &&
           LHSBinOp->getOperand(1) == RHSBinOp->getOperand(0);
  }
  if (CmpInst *LHSCmp = dyn_cast<CmpInst>(LHSI)) {
    assert(isa<CmpInst>(RHSI) &&
           "same opcode, but different instruction type?");
    CmpInst *RHSCmp = cast<CmpInst>(RHSI);
    // Commuted equality
    return LHSCmp->getOperand(0) == RHSCmp->getOperand(1) &&
           LHSCmp->getOperand(1) == RHSCmp->getOperand(0) &&
           LHSCmp->getSwappedPredicate() == RHSCmp->getPredicate();
  }

  // Min/max/abs can occur with commuted operands, non-canonical predicates,
  // and/or non-canonical operands.
  Value *LHSA, *LHSB;
  SelectPatternFlavor LSPF = matchSelectPattern(LHSI, LHSA, LHSB).Flavor;
  // TODO: We should also detect FP min/max.
  if (LSPF == SPF_SMIN || LSPF == SPF_SMAX ||
      LSPF == SPF_UMIN || LSPF == SPF_UMAX ||
      LSPF == SPF_ABS || LSPF == SPF_NABS) {
    Value *RHSA, *RHSB;
    SelectPatternFlavor RSPF = matchSelectPattern(RHSI, RHSA, RHSB).Flavor;
    if (LSPF == RSPF) {
      // Abs results are placed in a defined order by matchSelectPattern.
      if (LSPF == SPF_ABS || LSPF == SPF_NABS)
        return LHSA == RHSA && LHSB == RHSB;
      return ((LHSA == RHSA && LHSB == RHSB) ||
              (LHSA == RHSB && LHSB == RHSA));
    }
  }

  return false;
}

//===----------------------------------------------------------------------===//
// CallValue
//===----------------------------------------------------------------------===//

namespace {

/// Struct representing the available call values in the scoped hash
/// table.
struct CallValue {
  Instruction *Inst;

  CallValue(Instruction *I) : Inst(I) {
    assert((isSentinel() || canHandle(I)) && "Inst can't be handled!");
  }

  bool isSentinel() const {
    return Inst == DenseMapInfo<Instruction *>::getEmptyKey() ||
           Inst == DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static bool canHandle(Instruction *Inst) {
    // Don't value number anything that returns void.
    if (Inst->getType()->isVoidTy())
      return false;

    CallInst *CI = dyn_cast<CallInst>(Inst);
    if (!CI || !CI->onlyReadsMemory())
      return false;
    return true;
  }
};

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<CallValue> {
  static inline CallValue getEmptyKey() {
    return DenseMapInfo<Instruction *>::getEmptyKey();
  }

  static inline CallValue getTombstoneKey() {
    return DenseMapInfo<Instruction *>::getTombstoneKey();
  }

  static unsigned getHashValue(CallValue Val);
  static bool isEqual(CallValue LHS, CallValue RHS);
};

} // end namespace llvm

unsigned DenseMapInfo<CallValue>::getHashValue(CallValue Val) {
  Instruction *Inst = Val.Inst;
  // Hash all of the operands as pointers and mix in the opcode.
  return hash_combine(
      Inst->getOpcode(),
      hash_combine_range(Inst->value_op_begin(), Inst->value_op_end()));
}

bool DenseMapInfo<CallValue>::isEqual(CallValue LHS, CallValue RHS) {
  Instruction *LHSI = LHS.Inst, *RHSI = RHS.Inst;
  if (LHS.isSentinel() || RHS.isSentinel())
    return LHSI == RHSI;
  return LHSI->isIdenticalTo(RHSI);
}

//===----------------------------------------------------------------------===//
// EarlyCSE implementation
//===----------------------------------------------------------------------===//

namespace {

/// A simple and fast domtree-based CSE pass.
///
/// This pass does a simple depth-first walk over the dominator tree,
/// eliminating trivially redundant instructions and using instsimplify to
/// canonicalize things as it goes. It is intended to be fast and catch obvious
/// cases so that instcombine and other passes are more effective. It is
/// expected that a later pass of GVN will catch the interesting/hard cases.
class EarlyCSE {
public:
  const TargetLibraryInfo &TLI;
  const TargetTransformInfo &TTI;
  DominatorTree &DT;
  AssumptionCache &AC;
  const SimplifyQuery SQ;
  MemorySSA *MSSA;
  std::unique_ptr<MemorySSAUpdater> MSSAUpdater;

  using AllocatorTy =
      RecyclingAllocator<BumpPtrAllocator,
                         ScopedHashTableVal<SimpleValue, Value *>>;
  using ScopedHTType =
      ScopedHashTable<SimpleValue, Value *, DenseMapInfo<SimpleValue>,
                      AllocatorTy>;

  /// A scoped hash table of the current values of all of our simple
  /// scalar expressions.
  ///
  /// As we walk down the domtree, we look to see if instructions are in this:
  /// if so, we replace them with what we find, otherwise we insert them so
  /// that dominated values can succeed in their lookup.
  ScopedHTType AvailableValues;

  /// A scoped hash table of the current values of previously encountered
  /// memory locations.
  ///
  /// This allows us to get efficient access to dominating loads or stores when
  /// we have a fully redundant load.  In addition to the most recent load, we
  /// keep track of a generation count of the read, which is compared against
  /// the current generation count.  The current generation count is incremented
  /// after every possibly writing memory operation, which ensures that we only
  /// CSE loads with other loads that have no intervening store.  Ordering
  /// events (such as fences or atomic instructions) increment the generation
  /// count as well; essentially, we model these as writes to all possible
  /// locations.  Note that atomic and/or volatile loads and stores can be
  /// present in the table; it is the responsibility of the consumer to inspect
  /// the atomicity/volatility if needed.
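  ///
  /// For example (illustrative IR), the second load below can reuse %v1 only
  /// because nothing that may write memory (and thus bump the generation)
  /// appears between the two loads:
  ///
  ///   %v1 = load i32, i32* %p
  ///   %v2 = load i32, i32* %p   ; CSE'd to %v1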
  struct LoadValue {
    Instruction *DefInst = nullptr;
    unsigned Generation = 0;
    int MatchingId = -1;
    bool IsAtomic = false;

    LoadValue() = default;
    LoadValue(Instruction *Inst, unsigned Generation, unsigned MatchingId,
              bool IsAtomic)
        : DefInst(Inst), Generation(Generation), MatchingId(MatchingId),
          IsAtomic(IsAtomic) {}
  };

  using LoadMapAllocator =
      RecyclingAllocator<BumpPtrAllocator,
                         ScopedHashTableVal<Value *, LoadValue>>;
  using LoadHTType =
      ScopedHashTable<Value *, LoadValue, DenseMapInfo<Value *>,
                      LoadMapAllocator>;

  LoadHTType AvailableLoads;

  // A scoped hash table mapping memory locations (represented as typed
  // addresses) to generation numbers at which that memory location became
  // (henceforth indefinitely) invariant.
  using InvariantMapAllocator =
      RecyclingAllocator<BumpPtrAllocator,
                         ScopedHashTableVal<MemoryLocation, unsigned>>;
  using InvariantHTType =
      ScopedHashTable<MemoryLocation, unsigned, DenseMapInfo<MemoryLocation>,
                      InvariantMapAllocator>;
  InvariantHTType AvailableInvariants;

  /// A scoped hash table of the current values of read-only call
  /// values.
  ///
  /// It uses the same generation count as loads.
  using CallHTType =
      ScopedHashTable<CallValue, std::pair<Instruction *, unsigned>>;
  CallHTType AvailableCalls;

  /// This is the current generation of the memory value.
  unsigned CurrentGeneration = 0;

  /// Set up the EarlyCSE runner for a particular function.
  EarlyCSE(const DataLayout &DL, const TargetLibraryInfo &TLI,
           const TargetTransformInfo &TTI, DominatorTree &DT,
           AssumptionCache &AC, MemorySSA *MSSA)
      : TLI(TLI), TTI(TTI), DT(DT), AC(AC), SQ(DL, &TLI, &DT, &AC), MSSA(MSSA),
        MSSAUpdater(llvm::make_unique<MemorySSAUpdater>(MSSA)) {}

  bool run();

private:
  // Almost a POD, but needs to call the constructors for the scoped hash
  // tables so that a new scope gets pushed on. These are RAII so that the
  // scope gets popped when the NodeScope is destroyed.
  class NodeScope {
  public:
    NodeScope(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              InvariantHTType &AvailableInvariants, CallHTType &AvailableCalls)
        : Scope(AvailableValues), LoadScope(AvailableLoads),
          InvariantScope(AvailableInvariants), CallScope(AvailableCalls) {}
    NodeScope(const NodeScope &) = delete;
    NodeScope &operator=(const NodeScope &) = delete;

  private:
    ScopedHTType::ScopeTy Scope;
    LoadHTType::ScopeTy LoadScope;
    InvariantHTType::ScopeTy InvariantScope;
    CallHTType::ScopeTy CallScope;
  };

  // Contains all the needed information to create a stack for doing a depth
  // first traversal of the tree. This includes scopes for values, loads, and
  // calls as well as the generation. There is a child iterator so that the
  // children do not need to be stored separately.
  class StackNode {
  public:
    StackNode(ScopedHTType &AvailableValues, LoadHTType &AvailableLoads,
              InvariantHTType &AvailableInvariants, CallHTType &AvailableCalls,
              unsigned cg, DomTreeNode *n, DomTreeNode::iterator child,
              DomTreeNode::iterator end)
        : CurrentGeneration(cg), ChildGeneration(cg), Node(n), ChildIter(child),
          EndIter(end),
          Scopes(AvailableValues, AvailableLoads, AvailableInvariants,
                 AvailableCalls) {}
    StackNode(const StackNode &) = delete;
    StackNode &operator=(const StackNode &) = delete;

    // Accessors.
    unsigned currentGeneration() { return CurrentGeneration; }
    unsigned childGeneration() { return ChildGeneration; }
    void childGeneration(unsigned generation) { ChildGeneration = generation; }
    DomTreeNode *node() { return Node; }
    DomTreeNode::iterator childIter() { return ChildIter; }

    DomTreeNode *nextChild() {
      DomTreeNode *child = *ChildIter;
      ++ChildIter;
      return child;
    }

    DomTreeNode::iterator end() { return EndIter; }
    bool isProcessed() { return Processed; }
    void process() { Processed = true; }

  private:
    unsigned CurrentGeneration;
    unsigned ChildGeneration;
    DomTreeNode *Node;
    DomTreeNode::iterator ChildIter;
    DomTreeNode::iterator EndIter;
    NodeScope Scopes;
    bool Processed = false;
  };

  /// Wrapper class to handle memory instructions, including loads,
  /// stores and intrinsic loads and stores defined by the target.
  class ParseMemoryInst {
  public:
    ParseMemoryInst(Instruction *Inst, const TargetTransformInfo &TTI)
        : Inst(Inst) {
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
        if (TTI.getTgtMemIntrinsic(II, Info))
          IsTargetMemInst = true;
    }

    bool isLoad() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return isa<LoadInst>(Inst);
    }

    bool isStore() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return isa<StoreInst>(Inst);
    }

    bool isAtomic() const {
      if (IsTargetMemInst)
        return Info.Ordering != AtomicOrdering::NotAtomic;
      return Inst->isAtomic();
    }

    bool isUnordered() const {
      if (IsTargetMemInst)
        return Info.isUnordered();

      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isUnordered();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isUnordered();
      }
      // Conservative answer
      return !Inst->isAtomic();
    }

    bool isVolatile() const {
      if (IsTargetMemInst)
        return Info.IsVolatile;

      if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
        return LI->isVolatile();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
        return SI->isVolatile();
      }
      // Conservative answer
      return true;
    }

    bool isInvariantLoad() const {
      if (auto *LI = dyn_cast<LoadInst>(Inst))
        return LI->getMetadata(LLVMContext::MD_invariant_load) != nullptr;
      return false;
    }

    bool isMatchingMemLoc(const ParseMemoryInst &Inst) const {
      return (getPointerOperand() == Inst.getPointerOperand() &&
              getMatchingId() == Inst.getMatchingId());
    }

    bool isValid() const { return getPointerOperand() != nullptr; }

    // For regular (non-intrinsic) loads/stores, this is set to -1. For
    // intrinsic loads/stores, the id is retrieved from the corresponding
    // field in the MemIntrinsicInfo structure.  That field contains
    // non-negative values only.
    int getMatchingId() const {
      if (IsTargetMemInst) return Info.MatchingId;
      return -1;
    }

    Value *getPointerOperand() const {
      if (IsTargetMemInst) return Info.PtrVal;
      return getLoadStorePointerOperand(Inst);
    }

    bool mayReadFromMemory() const {
      if (IsTargetMemInst) return Info.ReadMem;
      return Inst->mayReadFromMemory();
    }

    bool mayWriteToMemory() const {
      if (IsTargetMemInst) return Info.WriteMem;
      return Inst->mayWriteToMemory();
    }

  private:
    bool IsTargetMemInst = false;
    MemIntrinsicInfo Info;
    Instruction *Inst;
  };

  bool processNode(DomTreeNode *Node);

  Value *getOrCreateResult(Value *Inst, Type *ExpectedType) const {
    if (auto *LI = dyn_cast<LoadInst>(Inst))
      return LI;
    if (auto *SI = dyn_cast<StoreInst>(Inst))
      return SI->getValueOperand();
    assert(isa<IntrinsicInst>(Inst) && "Instruction not supported");
    return TTI.getOrCreateResultFromMemIntrinsic(cast<IntrinsicInst>(Inst),
                                                 ExpectedType);
  }

  /// Return true if the instruction is known to only operate on memory
  /// provably invariant in the given "generation".
  bool isOperatingOnInvariantMemAt(Instruction *I, unsigned GenAt);

  bool isSameMemGeneration(unsigned EarlierGeneration, unsigned LaterGeneration,
                           Instruction *EarlierInst, Instruction *LaterInst);

  void removeMSSA(Instruction *Inst) {
    if (!MSSA)
      return;
    // Removing a store here can leave MemorySSA in an unoptimized state by
    // creating MemoryPhis that have identical arguments and by creating
    // MemoryUses whose defining access is not an actual clobber.  We handle the
    // phi case eagerly here.  The non-optimized MemoryUse case is lazily
    // updated by MemorySSA getClobberingMemoryAccess.
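    // For example (illustrative MemorySSA notation), a phi whose incoming
    // values all become the same access once MA is removed, such as
    //   2 = MemoryPhi({bb1, 1}, {bb2, 1})
    // is itself redundant; the loop below queues it for removal, possibly
    // cascading further up the MemorySSA graph.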
    if (MemoryAccess *MA = MSSA->getMemoryAccess(Inst)) {
      // Optimize MemoryPhi nodes that may become redundant by having all the
      // same input values once MA is removed.
      SmallSetVector<MemoryPhi *, 4> PhisToCheck;
      SmallVector<MemoryAccess *, 8> WorkQueue;
      WorkQueue.push_back(MA);
      // Process MemoryPhi nodes in FIFO order using an ever-growing vector
      // since we shouldn't be processing that many phis and this will avoid
      // an allocation in almost all cases.
      for (unsigned I = 0; I < WorkQueue.size(); ++I) {
        MemoryAccess *WI = WorkQueue[I];

        for (auto *U : WI->users())
          if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U))
            PhisToCheck.insert(MP);

        MSSAUpdater->removeMemoryAccess(WI);

        for (MemoryPhi *MP : PhisToCheck) {
          MemoryAccess *FirstIn = MP->getIncomingValue(0);
          if (llvm::all_of(MP->incoming_values(),
                           [=](Use &In) { return In == FirstIn; }))
            WorkQueue.push_back(MP);
        }
        PhisToCheck.clear();
      }
    }
  }
};

} // end anonymous namespace

/// Determine if the memory referenced by LaterInst is from the same heap
/// version as EarlierInst.
/// This is currently called in two scenarios:
///
///   load p
///   ...
///   load p
///
/// and
///
///   x = load p
///   ...
///   store x, p
///
/// in both cases we want to verify that there are no possible writes to the
/// memory referenced by p between the earlier and later instruction.
bool EarlyCSE::isSameMemGeneration(unsigned EarlierGeneration,
                                   unsigned LaterGeneration,
                                   Instruction *EarlierInst,
                                   Instruction *LaterInst) {
  // Check the simple memory generation tracking first.
  if (EarlierGeneration == LaterGeneration)
    return true;

  if (!MSSA)
    return false;

  // If MemorySSA has determined that one of EarlierInst or LaterInst does not
  // read/write memory, then we can safely return true here.
  // FIXME: We could be more aggressive when checking doesNotAccessMemory(),
  // onlyReadsMemory(), mayReadFromMemory(), and mayWriteToMemory() in this pass
  // by also checking the MemorySSA MemoryAccess on the instruction.  Initial
  // experiments suggest this isn't worthwhile, at least for C/C++ code compiled
  // with the default optimization pipeline.
  auto *EarlierMA = MSSA->getMemoryAccess(EarlierInst);
  if (!EarlierMA)
    return true;
  auto *LaterMA = MSSA->getMemoryAccess(LaterInst);
  if (!LaterMA)
    return true;

  // Since we know LaterDef dominates LaterInst and EarlierInst dominates
  // LaterInst, if LaterDef dominates EarlierInst then it can't occur between
  // EarlierInst and LaterInst and neither can any other write that potentially
  // clobbers LaterInst.
  MemoryAccess *LaterDef =
      MSSA->getWalker()->getClobberingMemoryAccess(LaterInst);
  return MSSA->dominates(LaterDef, EarlierMA);
}

bool EarlyCSE::isOperatingOnInvariantMemAt(Instruction *I, unsigned GenAt) {
  // A location loaded from with an invariant_load is assumed to *never* change
  // within the visible scope of the compilation.
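  // For example (illustrative IR), a load such as
  //   %v = load i32, i32* %p, !invariant.load !0
  // may be reused regardless of any intervening stores.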
  if (auto *LI = dyn_cast<LoadInst>(I))
    if (LI->getMetadata(LLVMContext::MD_invariant_load))
      return true;

  auto MemLocOpt = MemoryLocation::getOrNone(I);
  if (!MemLocOpt)
    // "target" intrinsic forms of loads aren't currently known to
    // MemoryLocation::get.  TODO
    return false;
  MemoryLocation MemLoc = *MemLocOpt;
  if (!AvailableInvariants.count(MemLoc))
    return false;

  // Is the generation at which this became invariant older than the
  // current one?
  return AvailableInvariants.lookup(MemLoc) <= GenAt;
}

bool EarlyCSE::processNode(DomTreeNode *Node) {
  bool Changed = false;
  BasicBlock *BB = Node->getBlock();

  // If this block has a single predecessor, then the predecessor is the parent
  // of the domtree node and all of the live out memory values are still current
  // in this block.  If this block has multiple predecessors, then they could
  // have invalidated the live-out memory values of our parent value.  For now,
  // just be conservative and invalidate memory if this block has multiple
  // predecessors.
  if (!BB->getSinglePredecessor())
    ++CurrentGeneration;

  // If this node has a single predecessor which ends in a conditional branch,
  // we can infer the value of the branch condition given that we took this
  // path.  We need the single predecessor to ensure there's not another path
  // which reaches this block where the condition might hold a different
  // value.  Since we're adding this to the scoped hash table (like any other
  // def), it will have been popped if we encounter a future merge block.
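  // For example (illustrative IR), given
  //   br i1 %cond, label %taken, label %other
  // every use of %cond dominated by the edge into %taken (when %taken has no
  // other predecessors) can be replaced with "true".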
  if (BasicBlock *Pred = BB->getSinglePredecessor()) {
    auto *BI = dyn_cast<BranchInst>(Pred->getTerminator());
    if (BI && BI->isConditional()) {
      auto *CondInst = dyn_cast<Instruction>(BI->getCondition());
      if (CondInst && SimpleValue::canHandle(CondInst)) {
        assert(BI->getSuccessor(0) == BB || BI->getSuccessor(1) == BB);
        auto *TorF = (BI->getSuccessor(0) == BB)
                         ? ConstantInt::getTrue(BB->getContext())
                         : ConstantInt::getFalse(BB->getContext());
        AvailableValues.insert(CondInst, TorF);
        LLVM_DEBUG(dbgs() << "EarlyCSE CVP: Add conditional value for '"
                          << CondInst->getName() << "' as " << *TorF << " in "
                          << BB->getName() << "\n");
        if (!DebugCounter::shouldExecute(CSECounter)) {
          LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n");
        } else {
          // Replace all dominated uses with the known value.
          if (unsigned Count = replaceDominatedUsesWith(
                  CondInst, TorF, DT, BasicBlockEdge(Pred, BB))) {
            Changed = true;
            NumCSECVP += Count;
          }
        }
      }
    }
  }

| Chris Lattner | 9e5e9ed | 2011-01-03 04:17:24 +0000 | [diff] [blame] | 756 | /// LastStore - Keep track of the last non-volatile store that we saw... for | 
|  | 757 | /// as long as there is no instruction that reads memory.  If we see a store | 
|  | 758 | /// to the same location, we delete the dead store.  This zaps trivial dead | 
|  | 759 | /// stores which can occur in bitfield code among other things. | 
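// For instance (illustrative only):
//   store i8 %a, i8* %p
//   store i8 %b, i8* %p   ; no read of %p in between: the first store dies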
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 760 | Instruction *LastStore = nullptr; | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 761 |  | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 762 | // See if any instructions in the block can be eliminated.  If so, do it.  If | 
|  | 763 | // not, add them to AvailableValues. | 
| Chandler Carruth | 7253bba | 2015-01-24 11:33:55 +0000 | [diff] [blame] | 764 | for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E;) { | 
| Duncan P. N. Exon Smith | 3a9c9e3 | 2015-10-13 18:26:00 +0000 | [diff] [blame] | 765 | Instruction *Inst = &*I++; | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 766 |  | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 767 | // Dead instructions should just be removed. | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 768 | if (isInstructionTriviallyDead(Inst, &TLI)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 769 | LLVM_DEBUG(dbgs() << "EarlyCSE DCE: " << *Inst << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 770 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 771 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 772 | continue; | 
|  | 773 | } | 
| Petar Jovanovic | 1d26c7e | 2018-01-09 15:08:37 +0000 | [diff] [blame] | 774 | salvageDebugInfo(*Inst); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 775 | removeMSSA(Inst); | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 776 | Inst->eraseFromParent(); | 
|  | 777 | Changed = true; | 
| Chris Lattner | 8fac5db | 2011-01-02 23:19:45 +0000 | [diff] [blame] | 778 | ++NumSimplify; | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 779 | continue; | 
|  | 780 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 781 |  | 
| Hal Finkel | 1e16fa3 | 2014-11-03 20:21:32 +0000 | [diff] [blame] | 782 | // Skip assume intrinsics; they don't really have side effects (although | 
|  | 783 | // they're marked as such to ensure preservation of control dependencies), | 
| Max Kazantsev | 531db9a | 2017-04-28 06:25:39 +0000 | [diff] [blame] | 784 | // and this pass will not bother removing them. However, we should mark | 
|  | 785 | // their conditions as true for all dominated blocks. | 
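// Sketch (hypothetical IR): after
//   call void @llvm.assume(i1 %cond)
// the instruction defining %cond is entered into AvailableValues as 'true',
// so identical expressions in dominated blocks CSE to 'true'.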
| Hal Finkel | 1e16fa3 | 2014-11-03 20:21:32 +0000 | [diff] [blame] | 786 | if (match(Inst, m_Intrinsic<Intrinsic::assume>())) { | 
| Max Kazantsev | 531db9a | 2017-04-28 06:25:39 +0000 | [diff] [blame] | 787 | auto *CondI = | 
|  | 788 | dyn_cast<Instruction>(cast<CallInst>(Inst)->getArgOperand(0)); | 
|  | 789 | if (CondI && SimpleValue::canHandle(CondI)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 790 | LLVM_DEBUG(dbgs() << "EarlyCSE considering assumption: " << *Inst | 
|  | 791 | << '\n'); | 
| Max Kazantsev | 531db9a | 2017-04-28 06:25:39 +0000 | [diff] [blame] | 792 | AvailableValues.insert(CondI, ConstantInt::getTrue(BB->getContext())); | 
|  | 793 | } else | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 794 | LLVM_DEBUG(dbgs() << "EarlyCSE skipping assumption: " << *Inst << '\n'); | 
| Hal Finkel | 1e16fa3 | 2014-11-03 20:21:32 +0000 | [diff] [blame] | 795 | continue; | 
|  | 796 | } | 
|  | 797 |  | 
| Dan Gohman | 2c74fe9 | 2017-11-08 21:59:51 +0000 | [diff] [blame] | 798 | // Skip sideeffect intrinsics, for the same reason as assume intrinsics. | 
|  | 799 | if (match(Inst, m_Intrinsic<Intrinsic::sideeffect>())) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 800 | LLVM_DEBUG(dbgs() << "EarlyCSE skipping sideeffect: " << *Inst << '\n'); | 
| Dan Gohman | 2c74fe9 | 2017-11-08 21:59:51 +0000 | [diff] [blame] | 801 | continue; | 
|  | 802 | } | 
|  | 803 |  | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 804 | // We can skip all invariant.start intrinsics since they only read memory, | 
|  | 805 | // and we can forward values across them. For invariant starts without | 
|  | 806 | // invariant ends, we can use the fact that the invariantness never ends to | 
|  | 807 | // start a scope in the current generation which remains in effect for all | 
|  | 808 | // future generations.  Also, we don't need to consume the last store since | 
|  | 809 | // the semantics of invariant.start allow us to perform DSE of the last | 
|  | 810 | // store, if there was a store following invariant.start. Consider: | 
| Anna Thomas | b2d12b8 | 2016-08-09 20:00:47 +0000 | [diff] [blame] | 811 | // | 
|  | 812 | // store 30, i8* p | 
|  | 813 | // invariant.start(p) | 
|  | 814 | // store 40, i8* p | 
|  | 815 | // We can DSE the store of 30, since the store of 40 to the invariant | 
|  | 816 | // location p causes undefined behavior. | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 817 | if (match(Inst, m_Intrinsic<Intrinsic::invariant_start>())) { | 
|  | 818 | // If there are any uses, the scope might end. | 
|  | 819 | if (!Inst->use_empty()) | 
|  | 820 | continue; | 
|  | 821 | auto *CI = cast<CallInst>(Inst); | 
|  | 822 | MemoryLocation MemLoc = MemoryLocation::getForArgument(CI, 1, TLI); | 
| Philip Reames | 422024a | 2018-03-15 18:12:27 +0000 | [diff] [blame] | 823 | // Don't start a scope if we already have a better one pushed. | 
|  | 824 | if (!AvailableInvariants.count(MemLoc)) | 
|  | 825 | AvailableInvariants.insert(MemLoc, CurrentGeneration); | 
| Anna Thomas | b2d12b8 | 2016-08-09 20:00:47 +0000 | [diff] [blame] | 826 | continue; | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 827 | } | 
| Anna Thomas | b2d12b8 | 2016-08-09 20:00:47 +0000 | [diff] [blame] | 828 |  | 
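// Guards are handled below much like assumes.  A hedged sketch
// (hypothetical IR):
//   call void (i1, ...) @llvm.experimental.guard(i1 %cond) [ "deopt"() ]
// If %cond is already known to be true the guard is deleted; otherwise
// %cond is recorded as true for everything the guard dominates.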
| Sanjoy Das | ee81b23 | 2016-04-29 21:52:58 +0000 | [diff] [blame] | 829 | if (match(Inst, m_Intrinsic<Intrinsic::experimental_guard>())) { | 
| Sanjoy Das | 107aefc | 2016-04-29 22:23:16 +0000 | [diff] [blame] | 830 | if (auto *CondI = | 
|  | 831 | dyn_cast<Instruction>(cast<CallInst>(Inst)->getArgOperand(0))) { | 
| Max Kazantsev | 0589d9f | 2017-04-28 06:05:48 +0000 | [diff] [blame] | 832 | if (SimpleValue::canHandle(CondI)) { | 
|  | 833 | // Do we already know the actual value of this condition? | 
|  | 834 | if (auto *KnownCond = AvailableValues.lookup(CondI)) { | 
|  | 835 | // Is the condition known to be true? | 
|  | 836 | if (isa<ConstantInt>(KnownCond) && | 
| Craig Topper | 79ab643 | 2017-07-06 18:39:47 +0000 | [diff] [blame] | 837 | cast<ConstantInt>(KnownCond)->isOne()) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 838 | LLVM_DEBUG(dbgs() | 
|  | 839 | << "EarlyCSE removing guard: " << *Inst << '\n'); | 
| Max Kazantsev | 0589d9f | 2017-04-28 06:05:48 +0000 | [diff] [blame] | 840 | removeMSSA(Inst); | 
|  | 841 | Inst->eraseFromParent(); | 
|  | 842 | Changed = true; | 
|  | 843 | continue; | 
|  | 844 | } else | 
|  | 845 | // Use the known value if it wasn't true. | 
|  | 846 | cast<CallInst>(Inst)->setArgOperand(0, KnownCond); | 
|  | 847 | } | 
|  | 848 | // The condition we're guarding on here is true for all dominated | 
|  | 849 | // locations. | 
| Sanjoy Das | ee81b23 | 2016-04-29 21:52:58 +0000 | [diff] [blame] | 850 | AvailableValues.insert(CondI, ConstantInt::getTrue(BB->getContext())); | 
| Max Kazantsev | 0589d9f | 2017-04-28 06:05:48 +0000 | [diff] [blame] | 851 | } | 
| Sanjoy Das | ee81b23 | 2016-04-29 21:52:58 +0000 | [diff] [blame] | 852 | } | 
|  | 853 |  | 
|  | 854 | // Guard intrinsics read all memory, but don't write any memory. | 
|  | 855 | // Accordingly, don't update the generation but consume the last store (to | 
|  | 856 | // avoid an incorrect DSE). | 
|  | 857 | LastStore = nullptr; | 
|  | 858 | continue; | 
|  | 859 | } | 
|  | 860 |  | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 861 | // If the instruction can be simplified (e.g. X+0 = X) then replace it with | 
|  | 862 | // its simpler value. | 
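// E.g. (illustrative): %y = add i32 %x, 0 simplifies to %x, so all uses
// of %y are rewritten to %x and the add is erased if it becomes dead.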
| Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 863 | if (Value *V = SimplifyInstruction(Inst, SQ)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 864 | LLVM_DEBUG(dbgs() << "EarlyCSE Simplify: " << *Inst << "  to: " << *V | 
|  | 865 | << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 866 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 867 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 868 | } else { | 
|  | 869 | bool Killed = false; | 
|  | 870 | if (!Inst->use_empty()) { | 
|  | 871 | Inst->replaceAllUsesWith(V); | 
|  | 872 | Changed = true; | 
|  | 873 | } | 
|  | 874 | if (isInstructionTriviallyDead(Inst, &TLI)) { | 
|  | 875 | removeMSSA(Inst); | 
|  | 876 | Inst->eraseFromParent(); | 
|  | 877 | Changed = true; | 
|  | 878 | Killed = true; | 
|  | 879 | } | 
|  | 880 | if (Changed) | 
|  | 881 | ++NumSimplify; | 
|  | 882 | if (Killed) | 
|  | 883 | continue; | 
| David Majnemer | b8da3a2 | 2016-06-25 00:04:10 +0000 | [diff] [blame] | 884 | } | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 885 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 886 |  | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 887 | // If this is a simple instruction that we can value number, process it. | 
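// A minimal example of this case (hypothetical IR):
//   %a = add i32 %x, %y
//   %b = add i32 %x, %y   ; hashes to the same SimpleValue; replaced by %a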
|  | 888 | if (SimpleValue::canHandle(Inst)) { | 
|  | 889 | // See if the instruction has an available value.  If so, use it. | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 890 | if (Value *V = AvailableValues.lookup(Inst)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 891 | LLVM_DEBUG(dbgs() << "EarlyCSE CSE: " << *Inst << "  to: " << *V | 
|  | 892 | << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 893 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 894 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 895 | continue; | 
|  | 896 | } | 
| David Majnemer | 9554c13 | 2016-04-22 06:37:45 +0000 | [diff] [blame] | 897 | if (auto *I = dyn_cast<Instruction>(V)) | 
|  | 898 | I->andIRFlags(Inst); | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 899 | Inst->replaceAllUsesWith(V); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 900 | removeMSSA(Inst); | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 901 | Inst->eraseFromParent(); | 
|  | 902 | Changed = true; | 
|  | 903 | ++NumCSE; | 
|  | 904 | continue; | 
|  | 905 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 906 |  | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 907 | // Otherwise, just remember that this value is available. | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 908 | AvailableValues.insert(Inst, Inst); | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 909 | continue; | 
|  | 910 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 911 |  | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 912 | ParseMemoryInst MemInst(Inst, TTI); | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 913 | // If this is a non-volatile load, process it. | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 914 | if (MemInst.isValid() && MemInst.isLoad()) { | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 915 | // Conservatively, we can't peek past the ordering implied by this | 
|  | 916 | // operation, but we can still add this load to our set of available values. | 
|  | 917 | if (MemInst.isVolatile() || !MemInst.isUnordered()) { | 
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 918 | LastStore = nullptr; | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 919 | ++CurrentGeneration; | 
| Chris Lattner | 9e5e9ed | 2011-01-03 04:17:24 +0000 | [diff] [blame] | 920 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 921 |  | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 922 | if (MemInst.isInvariantLoad()) { | 
|  | 923 | // If we pass an invariant load, we know that memory location is | 
|  | 924 | // indefinitely constant from the moment of first dereferenceability. | 
| Philip Reames | 422024a | 2018-03-15 18:12:27 +0000 | [diff] [blame] | 925 | // We conservatively treat the invariant_load as that moment.  If we | 
|  | 926 | // pass an invariant load after already establishing a scope, don't | 
|  | 927 | // restart it since we want to preserve the earliest point seen. | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 928 | auto MemLoc = MemoryLocation::get(Inst); | 
| Philip Reames | 422024a | 2018-03-15 18:12:27 +0000 | [diff] [blame] | 929 | if (!AvailableInvariants.count(MemLoc)) | 
|  | 930 | AvailableInvariants.insert(MemLoc, CurrentGeneration); | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 931 | } | 
|  | 932 |  | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 933 | // If we have an available version of this load, and if it is the right | 
| Sanjoy Das | 07c6521 | 2016-06-16 20:47:57 +0000 | [diff] [blame] | 934 | // generation or the load is known to be from an invariant location, | 
|  | 935 | // replace this instruction. | 
|  | 936 | // | 
| Geoff Berry | 64f5ed1 | 2016-08-31 17:45:31 +0000 | [diff] [blame] | 937 | // If either the dominating load or the current load is invariant, then | 
|  | 938 | // we can assume the current load loads the same value as the dominating | 
|  | 939 | // load. | 
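// Sketch of the basic case (hypothetical IR, no intervening clobber):
//   %v1 = load i32, i32* %p
//   %v2 = load i32, i32* %p   ; same generation: %v2 is replaced by %v1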
| Philip Reames | 9e5e2d6 | 2015-12-07 22:41:23 +0000 | [diff] [blame] | 940 | LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand()); | 
| Sanjoy Das | 07c6521 | 2016-06-16 20:47:57 +0000 | [diff] [blame] | 941 | if (InVal.DefInst != nullptr && | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 942 | InVal.MatchingId == MemInst.getMatchingId() && | 
|  | 943 | // We don't yet handle removing loads with ordering of any kind. | 
|  | 944 | !MemInst.isVolatile() && MemInst.isUnordered() && | 
|  | 945 | // We can't replace an atomic load with one which isn't also atomic. | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 946 | InVal.IsAtomic >= MemInst.isAtomic() && | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 947 | (isOperatingOnInvariantMemAt(Inst, InVal.Generation) || | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 948 | isSameMemGeneration(InVal.Generation, CurrentGeneration, | 
|  | 949 | InVal.DefInst, Inst))) { | 
| Philip Reames | 32b5518 | 2016-05-06 01:13:58 +0000 | [diff] [blame] | 950 | Value *Op = getOrCreateResult(InVal.DefInst, Inst->getType()); | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 951 | if (Op != nullptr) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 952 | LLVM_DEBUG(dbgs() << "EarlyCSE CSE LOAD: " << *Inst | 
|  | 953 | << "  to: " << *InVal.DefInst << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 954 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 955 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 956 | continue; | 
|  | 957 | } | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 958 | if (!Inst->use_empty()) | 
|  | 959 | Inst->replaceAllUsesWith(Op); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 960 | removeMSSA(Inst); | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 961 | Inst->eraseFromParent(); | 
|  | 962 | Changed = true; | 
|  | 963 | ++NumCSELoad; | 
|  | 964 | continue; | 
|  | 965 | } | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 966 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 967 |  | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 968 | // Otherwise, remember that we have this instruction. | 
| Arnaud A. de Grandmaison | a6178a1 | 2015-10-07 07:41:29 +0000 | [diff] [blame] | 969 | AvailableLoads.insert( | 
| Philip Reames | 9e5e2d6 | 2015-12-07 22:41:23 +0000 | [diff] [blame] | 970 | MemInst.getPointerOperand(), | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 971 | LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(), | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 972 | MemInst.isAtomic())); | 
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 973 | LastStore = nullptr; | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 974 | continue; | 
|  | 975 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 976 |  | 
| Sanjoy Das | 6de072a | 2017-01-17 20:15:47 +0000 | [diff] [blame] | 977 | // If this instruction may read from memory or throw (and potentially read | 
|  | 978 | // from memory in the exception handler), forget LastStore.  Load/store | 
|  | 979 | // intrinsics will indicate both a read and a write to memory.  The target | 
|  | 980 | // may override this (e.g. so that a store intrinsic does not read from | 
|  | 981 | // memory, and thus will be treated the same as a regular store for | 
|  | 982 | // commoning purposes). | 
|  | 983 | if ((Inst->mayReadFromMemory() || Inst->mayThrow()) && | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 984 | !(MemInst.isValid() && !MemInst.mayReadFromMemory())) | 
| Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 985 | LastStore = nullptr; | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 986 |  | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 987 | // If this is a read-only call, process it. | 
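// For example (illustrative; @f stands in for any readonly callee):
//   %r1 = call i32 @f(i32 %x)
//   %r2 = call i32 @f(i32 %x)   ; same generation: replaced by %r1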
|  | 988 | if (CallValue::canHandle(Inst)) { | 
|  | 989 | // If we have an available version of this call, and if it is the right | 
|  | 990 | // generation, replace this instruction. | 
| Geoff Berry | 2f64c20 | 2016-05-13 17:54:58 +0000 | [diff] [blame] | 991 | std::pair<Instruction *, unsigned> InVal = AvailableCalls.lookup(Inst); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 992 | if (InVal.first != nullptr && | 
|  | 993 | isSameMemGeneration(InVal.second, CurrentGeneration, InVal.first, | 
|  | 994 | Inst)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 995 | LLVM_DEBUG(dbgs() << "EarlyCSE CSE CALL: " << *Inst | 
|  | 996 | << "  to: " << *InVal.first << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 997 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 998 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 999 | continue; | 
|  | 1000 | } | 
| Chandler Carruth | 7253bba | 2015-01-24 11:33:55 +0000 | [diff] [blame] | 1001 | if (!Inst->use_empty()) | 
|  | 1002 | Inst->replaceAllUsesWith(InVal.first); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1003 | removeMSSA(Inst); | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 1004 | Inst->eraseFromParent(); | 
|  | 1005 | Changed = true; | 
|  | 1006 | ++NumCSECall; | 
|  | 1007 | continue; | 
|  | 1008 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 1009 |  | 
| Chris Lattner | 92bb0f9 | 2011-01-03 03:41:27 +0000 | [diff] [blame] | 1010 | // Otherwise, remember that we have this instruction. | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1011 | AvailableCalls.insert( | 
| Geoff Berry | 2f64c20 | 2016-05-13 17:54:58 +0000 | [diff] [blame] | 1012 | Inst, std::pair<Instruction *, unsigned>(Inst, CurrentGeneration)); | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 1013 | continue; | 
|  | 1014 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 1015 |  | 
| Philip Reames | dfd890d | 2015-08-27 01:32:33 +0000 | [diff] [blame] | 1016 | // A release fence requires that all stores complete before it, but does | 
|  | 1017 | // not prevent the reordering of following loads 'before' the fence.  As a | 
|  | 1018 | // result, we don't need to consider it as writing to memory and don't need | 
|  | 1019 | // to advance the generation.  We do need to prevent DSE across the fence, | 
|  | 1020 | // but that's handled above. | 
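// Illustrative IR for this case (hypothetical names):
//   %v = load i32, i32* %p
//   fence release
//   %w = load i32, i32* %p   ; still CSE-able to %v; the release fence
//                            ; does not invalidate available loads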
|  | 1021 | if (FenceInst *FI = dyn_cast<FenceInst>(Inst)) | 
| JF Bastien | 800f87a | 2016-04-06 21:19:33 +0000 | [diff] [blame] | 1022 | if (FI->getOrdering() == AtomicOrdering::Release) { | 
| Philip Reames | dfd890d | 2015-08-27 01:32:33 +0000 | [diff] [blame] | 1023 | assert(Inst->mayReadFromMemory() && "relied on to prevent DSE above"); | 
|  | 1024 | continue; | 
|  | 1025 | } | 
|  | 1026 |  | 
| Philip Reames | ae1f265b | 2015-12-16 01:01:30 +0000 | [diff] [blame] | 1027 | // Write-back DSE - If we write back the same value we just loaded from | 
|  | 1028 | // the same location and haven't passed any intervening writes or ordering | 
|  | 1029 | // operations, we can remove the write.  The primary benefit is in allowing | 
|  | 1030 | // the available load table to remain valid and value forward past where | 
|  | 1031 | // the store originally was. | 
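// The pattern being removed looks like this (hypothetical IR):
//   %v = load i32, i32* %p
//   store i32 %v, i32* %p   ; writes back the value just loaded: removable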
|  | 1032 | if (MemInst.isValid() && MemInst.isStore()) { | 
|  | 1033 | LoadValue InVal = AvailableLoads.lookup(MemInst.getPointerOperand()); | 
| Philip Reames | 32b5518 | 2016-05-06 01:13:58 +0000 | [diff] [blame] | 1034 | if (InVal.DefInst && | 
|  | 1035 | InVal.DefInst == getOrCreateResult(Inst, InVal.DefInst->getType()) && | 
| Philip Reames | ae1f265b | 2015-12-16 01:01:30 +0000 | [diff] [blame] | 1036 | InVal.MatchingId == MemInst.getMatchingId() && | 
|  | 1037 | // We don't yet handle removing stores with ordering of any kind. | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1038 | !MemInst.isVolatile() && MemInst.isUnordered() && | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 1039 | (isOperatingOnInvariantMemAt(Inst, InVal.Generation) || | 
|  | 1040 | isSameMemGeneration(InVal.Generation, CurrentGeneration, | 
|  | 1041 | InVal.DefInst, Inst))) { | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1042 | // It is okay to have a LastStore to a different pointer here if MemorySSA | 
|  | 1043 | // tells us that the load and store are from the same memory generation. | 
|  | 1044 | // In that case, LastStore should keep its present value since we're | 
|  | 1045 | // removing the current store. | 
| Philip Reames | ae1f265b | 2015-12-16 01:01:30 +0000 | [diff] [blame] | 1046 | assert((!LastStore || | 
|  | 1047 | ParseMemoryInst(LastStore, TTI).getPointerOperand() == | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1048 | MemInst.getPointerOperand() || | 
|  | 1049 | MSSA) && | 
|  | 1050 | "can't have an intervening store if not using MemorySSA!"); | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1051 | LLVM_DEBUG(dbgs() << "EarlyCSE DSE (writeback): " << *Inst << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 1052 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1053 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 1054 | continue; | 
|  | 1055 | } | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1056 | removeMSSA(Inst); | 
| Philip Reames | ae1f265b | 2015-12-16 01:01:30 +0000 | [diff] [blame] | 1057 | Inst->eraseFromParent(); | 
|  | 1058 | Changed = true; | 
|  | 1059 | ++NumDSE; | 
|  | 1060 | // We can avoid incrementing the generation count since we were able | 
|  | 1061 | // to eliminate this store. | 
|  | 1062 | continue; | 
|  | 1063 | } | 
|  | 1064 | } | 
|  | 1065 |  | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 1066 | // Okay, this isn't something we can CSE at all.  Check to see if it is | 
|  | 1067 | // something that could modify memory.  If so, our available memory values | 
|  | 1068 | // cannot be used so bump the generation count. | 
| Chris Lattner | e0e32a9 | 2011-01-03 03:46:34 +0000 | [diff] [blame] | 1069 | if (Inst->mayWriteToMemory()) { | 
| Chris Lattner | b9a8efc | 2011-01-03 03:18:43 +0000 | [diff] [blame] | 1070 | ++CurrentGeneration; | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 1071 |  | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 1072 | if (MemInst.isValid() && MemInst.isStore()) { | 
| Chris Lattner | 9e5e9ed | 2011-01-03 04:17:24 +0000 | [diff] [blame] | 1073 | // We do a trivial form of DSE if there are two stores to the same | 
| Philip Reames | 15145fb | 2015-12-17 18:50:50 +0000 | [diff] [blame] | 1074 | // location with no intervening loads.  Delete the earlier store. | 
|  | 1075 | // At the moment, we don't remove ordered stores, but do remove | 
|  | 1076 | // unordered atomic stores.  There's no special requirement (for | 
|  | 1077 | // unordered atomics) about removing atomic stores only in favor of | 
|  | 1078 | // other atomic stores since we're going to execute the non-atomic | 
|  | 1079 | // one anyway and the atomic one might never have become visible. | 
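// E.g. (illustrative):
//   store atomic i32 1, i32* %p unordered, align 4
//   store i32 2, i32* %p   ; the earlier unordered store may be deleted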
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 1080 | if (LastStore) { | 
|  | 1081 | ParseMemoryInst LastStoreMemInst(LastStore, TTI); | 
| Philip Reames | 15145fb | 2015-12-17 18:50:50 +0000 | [diff] [blame] | 1082 | assert(LastStoreMemInst.isUnordered() && | 
|  | 1083 | !LastStoreMemInst.isVolatile() && | 
|  | 1084 | "Violated invariant"); | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 1085 | if (LastStoreMemInst.isMatchingMemLoc(MemInst)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1086 | LLVM_DEBUG(dbgs() << "EarlyCSE DEAD STORE: " << *LastStore | 
|  | 1087 | << "  due to: " << *Inst << '\n'); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 1088 | if (!DebugCounter::shouldExecute(CSECounter)) { | 
| Nicola Zaghen | d34e60c | 2018-05-14 12:53:11 +0000 | [diff] [blame] | 1089 | LLVM_DEBUG(dbgs() << "Skipping due to debug counter\n"); | 
| Geoff Berry | 5bf4a5e | 2018-04-06 18:47:33 +0000 | [diff] [blame] | 1090 | } else { | 
|  | 1091 | removeMSSA(LastStore); | 
|  | 1092 | LastStore->eraseFromParent(); | 
|  | 1093 | Changed = true; | 
|  | 1094 | ++NumDSE; | 
|  | 1095 | LastStore = nullptr; | 
|  | 1096 | } | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 1097 | } | 
| Philip Reames | 018dbf1 | 2014-11-18 17:46:32 +0000 | [diff] [blame] | 1098 | // fallthrough - we can exploit information about this store | 
| Chris Lattner | 9e5e9ed | 2011-01-03 04:17:24 +0000 | [diff] [blame] | 1099 | } | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 1100 |  | 
| Chris Lattner | 9e5e9ed | 2011-01-03 04:17:24 +0000 | [diff] [blame] | 1101 | // Okay, we just invalidated anything we knew about loaded values.  Try | 
|  | 1102 | // to salvage *something* by remembering that the stored value is a live | 
|  | 1103 | // version of the pointer.  It is safe to forward from volatile stores | 
|  | 1104 | // to non-volatile loads, so we don't have to check for volatility of | 
|  | 1105 | // the store. | 
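// For example (hypothetical IR):
//   store i32 %v, i32* %p
//   %r = load i32, i32* %p   ; %r can be forwarded from the store as %v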
| Arnaud A. de Grandmaison | a6178a1 | 2015-10-07 07:41:29 +0000 | [diff] [blame] | 1106 | AvailableLoads.insert( | 
| Philip Reames | 9e5e2d6 | 2015-12-07 22:41:23 +0000 | [diff] [blame] | 1107 | MemInst.getPointerOperand(), | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 1108 | LoadValue(Inst, CurrentGeneration, MemInst.getMatchingId(), | 
| Philip Reames | ca587fe | 2018-03-15 17:29:32 +0000 | [diff] [blame] | 1109 | MemInst.isAtomic())); | 
| Nadav Rotem | 465834c | 2012-07-24 10:51:42 +0000 | [diff] [blame] | 1110 |  | 
| Philip Reames | 15145fb | 2015-12-17 18:50:50 +0000 | [diff] [blame] | 1111 | // Remember that this was the last unordered store we saw for DSE. We | 
|  | 1112 | // don't yet handle DSE on ordered or volatile stores since we don't | 
|  | 1113 | // have a good way to model the ordering requirement for following | 
|  | 1114 | // passes once the store is removed.  We could insert a fence, but | 
|  | 1115 | // since fences are slightly stronger than stores in their ordering, | 
|  | 1116 | // it's not clear this is a profitable transform. Another option would | 
|  | 1117 | // be to merge the ordering with that of the post dominating store. | 
|  | 1118 | if (MemInst.isUnordered() && !MemInst.isVolatile()) | 
| Chad Rosier | f9327d6 | 2015-01-26 22:51:15 +0000 | [diff] [blame] | 1119 | LastStore = Inst; | 
| Philip Reames | 8fc2cbf | 2015-12-08 21:45:41 +0000 | [diff] [blame] | 1120 | else | 
|  | 1121 | LastStore = nullptr; | 
| Chris Lattner | e0e32a9 | 2011-01-03 03:46:34 +0000 | [diff] [blame] | 1122 | } | 
|  | 1123 | } | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 1124 | } | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1125 |  | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 1126 | return Changed; | 
| Chris Lattner | 704541b | 2011-01-02 21:47:05 +0000 | [diff] [blame] | 1127 | } | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 1128 |  | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1129 | bool EarlyCSE::run() { | 
| Chandler Carruth | 7253bba | 2015-01-24 11:33:55 +0000 | [diff] [blame] | 1130 | // Note: a deque is used here because there are significant performance | 
|  | 1131 | // gains over vector when the container becomes very large, due to the | 
|  | 1132 | // specific access patterns. For more information see the mailing list | 
|  | 1133 | // discussion on this: | 
| Tanya Lattner | 0d28f80 | 2015-08-05 03:51:17 +0000 | [diff] [blame] | 1134 | // http://lists.llvm.org/pipermail/llvm-commits/Week-of-Mon-20120116/135228.html | 
| Lenny Maiorani | 9eefc81 | 2014-09-20 13:29:20 +0000 | [diff] [blame] | 1135 | std::deque<StackNode *> nodesToProcess; | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1136 |  | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1137 | bool Changed = false; | 
|  | 1138 |  | 
|  | 1139 | // Process the root node. | 
| Chandler Carruth | 7253bba | 2015-01-24 11:33:55 +0000 | [diff] [blame] | 1140 | nodesToProcess.push_back(new StackNode( | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 1141 | AvailableValues, AvailableLoads, AvailableInvariants, AvailableCalls, | 
|  | 1142 | CurrentGeneration, DT.getRootNode(), | 
|  | 1143 | DT.getRootNode()->begin(), DT.getRootNode()->end())); | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1144 |  | 
|  | 1145 | // Save the current generation. | 
|  | 1146 | unsigned LiveOutGeneration = CurrentGeneration; | 
|  | 1147 |  | 
|  | 1148 | // Process the stack. | 
|  | 1149 | while (!nodesToProcess.empty()) { | 
|  | 1150 | // Grab the item on top of the stack. Set the current generation and | 
|  | 1151 | // process the node; it is popped once it has been fully processed. | 
| Michael Gottesman | 2bf0173 | 2013-12-05 18:42:12 +0000 | [diff] [blame] | 1152 | StackNode *NodeToProcess = nodesToProcess.back(); | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1153 |  | 
|  | 1154 | // Initialize class members. | 
|  | 1155 | CurrentGeneration = NodeToProcess->currentGeneration(); | 
|  | 1156 |  | 
|  | 1157 | // Check if the node needs to be processed. | 
|  | 1158 | if (!NodeToProcess->isProcessed()) { | 
|  | 1159 | // Process the node. | 
|  | 1160 | Changed |= processNode(NodeToProcess->node()); | 
|  | 1161 | NodeToProcess->childGeneration(CurrentGeneration); | 
|  | 1162 | NodeToProcess->process(); | 
|  | 1163 | } else if (NodeToProcess->childIter() != NodeToProcess->end()) { | 
|  | 1164 | // Push the next child onto the stack. | 
|  | 1165 | DomTreeNode *child = NodeToProcess->nextChild(); | 
| Michael Gottesman | 2bf0173 | 2013-12-05 18:42:12 +0000 | [diff] [blame] | 1166 | nodesToProcess.push_back( | 
| Philip Reames | 0adbb19 | 2018-03-14 21:35:06 +0000 | [diff] [blame] | 1167 | new StackNode(AvailableValues, AvailableLoads, AvailableInvariants, | 
|  | 1168 | AvailableCalls, NodeToProcess->childGeneration(), | 
|  | 1169 | child, child->begin(), child->end())); | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1170 | } else { | 
|  | 1171 | // It has been processed, and there are no more children to process, | 
|  | 1172 | // so delete it and pop it off the stack. | 
|  | 1173 | delete NodeToProcess; | 
| Michael Gottesman | 2bf0173 | 2013-12-05 18:42:12 +0000 | [diff] [blame] | 1174 | nodesToProcess.pop_back(); | 
| Lenny Maiorani | 8d670b8 | 2012-01-31 23:14:41 +0000 | [diff] [blame] | 1175 | } | 
|  | 1176 | } // while (!nodes...) | 
|  | 1177 |  | 
|  | 1178 | // Reset the current generation. | 
|  | 1179 | CurrentGeneration = LiveOutGeneration; | 
|  | 1180 |  | 
|  | 1181 | return Changed; | 
| Chris Lattner | 18ae543 | 2011-01-02 23:04:14 +0000 | [diff] [blame] | 1182 | } | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1183 |  | 
| Chandler Carruth | e8c686a | 2015-02-01 10:51:23 +0000 | [diff] [blame] | 1184 | PreservedAnalyses EarlyCSEPass::run(Function &F, | 
| Sean Silva | 36e0d01 | 2016-08-09 00:28:15 +0000 | [diff] [blame] | 1185 | FunctionAnalysisManager &AM) { | 
| Chandler Carruth | b47f801 | 2016-03-11 11:05:24 +0000 | [diff] [blame] | 1186 | auto &TLI = AM.getResult<TargetLibraryAnalysis>(F); | 
|  | 1187 | auto &TTI = AM.getResult<TargetIRAnalysis>(F); | 
|  | 1188 | auto &DT = AM.getResult<DominatorTreeAnalysis>(F); | 
| Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1189 | auto &AC = AM.getResult<AssumptionAnalysis>(F); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1190 | auto *MSSA = | 
|  | 1191 | UseMemorySSA ? &AM.getResult<MemorySSAAnalysis>(F).getMSSA() : nullptr; | 
| Chandler Carruth | e8c686a | 2015-02-01 10:51:23 +0000 | [diff] [blame] | 1192 |  | 
| Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 1193 | EarlyCSE CSE(F.getParent()->getDataLayout(), TLI, TTI, DT, AC, MSSA); | 
| Chandler Carruth | e8c686a | 2015-02-01 10:51:23 +0000 | [diff] [blame] | 1194 |  | 
|  | 1195 | if (!CSE.run()) | 
|  | 1196 | return PreservedAnalyses::all(); | 
|  | 1197 |  | 
| Chandler Carruth | e8c686a | 2015-02-01 10:51:23 +0000 | [diff] [blame] | 1198 | PreservedAnalyses PA; | 
| Chandler Carruth | ca68a3e | 2017-01-15 06:32:49 +0000 | [diff] [blame] | 1199 | PA.preserveSet<CFGAnalyses>(); | 
| Davide Italiano | 02861d8 | 2016-06-08 21:31:55 +0000 | [diff] [blame] | 1200 | PA.preserve<GlobalsAA>(); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1201 | if (UseMemorySSA) | 
|  | 1202 | PA.preserve<MemorySSAAnalysis>(); | 
| Chandler Carruth | e8c686a | 2015-02-01 10:51:23 +0000 | [diff] [blame] | 1203 | return PA; | 
|  | 1204 | } | 
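// Illustrative command-line usage, using the pass names registered below
// (assuming a standard LLVM build of the opt tool):
//   opt -early-cse -S in.ll
//   opt -early-cse-memssa -S in.ll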
|  | 1205 |  | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1206 | namespace { | 
| Eugene Zelenko | 3b87939 | 2017-10-13 21:17:07 +0000 | [diff] [blame] | 1207 |  | 
| Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 1208 | /// A simple and fast domtree-based CSE pass. | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1209 | /// | 
|  | 1210 | /// This pass does a simple depth-first walk over the dominator tree, | 
|  | 1211 | /// eliminating trivially redundant instructions and using instsimplify to | 
|  | 1212 | /// canonicalize things as it goes. It is intended to be fast and catch obvious | 
|  | 1213 | /// cases so that instcombine and other passes are more effective. It is | 
|  | 1214 | /// expected that a later pass of GVN will catch the interesting/hard cases. | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1215 | template<bool UseMemorySSA> | 
|  | 1216 | class EarlyCSELegacyCommonPass : public FunctionPass { | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1217 | public: | 
|  | 1218 | static char ID; | 
|  | 1219 |  | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1220 | EarlyCSELegacyCommonPass() : FunctionPass(ID) { | 
|  | 1221 | if (UseMemorySSA) | 
|  | 1222 | initializeEarlyCSEMemSSALegacyPassPass(*PassRegistry::getPassRegistry()); | 
|  | 1223 | else | 
|  | 1224 | initializeEarlyCSELegacyPassPass(*PassRegistry::getPassRegistry()); | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1225 | } | 
|  | 1226 |  | 
|  | 1227 | bool runOnFunction(Function &F) override { | 
| Andrew Kaylor | aa641a5 | 2016-04-22 22:06:11 +0000 | [diff] [blame] | 1228 | if (skipFunction(F)) | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1229 | return false; | 
|  | 1230 |  | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1231 | auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(); | 
| Chandler Carruth | fdb9c57 | 2015-02-01 12:01:35 +0000 | [diff] [blame] | 1232 | auto &TTI = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F); | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1233 | auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree(); | 
| Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1234 | auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1235 | auto *MSSA = | 
|  | 1236 | UseMemorySSA ? &getAnalysis<MemorySSAWrapperPass>().getMSSA() : nullptr; | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1237 |  | 
| Daniel Berlin | 4d0fe64 | 2017-04-28 19:55:38 +0000 | [diff] [blame] | 1238 | EarlyCSE CSE(F.getParent()->getDataLayout(), TLI, TTI, DT, AC, MSSA); | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1239 |  | 
|  | 1240 | return CSE.run(); | 
|  | 1241 | } | 
|  | 1242 |  | 
|  | 1243 | void getAnalysisUsage(AnalysisUsage &AU) const override { | 
| Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1244 | AU.addRequired<AssumptionCacheTracker>(); | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1245 | AU.addRequired<DominatorTreeWrapperPass>(); | 
|  | 1246 | AU.addRequired<TargetLibraryInfoWrapperPass>(); | 
| Chandler Carruth | 705b185 | 2015-01-31 03:43:40 +0000 | [diff] [blame] | 1247 | AU.addRequired<TargetTransformInfoWrapperPass>(); | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1248 | if (UseMemorySSA) { | 
|  | 1249 | AU.addRequired<MemorySSAWrapperPass>(); | 
|  | 1250 | AU.addPreserved<MemorySSAWrapperPass>(); | 
|  | 1251 | } | 
| James Molloy | efbba72 | 2015-09-10 10:22:12 +0000 | [diff] [blame] | 1252 | AU.addPreserved<GlobalsAAWrapperPass>(); | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1253 | AU.setPreservesCFG(); | 
|  | 1254 | } | 
|  | 1255 | }; | 
| Eugene Zelenko | 3b87939 | 2017-10-13 21:17:07 +0000 | [diff] [blame] | 1256 |  | 
|  | 1257 | } // end anonymous namespace | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1258 |  | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1259 | using EarlyCSELegacyPass = EarlyCSELegacyCommonPass</*UseMemorySSA=*/false>; | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1260 |  | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1261 | template<> | 
|  | 1262 | char EarlyCSELegacyPass::ID = 0; | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1263 |  | 
|  | 1264 | INITIALIZE_PASS_BEGIN(EarlyCSELegacyPass, "early-cse", "Early CSE", false, | 
|  | 1265 | false) | 
| Chandler Carruth | 705b185 | 2015-01-31 03:43:40 +0000 | [diff] [blame] | 1266 | INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass) | 
| Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1267 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) | 
| Chandler Carruth | d649c0a | 2015-01-27 01:34:14 +0000 | [diff] [blame] | 1268 | INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass) | 
|  | 1269 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) | 
|  | 1270 | INITIALIZE_PASS_END(EarlyCSELegacyPass, "early-cse", "Early CSE", false, false) | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1271 |  | 
|  | 1272 | using EarlyCSEMemSSALegacyPass = | 
|  | 1273 | EarlyCSELegacyCommonPass</*UseMemorySSA=*/true>; | 
|  | 1274 |  | 
|  | 1275 | template<> | 
|  | 1276 | char EarlyCSEMemSSALegacyPass::ID = 0; | 
|  | 1277 |  | 
|  | 1278 | FunctionPass *llvm::createEarlyCSEPass(bool UseMemorySSA) { | 
|  | 1279 | if (UseMemorySSA) | 
|  | 1280 | return new EarlyCSEMemSSALegacyPass(); | 
|  | 1281 | else | 
|  | 1282 | return new EarlyCSELegacyPass(); | 
|  | 1283 | } | 
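// A hedged usage sketch for the legacy pass manager; the module M is
// assumed to exist in the caller:
//   legacy::PassManager PM;
//   PM.add(llvm::createEarlyCSEPass(/*UseMemorySSA=*/true));
//   PM.run(*M);   // M is an llvm::Module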
|  | 1284 |  | 
|  | 1285 | INITIALIZE_PASS_BEGIN(EarlyCSEMemSSALegacyPass, "early-cse-memssa", | 
|  | 1286 | "Early CSE w/ MemorySSA", false, false) | 
|  | 1287 | INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass) | 
| Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 1288 | INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker) | 
| Geoff Berry | 8d84605 | 2016-08-31 19:24:10 +0000 | [diff] [blame] | 1289 | INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass) | 
|  | 1290 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) | 
|  | 1291 | INITIALIZE_PASS_DEPENDENCY(MemorySSAWrapperPass) | 
|  | 1292 | INITIALIZE_PASS_END(EarlyCSEMemSSALegacyPass, "early-cse-memssa", | 
|  | 1293 | "Early CSE w/ MemorySSA", false, false) |