//===- InstCombineLoadStoreAlloca.cpp -------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the visit functions for load, store and alloca.
//
//===----------------------------------------------------------------------===//

#include "InstCombineInternal.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"

using namespace llvm;
using namespace PatternMatch;
#define DEBUG_TYPE "instcombine"

STATISTIC(NumDeadStore, "Number of dead stores eliminated");
STATISTIC(NumGlobalCopies, "Number of allocas copied from constant global");

/// Return true if V (possibly indirectly) points to some part of a constant
/// global variable. This intentionally only accepts constant expressions
/// because we can't rewrite arbitrary instructions.
static bool pointsToConstantGlobal(Value *V) {
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return GV->isConstant();

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (CE->getOpcode() == Instruction::BitCast ||
        CE->getOpcode() == Instruction::AddrSpaceCast ||
        CE->getOpcode() == Instruction::GetElementPtr)
      return pointsToConstantGlobal(CE->getOperand(0));
  }
  return false;
}
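
// For example (illustrative IR), given:
//
//   @g = internal constant [4 x i32] [i32 1, i32 2, i32 3, i32 4]
//
// pointsToConstantGlobal returns true for @g itself and for constant
// expressions over it, such as:
//
//   i8* bitcast ([4 x i32]* @g to i8*)
//   i32* getelementptr inbounds ([4 x i32], [4 x i32]* @g, i64 0, i64 2)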

/// Recursively walk the uses of a (derived) pointer to an alloca. Ignore any
/// reads of the pointer, and return false if we see any stores or other
/// unknown uses. If we see pointer arithmetic, keep track of whether it moves
/// the pointer (with IsOffset) but otherwise traverse the uses. If we see a
/// memcpy/memmove that targets an unoffseted pointer to the alloca, and if the
/// source pointer is a pointer to a constant global, we can optimize this.
static bool
isOnlyCopiedFromConstantGlobal(Value *V, MemTransferInst *&TheCopy,
                               SmallVectorImpl<Instruction *> &ToDelete) {
  // We track lifetime intrinsics as we encounter them. If we decide to go
  // ahead and replace the value with the global, this lets the caller quickly
  // eliminate the markers.

  SmallVector<std::pair<Value *, bool>, 35> ValuesToInspect;
  ValuesToInspect.emplace_back(V, false);
  while (!ValuesToInspect.empty()) {
    auto ValuePair = ValuesToInspect.pop_back_val();
    const bool IsOffset = ValuePair.second;
    for (auto &U : ValuePair.first->uses()) {
      auto *I = cast<Instruction>(U.getUser());

      if (auto *LI = dyn_cast<LoadInst>(I)) {
        // Ignore simple (non-volatile, non-atomic) loads; they are always OK.
        if (!LI->isSimple()) return false;
        continue;
      }

      if (isa<BitCastInst>(I) || isa<AddrSpaceCastInst>(I)) {
        // If uses of the bitcast are ok, we are ok.
        ValuesToInspect.emplace_back(I, IsOffset);
        continue;
      }
      if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
        // If the GEP has all zero indices, it doesn't offset the pointer;
        // otherwise it does.
        ValuesToInspect.emplace_back(I, IsOffset || !GEP->hasAllZeroIndices());
        continue;
      }

      if (auto *Call = dyn_cast<CallBase>(I)) {
        // If this is the function being called, we treat it like a load and
        // ignore it.
        if (Call->isCallee(&U))
          continue;

        unsigned DataOpNo = Call->getDataOperandNo(&U);
        bool IsArgOperand = Call->isArgOperand(&U);

        // Inalloca arguments are clobbered by the call.
        if (IsArgOperand && Call->isInAllocaArgument(DataOpNo))
          return false;

        // If this is a readonly/readnone call site, then we know it is just a
        // load (but one that potentially returns the value itself), so we can
        // ignore it if we know that the value isn't captured.
        if (Call->onlyReadsMemory() &&
            (Call->use_empty() || Call->doesNotCapture(DataOpNo)))
          continue;

        // If this is being passed as a byval argument, the caller is making a
        // copy, so it is only a read of the alloca.
        if (IsArgOperand && Call->isByValArgument(DataOpNo))
          continue;
      }

      // Lifetime intrinsics can be handled by the caller.
      if (I->isLifetimeStartOrEnd()) {
        assert(I->use_empty() && "Lifetime markers have no result to use!");
        ToDelete.push_back(I);
        continue;
      }

      // If this isn't our memcpy/memmove, reject it as something we can't
      // handle.
      MemTransferInst *MI = dyn_cast<MemTransferInst>(I);
      if (!MI)
        return false;

      // If the transfer is using the alloca as a source of the transfer, then
      // ignore it since it is a load (unless the transfer is volatile).
      if (U.getOperandNo() == 1) {
        if (MI->isVolatile()) return false;
        continue;
      }

      // If we already have seen a copy, reject the second one.
      if (TheCopy) return false;

      // If the pointer has been offset from the start of the alloca, we can't
      // safely handle this.
      if (IsOffset) return false;

      // If the memintrinsic isn't using the alloca as the dest, reject it.
      if (U.getOperandNo() != 0) return false;

      // If the source of the memcpy/move is not a constant global, reject it.
      if (!pointsToConstantGlobal(MI->getSource()))
        return false;

      // Otherwise, the transform is safe. Remember the copy instruction.
      TheCopy = MI;
    }
  }
  return true;
}
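
// Illustrative IR for the pattern this walker accepts (a sketch, assuming
// typed pointers; names are hypothetical):
//
//   @g = private constant [4 x i32] [i32 1, i32 2, i32 3, i32 4]
//   ...
//   %a = alloca [4 x i32]
//   %dst = bitcast [4 x i32]* %a to i8*
//   call void @llvm.memcpy.p0i8.p0i8.i64(i8* %dst,
//       i8* bitcast ([4 x i32]* @g to i8*), i64 16, i1 false)
//   ; ... only reads of %a below this point ...
//
// TheCopy is set to the memcpy, and the caller may then replace all uses of
// %a with @g.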

/// Return the memcpy/memmove that copies a constant global into the specified
/// alloca if that copy is the only modification of the alloca, or null
/// otherwise. If we can prove this, we can replace any uses of the alloca
/// with uses of the global directly.
static MemTransferInst *
isOnlyCopiedFromConstantGlobal(AllocaInst *AI,
                               SmallVectorImpl<Instruction *> &ToDelete) {
  MemTransferInst *TheCopy = nullptr;
  if (isOnlyCopiedFromConstantGlobal(AI, TheCopy, ToDelete))
    return TheCopy;
  return nullptr;
}

/// Returns true if V is dereferenceable for the size of the alloca.
static bool isDereferenceableForAllocaSize(const Value *V, const AllocaInst *AI,
                                           const DataLayout &DL) {
  if (AI->isArrayAllocation())
    return false;
  uint64_t AllocaSize = DL.getTypeStoreSize(AI->getAllocatedType());
  if (!AllocaSize)
    return false;
  return isDereferenceableAndAlignedPointer(V, Align(AI->getAlignment()),
                                            APInt(64, AllocaSize), DL);
}

static Instruction *simplifyAllocaArraySize(InstCombiner &IC, AllocaInst &AI) {
  // Check for array size of 1 (scalar allocation).
  if (!AI.isArrayAllocation()) {
    // i32 1 is the canonical array size for scalar allocations.
    if (AI.getArraySize()->getType()->isIntegerTy(32))
      return nullptr;

    // Canonicalize it.
    Value *V = IC.Builder.getInt32(1);
    AI.setOperand(0, V);
    return &AI;
  }

  // Convert: alloca Ty, C - where C is a constant != 1 into: alloca [C x Ty], 1
  if (const ConstantInt *C = dyn_cast<ConstantInt>(AI.getArraySize())) {
    if (C->getValue().getActiveBits() <= 64) {
      Type *NewTy = ArrayType::get(AI.getAllocatedType(), C->getZExtValue());
      AllocaInst *New = IC.Builder.CreateAlloca(NewTy, nullptr, AI.getName());
      New->setAlignment(MaybeAlign(AI.getAlignment()));

      // Scan to the end of the allocation instructions to skip over a block of
      // allocas if possible; also skip interleaved debug info.
      BasicBlock::iterator It(New);
      while (isa<AllocaInst>(*It) || isa<DbgInfoIntrinsic>(*It))
        ++It;

      // Now that It is pointing to the first non-allocation instruction in the
      // block, insert our getelementptr instruction.
      Type *IdxTy = IC.getDataLayout().getIntPtrType(AI.getType());
      Value *NullIdx = Constant::getNullValue(IdxTy);
      Value *Idx[2] = {NullIdx, NullIdx};
      Instruction *GEP = GetElementPtrInst::CreateInBounds(
          NewTy, New, Idx, New->getName() + ".sub");
      IC.InsertNewInstBefore(GEP, *It);

      // Now make everything use the getelementptr instead of the original
      // allocation.
      return IC.replaceInstUsesWith(AI, GEP);
    }
  }

  if (isa<UndefValue>(AI.getArraySize()))
    return IC.replaceInstUsesWith(AI, Constant::getNullValue(AI.getType()));

  // Ensure that the alloca array size argument has type intptr_t, so that
  // any casting is exposed early.
  Type *IntPtrTy = IC.getDataLayout().getIntPtrType(AI.getType());
  if (AI.getArraySize()->getType() != IntPtrTy) {
    Value *V = IC.Builder.CreateIntCast(AI.getArraySize(), IntPtrTy, false);
    AI.setOperand(0, V);
    return &AI;
  }

  return nullptr;
}
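
// Illustrative before/after for the constant-array-size canonicalization
// above (a sketch; the value names are illustrative):
//
//   Before:  %a = alloca i32, i32 4, align 4
//   After:   %a1 = alloca [4 x i32], align 4
//            %a1.sub = getelementptr inbounds [4 x i32], [4 x i32]* %a1,
//                                             i64 0, i64 0
//
// All uses of %a are rewritten to use %a1.sub, which has the original i32*
// type.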

namespace {
// If I and V are pointers in different address spaces, it is not allowed to
// use replaceAllUsesWith since I and V have different types. A
// non-target-specific transformation should not use addrspacecast on V since
// the two address spaces may be disjoint depending on the target.
//
// This class chases down uses of the old pointer until reaching the load
// instructions, then replaces the old pointer in the load instructions with
// the new pointer. If during the chasing it sees a bitcast or GEP, it will
// create a new bitcast or GEP with the new pointer and use them in the load
// instruction.
class PointerReplacer {
public:
  PointerReplacer(InstCombiner &IC) : IC(IC) {}
  void replacePointer(Instruction &I, Value *V);

private:
  void findLoadAndReplace(Instruction &I);
  void replace(Instruction *I);
  Value *getReplacement(Value *I);

  SmallVector<Instruction *, 4> Path;
  MapVector<Value *, Value *> WorkMap;
  InstCombiner &IC;
};
} // end anonymous namespace

void PointerReplacer::findLoadAndReplace(Instruction &I) {
  for (auto U : I.users()) {
    auto *Inst = dyn_cast<Instruction>(&*U);
    if (!Inst)
      return;
    LLVM_DEBUG(dbgs() << "Found pointer user: " << *U << '\n');
    if (isa<LoadInst>(Inst)) {
      for (auto P : Path)
        replace(P);
      replace(Inst);
    } else if (isa<GetElementPtrInst>(Inst) || isa<BitCastInst>(Inst)) {
      Path.push_back(Inst);
      findLoadAndReplace(*Inst);
      Path.pop_back();
    } else {
      return;
    }
  }
}

Value *PointerReplacer::getReplacement(Value *V) {
  auto Loc = WorkMap.find(V);
  if (Loc != WorkMap.end())
    return Loc->second;
  return nullptr;
}

void PointerReplacer::replace(Instruction *I) {
  if (getReplacement(I))
    return;

  if (auto *LT = dyn_cast<LoadInst>(I)) {
    auto *V = getReplacement(LT->getPointerOperand());
    assert(V && "Operand not replaced");
    auto *NewI = new LoadInst(I->getType(), V);
    NewI->takeName(LT);
    IC.InsertNewInstWith(NewI, *LT);
    IC.replaceInstUsesWith(*LT, NewI);
    WorkMap[LT] = NewI;
  } else if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    auto *V = getReplacement(GEP->getPointerOperand());
    assert(V && "Operand not replaced");
    SmallVector<Value *, 8> Indices;
    Indices.append(GEP->idx_begin(), GEP->idx_end());
    auto *NewI = GetElementPtrInst::Create(
        V->getType()->getPointerElementType(), V, Indices);
    IC.InsertNewInstWith(NewI, *GEP);
    NewI->takeName(GEP);
    WorkMap[GEP] = NewI;
  } else if (auto *BC = dyn_cast<BitCastInst>(I)) {
    auto *V = getReplacement(BC->getOperand(0));
    assert(V && "Operand not replaced");
    auto *NewT = PointerType::get(BC->getType()->getPointerElementType(),
                                  V->getType()->getPointerAddressSpace());
    auto *NewI = new BitCastInst(V, NewT);
    IC.InsertNewInstWith(NewI, *BC);
    NewI->takeName(BC);
    WorkMap[BC] = NewI;
  } else {
    llvm_unreachable("should never reach here");
  }
}

void PointerReplacer::replacePointer(Instruction &I, Value *V) {
#ifndef NDEBUG
  auto *PT = cast<PointerType>(I.getType());
  auto *NT = cast<PointerType>(V->getType());
  assert(PT != NT && PT->getElementType() == NT->getElementType() &&
         "Invalid usage");
#endif
  WorkMap[&I] = V;
  findLoadAndReplace(I);
}
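
// Sketch of the replacement this enables (illustrative; the address spaces
// and names are hypothetical). Given an alloca whose contents are copied
// from a constant global @g in a different address space:
//
//   %a = alloca i32                         ; i32*
//   %v = load i32, i32* %a
//
// replacePointer(%a, @g) rewrites the chain feeding each load so the load
// reads through the new pointer directly, with no addrspacecast:
//
//   %v = load i32, i32 addrspace(4)* @g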

Instruction *InstCombiner::visitAllocaInst(AllocaInst &AI) {
  if (auto *I = simplifyAllocaArraySize(*this, AI))
    return I;

  if (AI.getAllocatedType()->isSized()) {
    // If the alignment is 0 (unspecified), assign it the preferred alignment.
    if (AI.getAlignment() == 0)
      AI.setAlignment(
          MaybeAlign(DL.getPrefTypeAlignment(AI.getAllocatedType())));

    // Move all allocas of zero-byte objects to the entry block and merge them
    // together. Note that we only do this for allocas, because malloc should
    // allocate and return a unique pointer, even for a zero byte allocation.
    if (DL.getTypeAllocSize(AI.getAllocatedType()) == 0) {
      // For a zero sized alloca there is no point in doing an array allocation.
      // This is helpful if the array size is a complicated expression not used
      // elsewhere.
      if (AI.isArrayAllocation()) {
        AI.setOperand(0, ConstantInt::get(AI.getArraySize()->getType(), 1));
        return &AI;
      }

      // Get the first instruction in the entry block.
      BasicBlock &EntryBlock = AI.getParent()->getParent()->getEntryBlock();
      Instruction *FirstInst = EntryBlock.getFirstNonPHIOrDbg();
      if (FirstInst != &AI) {
        // If the entry block doesn't start with a zero-size alloca then move
        // this one to the start of the entry block. There is no problem with
        // dominance as the array size was forced to a constant earlier already.
        AllocaInst *EntryAI = dyn_cast<AllocaInst>(FirstInst);
        if (!EntryAI || !EntryAI->getAllocatedType()->isSized() ||
            DL.getTypeAllocSize(EntryAI->getAllocatedType()) != 0) {
          AI.moveBefore(FirstInst);
          return &AI;
        }

        // If the alignment of the entry block alloca is 0 (unspecified),
        // assign it the preferred alignment.
        if (EntryAI->getAlignment() == 0)
          EntryAI->setAlignment(
              MaybeAlign(DL.getPrefTypeAlignment(EntryAI->getAllocatedType())));
        // Replace this zero-sized alloca with the one at the start of the entry
        // block after ensuring that the address will be aligned enough for both
        // types.
        const MaybeAlign MaxAlign(
            std::max(EntryAI->getAlignment(), AI.getAlignment()));
        EntryAI->setAlignment(MaxAlign);
        if (AI.getType() != EntryAI->getType())
          return new BitCastInst(EntryAI, AI.getType());
        return replaceInstUsesWith(AI, EntryAI);
      }
    }
  }

  if (AI.getAlignment()) {
    // Check to see if this allocation is only modified by a memcpy/memmove from
    // a constant global whose alignment is equal to or exceeds that of the
    // allocation. If this is the case, we can change all users to use the
    // constant global instead. This is commonly produced by the CFE by
    // constructs like "void foo() { int A[] = {1,2,3,4,5,6,7,8,9...}; }" if 'A'
    // is only subsequently read.
    SmallVector<Instruction *, 4> ToDelete;
    if (MemTransferInst *Copy = isOnlyCopiedFromConstantGlobal(&AI, ToDelete)) {
      unsigned SourceAlign = getOrEnforceKnownAlignment(
          Copy->getSource(), AI.getAlignment(), DL, &AI, &AC, &DT);
      if (AI.getAlignment() <= SourceAlign &&
          isDereferenceableForAllocaSize(Copy->getSource(), &AI, DL)) {
        LLVM_DEBUG(dbgs() << "Found alloca equal to global: " << AI << '\n');
        LLVM_DEBUG(dbgs() << "  memcpy = " << *Copy << '\n');
        for (unsigned i = 0, e = ToDelete.size(); i != e; ++i)
          eraseInstFromFunction(*ToDelete[i]);
        Constant *TheSrc = cast<Constant>(Copy->getSource());
        auto *SrcTy = TheSrc->getType();
        auto *DestTy = PointerType::get(AI.getType()->getPointerElementType(),
                                        SrcTy->getPointerAddressSpace());
        Constant *Cast =
            ConstantExpr::getPointerBitCastOrAddrSpaceCast(TheSrc, DestTy);
        if (AI.getType()->getPointerAddressSpace() ==
            SrcTy->getPointerAddressSpace()) {
          Instruction *NewI = replaceInstUsesWith(AI, Cast);
          eraseInstFromFunction(*Copy);
          ++NumGlobalCopies;
          return NewI;
        } else {
          PointerReplacer PtrReplacer(*this);
          PtrReplacer.replacePointer(AI, Cast);
          ++NumGlobalCopies;
        }
      }
    }
  }

  // At last, use the generic allocation site handler to aggressively remove
  // unused allocas.
  return visitAllocSite(AI);
}

// Are we allowed to form an atomic load or store of this type?
static bool isSupportedAtomicType(Type *Ty) {
  return Ty->isIntOrPtrTy() || Ty->isFloatingPointTy();
}

/// Helper to combine a load to a new type.
///
/// This just does the work of combining a load to a new type. It handles
/// metadata, etc., and returns the new instruction. The \c NewTy should be the
/// loaded *value* type. This will convert it to a pointer, cast the operand to
/// that pointer type, load it, etc.
///
/// Note that this will create all of the instructions with whatever insert
/// point the \c InstCombiner currently is using.
static LoadInst *combineLoadToNewType(InstCombiner &IC, LoadInst &LI,
                                      Type *NewTy, const Twine &Suffix = "") {
  assert((!LI.isAtomic() || isSupportedAtomicType(NewTy)) &&
         "can't fold an atomic load to requested type");

  Value *Ptr = LI.getPointerOperand();
  unsigned AS = LI.getPointerAddressSpace();
  Value *NewPtr = nullptr;
  if (!(match(Ptr, m_BitCast(m_Value(NewPtr))) &&
        NewPtr->getType()->getPointerElementType() == NewTy &&
        NewPtr->getType()->getPointerAddressSpace() == AS))
    NewPtr = IC.Builder.CreateBitCast(Ptr, NewTy->getPointerTo(AS));

  LoadInst *NewLoad = IC.Builder.CreateAlignedLoad(
      NewTy, NewPtr, LI.getAlignment(), LI.isVolatile(), LI.getName() + Suffix);
  NewLoad->setAtomic(LI.getOrdering(), LI.getSyncScopeID());
  copyMetadataForLoad(*NewLoad, LI);
  return NewLoad;
}
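
// Illustrative effect (a sketch; names are hypothetical): asked to reload
// %p as a float, this helper produces
//
//   Before:  %x = load i32, i32* %p
//   After:   %p.cast = bitcast i32* %p to float*
//            %x1 = load float, float* %p.cast
//
// The pointer bitcast is elided when %p is itself a bitcast from a pointer
// of the requested type, per the m_BitCast match above.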

/// Combine a store to a new type.
///
/// Returns the newly created store instruction.
static StoreInst *combineStoreToNewValue(InstCombiner &IC, StoreInst &SI,
                                         Value *V) {
  assert((!SI.isAtomic() || isSupportedAtomicType(V->getType())) &&
         "can't fold an atomic store of requested type");

  Value *Ptr = SI.getPointerOperand();
  unsigned AS = SI.getPointerAddressSpace();
  SmallVector<std::pair<unsigned, MDNode *>, 8> MD;
  SI.getAllMetadata(MD);

  StoreInst *NewStore = IC.Builder.CreateAlignedStore(
      V, IC.Builder.CreateBitCast(Ptr, V->getType()->getPointerTo(AS)),
      SI.getAlignment(), SI.isVolatile());
  NewStore->setAtomic(SI.getOrdering(), SI.getSyncScopeID());
  for (const auto &MDPair : MD) {
    unsigned ID = MDPair.first;
    MDNode *N = MDPair.second;
    // Note, essentially every kind of metadata should be preserved here! This
    // routine is supposed to clone a store instruction changing *only its
    // type*. The only metadata it makes sense to drop is metadata which is
    // invalidated when the pointer type changes. This should essentially
    // never be the case in LLVM, but we explicitly switch over only known
    // metadata to be conservatively correct. If you are adding metadata to
    // LLVM which pertains to stores, you almost certainly want to add it
    // here.
    switch (ID) {
    case LLVMContext::MD_dbg:
    case LLVMContext::MD_tbaa:
    case LLVMContext::MD_prof:
    case LLVMContext::MD_fpmath:
    case LLVMContext::MD_tbaa_struct:
    case LLVMContext::MD_alias_scope:
    case LLVMContext::MD_noalias:
    case LLVMContext::MD_nontemporal:
    case LLVMContext::MD_mem_parallel_loop_access:
    case LLVMContext::MD_access_group:
      // All of these directly apply.
      NewStore->setMetadata(ID, N);
      break;
    case LLVMContext::MD_invariant_load:
    case LLVMContext::MD_nonnull:
    case LLVMContext::MD_range:
    case LLVMContext::MD_align:
    case LLVMContext::MD_dereferenceable:
    case LLVMContext::MD_dereferenceable_or_null:
      // These don't apply for stores.
      break;
    }
  }

  return NewStore;
}

/// Returns true if the instruction represents a minmax pattern like:
///   select ((cmp load V1, load V2), V1, V2).
static bool isMinMaxWithLoads(Value *V) {
  assert(V->getType()->isPointerTy() && "Expected pointer type.");
  // Ignore possible ty* to ixx* bitcast.
  V = peekThroughBitcast(V);
  // Check that select is select ((cmp load V1, load V2), V1, V2) - minmax
  // pattern.
  CmpInst::Predicate Pred;
  Instruction *L1;
  Instruction *L2;
  Value *LHS;
  Value *RHS;
  if (!match(V, m_Select(m_Cmp(Pred, m_Instruction(L1), m_Instruction(L2)),
                         m_Value(LHS), m_Value(RHS))))
    return false;
  return (match(L1, m_Load(m_Specific(LHS))) &&
          match(L2, m_Load(m_Specific(RHS)))) ||
         (match(L1, m_Load(m_Specific(RHS))) &&
          match(L2, m_Load(m_Specific(LHS))));
}
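
// Illustrative IR for the matched pattern (a sketch; names hypothetical):
//
//   %v1  = load i32, i32* %p1
//   %v2  = load i32, i32* %p2
//   %cmp = icmp slt i32 %v1, %v2
//   %sel = select i1 %cmp, i32* %p1, i32* %p2
//
// isMinMaxWithLoads(%sel) returns true here: the select chooses between the
// two pointers whose loaded values were compared, i.e. a min/max computed
// over memory.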

/// Combine loads to match the type of their uses' value after looking
/// through intervening bitcasts.
///
/// The core idea here is that if the result of a load is used in an operation,
/// we should load the type most conducive to that operation. For example, when
/// loading an integer and converting that immediately to a pointer, we should
/// instead directly load a pointer.
///
/// However, this routine must never change the width of a load or the number of
/// loads as that would introduce a semantic change. This combine is expected to
/// be a semantic no-op which just allows loads to more closely model the types
/// of their consuming operations.
///
/// Currently, we also refuse to change the precise type used for an atomic load
/// or a volatile load. This is debatable, and might be reasonable to change
/// later. However, it is risky in case some backend or other part of LLVM is
/// relying on the exact type loaded to select appropriate atomic operations.
static Instruction *combineLoadToOperationType(InstCombiner &IC, LoadInst &LI) {
  // FIXME: We could probably handle both volatile and ordered atomic loads
  // here with some care, but it isn't clear that this is important.
  if (!LI.isUnordered())
    return nullptr;

  if (LI.use_empty())
    return nullptr;

  // swifterror values can't be bitcasted.
  if (LI.getPointerOperand()->isSwiftError())
    return nullptr;

  Type *Ty = LI.getType();
  const DataLayout &DL = IC.getDataLayout();

  // Try to canonicalize loads which are only ever stored to operate over
  // integers instead of any other type. We only do this when the loaded type
  // is sized and has a size exactly the same as its store size and the store
  // size is a legal integer type.
  // Do not perform canonicalization if a minmax pattern is found (to avoid
  // an infinite loop).
  if (!Ty->isIntegerTy() && Ty->isSized() &&
      DL.isLegalInteger(DL.getTypeStoreSizeInBits(Ty)) &&
      DL.typeSizeEqualsStoreSize(Ty) &&
      !DL.isNonIntegralPointerType(Ty) &&
      !isMinMaxWithLoads(
          peekThroughBitcast(LI.getPointerOperand(), /*OneUseOnly=*/true))) {
    if (all_of(LI.users(), [&LI](User *U) {
          auto *SI = dyn_cast<StoreInst>(U);
          return SI && SI->getPointerOperand() != &LI &&
                 !SI->getPointerOperand()->isSwiftError();
        })) {
      LoadInst *NewLoad = combineLoadToNewType(
          IC, LI,
          Type::getIntNTy(LI.getContext(), DL.getTypeStoreSizeInBits(Ty)));
      // Replace all the stores with stores of the newly loaded value.
      for (auto UI = LI.user_begin(), UE = LI.user_end(); UI != UE;) {
        auto *SI = cast<StoreInst>(*UI++);
        IC.Builder.SetInsertPoint(SI);
        combineStoreToNewValue(IC, *SI, NewLoad);
        IC.eraseInstFromFunction(*SI);
      }
      assert(LI.use_empty() && "Failed to remove all users of the load!");
      // Return the old load so the combiner can delete it safely.
      return &LI;
    }
  }

  // Fold away bit casts of the loaded value by loading the desired type.
  // We can do this for BitCastInsts as well as casts from and to pointer types,
  // as long as those are noops (i.e., the source or dest type have the same
  // bitwidth as the target's pointers).
  if (LI.hasOneUse())
    if (auto *CI = dyn_cast<CastInst>(LI.user_back()))
      if (CI->isNoopCast(DL))
        if (!LI.isAtomic() || isSupportedAtomicType(CI->getDestTy())) {
          LoadInst *NewLoad = combineLoadToNewType(IC, LI, CI->getDestTy());
          CI->replaceAllUsesWith(NewLoad);
          IC.eraseInstFromFunction(*CI);
          return &LI;
        }

  // FIXME: We should also canonicalize loads of vectors when their elements are
  // cast to other types.
  return nullptr;
}
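
// Illustrative effect of the no-op-cast fold above (a sketch; assumes i64
// matches the target's pointer width, names hypothetical):
//
//   Before:  %x = load i64, i64* %p
//            %q = inttoptr i64 %x to i8*
//   After:   %p.cast = bitcast i64* %p to i8**
//            %q1 = load i8*, i8** %p.cast
//
// The load width is unchanged; only the loaded type is adjusted to match
// the consuming cast.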

static Instruction *unpackLoadToAggregate(InstCombiner &IC, LoadInst &LI) {
  // FIXME: We could probably handle both volatile and atomic loads here with
  // some care, but it isn't clear that this is important.
  if (!LI.isSimple())
    return nullptr;

  Type *T = LI.getType();
  if (!T->isAggregateType())
    return nullptr;

  StringRef Name = LI.getName();
  assert(LI.getAlignment() && "Alignment must be set at this point");

  if (auto *ST = dyn_cast<StructType>(T)) {
    // If the struct only has one element, we unpack.
    auto NumElements = ST->getNumElements();
    if (NumElements == 1) {
      LoadInst *NewLoad = combineLoadToNewType(IC, LI, ST->getTypeAtIndex(0U),
                                               ".unpack");
      AAMDNodes AAMD;
      LI.getAAMetadata(AAMD);
      NewLoad->setAAMetadata(AAMD);
      return IC.replaceInstUsesWith(LI, IC.Builder.CreateInsertValue(
        UndefValue::get(T), NewLoad, 0, Name));
    }

    // We don't want to break loads with padding here as we'd lose
    // the knowledge that padding exists for the rest of the pipeline.
    const DataLayout &DL = IC.getDataLayout();
    auto *SL = DL.getStructLayout(ST);
    if (SL->hasPadding())
      return nullptr;

    auto Align = LI.getAlignment();
    if (!Align)
      Align = DL.getABITypeAlignment(ST);

    auto *Addr = LI.getPointerOperand();
    auto *IdxType = Type::getInt32Ty(T->getContext());
    auto *Zero = ConstantInt::get(IdxType, 0);

    Value *V = UndefValue::get(T);
    for (unsigned i = 0; i < NumElements; i++) {
      Value *Indices[2] = {
        Zero,
        ConstantInt::get(IdxType, i),
      };
      auto *Ptr = IC.Builder.CreateInBoundsGEP(ST, Addr, makeArrayRef(Indices),
                                               Name + ".elt");
      auto EltAlign = MinAlign(Align, SL->getElementOffset(i));
      auto *L = IC.Builder.CreateAlignedLoad(ST->getElementType(i), Ptr,
                                             EltAlign, Name + ".unpack");
      // Propagate AA metadata. It'll still be valid on the narrowed load.
      AAMDNodes AAMD;
      LI.getAAMetadata(AAMD);
      L->setAAMetadata(AAMD);
      V = IC.Builder.CreateInsertValue(V, L, i);
    }

    V->setName(Name);
    return IC.replaceInstUsesWith(LI, V);
  }

  if (auto *AT = dyn_cast<ArrayType>(T)) {
    auto *ET = AT->getElementType();
    auto NumElements = AT->getNumElements();
    if (NumElements == 1) {
      LoadInst *NewLoad = combineLoadToNewType(IC, LI, ET, ".unpack");
      AAMDNodes AAMD;
      LI.getAAMetadata(AAMD);
      NewLoad->setAAMetadata(AAMD);
      return IC.replaceInstUsesWith(LI, IC.Builder.CreateInsertValue(
        UndefValue::get(T), NewLoad, 0, Name));
    }

    // Bail out if the array is too large. Ideally we would like to optimize
    // arrays of arbitrary size but this has a terrible impact on compile time.
    // The threshold here is chosen arbitrarily, maybe needs a little bit of
    // tuning.
    if (NumElements > IC.MaxArraySizeForCombine)
      return nullptr;

    const DataLayout &DL = IC.getDataLayout();
    auto EltSize = DL.getTypeAllocSize(ET);
    auto Align = LI.getAlignment();
    if (!Align)
      Align = DL.getABITypeAlignment(T);

    auto *Addr = LI.getPointerOperand();
    auto *IdxType = Type::getInt64Ty(T->getContext());
    auto *Zero = ConstantInt::get(IdxType, 0);

    Value *V = UndefValue::get(T);
    uint64_t Offset = 0;
    for (uint64_t i = 0; i < NumElements; i++) {
      Value *Indices[2] = {
        Zero,
        ConstantInt::get(IdxType, i),
      };
      auto *Ptr = IC.Builder.CreateInBoundsGEP(AT, Addr, makeArrayRef(Indices),
                                               Name + ".elt");
      auto *L = IC.Builder.CreateAlignedLoad(
          AT->getElementType(), Ptr, MinAlign(Align, Offset), Name + ".unpack");
      AAMDNodes AAMD;
      LI.getAAMetadata(AAMD);
      L->setAAMetadata(AAMD);
      V = IC.Builder.CreateInsertValue(V, L, i);
      Offset += EltSize;
    }

    V->setName(Name);
    return IC.replaceInstUsesWith(LI, V);
  }

  return nullptr;
}
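
// Illustrative unpack of a struct load (a sketch; assumes the struct layout
// has no padding, and names follow the ".elt"/".unpack" suffixes used
// above):
//
//   Before:  %s = load { i32, i32 }, { i32, i32 }* %p
//   After:   %s.elt = getelementptr inbounds { i32, i32 },
//                         { i32, i32 }* %p, i32 0, i32 0
//            %s.unpack = load i32, i32* %s.elt
//            %0 = insertvalue { i32, i32 } undef, i32 %s.unpack, 0
//            %s.elt1 = getelementptr inbounds { i32, i32 },
//                         { i32, i32 }* %p, i32 0, i32 1
//            %s.unpack2 = load i32, i32* %s.elt1
//            %s3 = insertvalue { i32, i32 } %0, i32 %s.unpack2, 1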

// If we can determine that all possible objects pointed to by the provided
// pointer value are not only dereferenceable but also definitively less than
// or equal to the provided maximum size, then return true. Otherwise, return
// false. Constant global values and allocas are the kinds of objects whose
// size we can typically determine here.
//
// FIXME: This should probably live in ValueTracking (or similar).
static bool isObjectSizeLessThanOrEq(Value *V, uint64_t MaxSize,
                                     const DataLayout &DL) {
  SmallPtrSet<Value *, 4> Visited;
  SmallVector<Value *, 4> Worklist(1, V);

  do {
    Value *P = Worklist.pop_back_val();
    P = P->stripPointerCasts();

    if (!Visited.insert(P).second)
      continue;

    if (SelectInst *SI = dyn_cast<SelectInst>(P)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }

    if (PHINode *PN = dyn_cast<PHINode>(P)) {
      for (Value *IncValue : PN->incoming_values())
        Worklist.push_back(IncValue);
      continue;
    }

    if (GlobalAlias *GA = dyn_cast<GlobalAlias>(P)) {
      if (GA->isInterposable())
        return false;
      Worklist.push_back(GA->getAliasee());
      continue;
    }

    // If we know how big this object is, and it is less than MaxSize, continue
    // searching. Otherwise, return false.
    if (AllocaInst *AI = dyn_cast<AllocaInst>(P)) {
      if (!AI->getAllocatedType()->isSized())
        return false;

      ConstantInt *CS = dyn_cast<ConstantInt>(AI->getArraySize());
      if (!CS)
        return false;

      uint64_t TypeSize = DL.getTypeAllocSize(AI->getAllocatedType());
      // Make sure that, even if the multiplication below would wrap as an
      // uint64_t, we still do the right thing.
      if ((CS->getValue().zextOrSelf(128)*APInt(128, TypeSize)).ugt(MaxSize))
        return false;
      continue;
    }

    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(P)) {
      if (!GV->hasDefinitiveInitializer() || !GV->isConstant())
        return false;

      uint64_t InitSize = DL.getTypeAllocSize(GV->getValueType());
      if (InitSize > MaxSize)
        return false;
      continue;
    }

    return false;
  } while (!Worklist.empty());

  return true;
}
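
// For example (illustrative): given
//
//   @g = private constant [2 x i32] [i32 1, i32 2]   ; 8 bytes
//
// isObjectSizeLessThanOrEq(@g, 8, DL) returns true, while any pointer that
// may reach an unknown value (e.g. a function argument) conservatively
// yields false.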
| 821 | |
| 822 | // If we're indexing into an object of a known size, and the outer index is |
| 823 | // not a constant, but having any value but zero would lead to undefined |
| 824 | // behavior, replace it with zero. |
| 825 | // |
| 826 | // For example, if we have: |
| 827 | // @f.a = private unnamed_addr constant [1 x i32] [i32 12], align 4 |
| 828 | // ... |
| 829 | // %arrayidx = getelementptr inbounds [1 x i32]* @f.a, i64 0, i64 %x |
| 830 | // ... = load i32* %arrayidx, align 4 |
| 831 | // Then we know that we can replace %x in the GEP with i64 0. |
| 832 | // |
| 833 | // FIXME: We could fold any GEP index to zero that would cause UB if it were |
| 834 | // not zero. Currently, we only handle the first such index. Also, we could |
| 835 | // also search through non-zero constant indices if we kept track of the |
| 836 | // offsets those indices implied. |
| 837 | static bool canReplaceGEPIdxWithZero(InstCombiner &IC, GetElementPtrInst *GEPI, |
| 838 | Instruction *MemI, unsigned &Idx) { |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 839 | if (GEPI->getNumOperands() < 2) |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 840 | return false; |
| 841 | |
| 842 | // Find the first non-zero index of a GEP. If all indices are zero, return |
| 843 | // one past the last index. |
| 844 | auto FirstNZIdx = [](const GetElementPtrInst *GEPI) { |
| 845 | unsigned I = 1; |
| 846 | for (unsigned IE = GEPI->getNumOperands(); I != IE; ++I) { |
| 847 | Value *V = GEPI->getOperand(I); |
| 848 | if (const ConstantInt *CI = dyn_cast<ConstantInt>(V)) |
| 849 | if (CI->isZero()) |
| 850 | continue; |
| 851 | |
| 852 | break; |
| 853 | } |
| 854 | |
| 855 | return I; |
| 856 | }; |
| 857 | |
| 858 | // Skip through initial 'zero' indices, and find the corresponding pointer |
| 859 | // type. See if the next index is not a constant. |
| 860 | Idx = FirstNZIdx(GEPI); |
| 861 | if (Idx == GEPI->getNumOperands()) |
| 862 | return false; |
| 863 | if (isa<Constant>(GEPI->getOperand(Idx))) |
| 864 | return false; |
| 865 | |
| 866 | SmallVector<Value *, 4> Ops(GEPI->idx_begin(), GEPI->idx_begin() + Idx); |
Eduard Burtescu | 19eb031 | 2016-01-19 17:28:00 +0000 | [diff] [blame] | 867 | Type *AllocTy = |
| 868 | GetElementPtrInst::getIndexedType(GEPI->getSourceElementType(), Ops); |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 869 | if (!AllocTy || !AllocTy->isSized()) |
| 870 | return false; |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 871 | const DataLayout &DL = IC.getDataLayout(); |
| 872 | uint64_t TyAllocSize = DL.getTypeAllocSize(AllocTy); |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 873 | |
| 874 | // If there are more indices after the one we might replace with a zero, make |
| 875 | // sure they're all non-negative. If any of them are negative, the overall |
| 876 | // address being computed might be before the base address determined by the |
| 877 | // first non-zero index. |
| 878 | auto IsAllNonNegative = [&]() { |
| 879 | for (unsigned i = Idx+1, e = GEPI->getNumOperands(); i != e; ++i) { |
Craig Topper | 1a36b7d | 2017-05-15 06:39:41 +0000 | [diff] [blame] | 880 | KnownBits Known = IC.computeKnownBits(GEPI->getOperand(i), 0, MemI); |
| 881 | if (Known.isNonNegative()) |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 882 | continue; |
| 883 | return false; |
| 884 | } |
| 885 | |
| 886 | return true; |
| 887 | }; |
| 888 | |
| 889 | // FIXME: If the GEP is not inbounds, and there are extra indices after the |
| 890 | // one we'll replace, those could cause the address computation to wrap |
| 891 | // (rendering the IsAllNonNegative() check below insufficient). We could do
Bruce Mitchener | e9ffb45 | 2015-09-12 01:17:08 +0000 | [diff] [blame] | 892 | // better by ignoring zero indices (and other indices we can prove small
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 893 | // enough not to wrap). |
| 894 | if (Idx+1 != GEPI->getNumOperands() && !GEPI->isInBounds()) |
| 895 | return false; |
| 896 | |
| 897 | // Note that isObjectSizeLessThanOrEq will return true only if the pointer is |
| 898 | // also known to be dereferenceable. |
| 899 | return isObjectSizeLessThanOrEq(GEPI->getOperand(0), TyAllocSize, DL) && |
| 900 | IsAllNonNegative(); |
| 901 | } |
| 902 | |
| 903 | // If we're indexing into an object with a variable index for the memory |
| 904 | // access, but the object has only one element, we can assume that the index |
| 905 | // will always be zero. If we replace the GEP, return it. |
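| | // A minimal illustrative sketch (hypothetical IR, names invented):
| | //   %p = getelementptr inbounds [1 x i32]* @g, i64 0, i64 %idx
| | //   %v = load i32* %p
| | // Any value of %idx other than 0 would be undefined behavior, so the GEP
| | // is cloned with a constant zero index and the load is updated to use it.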
| 906 | template <typename T> |
| 907 | static Instruction *replaceGEPIdxWithZero(InstCombiner &IC, Value *Ptr, |
| 908 | T &MemI) { |
| 909 | if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(Ptr)) { |
| 910 | unsigned Idx; |
| 911 | if (canReplaceGEPIdxWithZero(IC, GEPI, &MemI, Idx)) { |
| 912 | Instruction *NewGEPI = GEPI->clone(); |
| 913 | NewGEPI->setOperand(Idx, |
| 914 | ConstantInt::get(GEPI->getOperand(Idx)->getType(), 0)); |
| 915 | NewGEPI->insertBefore(GEPI); |
| 916 | MemI.setOperand(MemI.getPointerOperandIndex(), NewGEPI); |
| 917 | return NewGEPI; |
| 918 | } |
| 919 | } |
| 920 | |
| 921 | return nullptr; |
| 922 | } |
| 923 | |
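| | // A store through null, or through a GEP whose base is null, is a candidate
| | // for simplification (SimplifyCFG later turns it into 'unreachable') when
| | // null is not a defined address in the store's address space. A hypothetical
| | // sketch:
| | //   store i32 0, i32* null
| | //   %p = getelementptr i32* null, i64 %x
| | //   store i32 0, i32* %p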
Anna Thomas | 2dd9835 | 2017-12-12 14:12:33 +0000 | [diff] [blame] | 924 | static bool canSimplifyNullStoreOrGEP(StoreInst &SI) { |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 925 | if (NullPointerIsDefined(SI.getFunction(), SI.getPointerAddressSpace())) |
Anna Thomas | 2dd9835 | 2017-12-12 14:12:33 +0000 | [diff] [blame] | 926 | return false; |
| 927 | |
| 928 | auto *Ptr = SI.getPointerOperand(); |
| 929 | if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(Ptr)) |
| 930 | Ptr = GEPI->getOperand(0); |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 931 | // The null-pointer-is-defined case was rejected above, so a null base
| 932 | // pointer is all that is left to check for.
| | return isa<ConstantPointerNull>(Ptr);
Anna Thomas | 2dd9835 | 2017-12-12 14:12:33 +0000 | [diff] [blame] | 933 | } |
| 934 | |
Davide Italiano | ffcb4df | 2017-04-19 17:26:57 +0000 | [diff] [blame] | 935 | static bool canSimplifyNullLoadOrGEP(LoadInst &LI, Value *Op) { |
| 936 | if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(Op)) { |
| 937 | const Value *GEPI0 = GEPI->getOperand(0); |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 938 | if (isa<ConstantPointerNull>(GEPI0) && |
| 939 | !NullPointerIsDefined(LI.getFunction(), GEPI->getPointerAddressSpace())) |
Davide Italiano | ffcb4df | 2017-04-19 17:26:57 +0000 | [diff] [blame] | 940 | return true; |
| 941 | } |
| 942 | if (isa<UndefValue>(Op) || |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 943 | (isa<ConstantPointerNull>(Op) && |
| 944 | !NullPointerIsDefined(LI.getFunction(), LI.getPointerAddressSpace()))) |
Davide Italiano | ffcb4df | 2017-04-19 17:26:57 +0000 | [diff] [blame] | 945 | return true; |
| 946 | return false; |
| 947 | } |
| 948 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 949 | Instruction *InstCombiner::visitLoadInst(LoadInst &LI) { |
| 950 | Value *Op = LI.getOperand(0); |
| 951 | |
Chandler Carruth | 2f75fcf | 2014-10-18 06:36:22 +0000 | [diff] [blame] | 952 | // Try to canonicalize the loaded type. |
| 953 | if (Instruction *Res = combineLoadToOperationType(*this, LI)) |
| 954 | return Res; |
| 955 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 956 | // Attempt to improve the alignment. |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 957 | unsigned KnownAlign = getOrEnforceKnownAlignment( |
Daniel Jasper | aec2fa3 | 2016-12-19 08:22:17 +0000 | [diff] [blame] | 958 | Op, DL.getPrefTypeAlignment(LI.getType()), DL, &LI, &AC, &DT); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 959 | unsigned LoadAlign = LI.getAlignment(); |
| 960 | unsigned EffectiveLoadAlign = |
| 961 | LoadAlign != 0 ? LoadAlign : DL.getABITypeAlignment(LI.getType()); |
Dan Gohman | 3619660 | 2010-08-03 18:20:32 +0000 | [diff] [blame] | 962 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 963 | if (KnownAlign > EffectiveLoadAlign) |
Guillaume Chatelet | 1738022 | 2019-09-30 09:37:05 +0000 | [diff] [blame] | 964 | LI.setAlignment(MaybeAlign(KnownAlign)); |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 965 | else if (LoadAlign == 0) |
Guillaume Chatelet | 1738022 | 2019-09-30 09:37:05 +0000 | [diff] [blame] | 966 | LI.setAlignment(MaybeAlign(EffectiveLoadAlign)); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 967 | |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 968 | // Replace GEP indices if possible. |
| 969 | if (Instruction *NewGEPI = replaceGEPIdxWithZero(*this, Op, LI)) { |
| 970 | Worklist.Add(NewGEPI); |
| 971 | return &LI; |
| 972 | } |
| 973 | |
Mehdi Amini | 2668a48 | 2015-05-07 05:52:40 +0000 | [diff] [blame] | 974 | if (Instruction *Res = unpackLoadToAggregate(*this, LI)) |
| 975 | return Res; |
| 976 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 977 | // Do really simple store-to-load forwarding and load CSE, to catch cases |
Duncan Sands | 75b5d27 | 2011-02-15 09:23:02 +0000 | [diff] [blame] | 978 | // where there are several consecutive memory accesses to the same location, |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 979 | // separated by a few arithmetic operations. |
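| | // A minimal sketch of the pattern (hypothetical IR):
| | //   store i32 %a, i32* %p
| | //   %b = add i32 %a, 1        ; intervening arithmetic, no memory access
| | //   %c = load i32* %p         ; forwarded: %c is replaced by %a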
Duncan P. N. Exon Smith | 9f8aaf2 | 2015-10-13 16:59:33 +0000 | [diff] [blame] | 980 | BasicBlock::iterator BBI(LI); |
Eli Friedman | bd254a6 | 2016-06-16 02:33:42 +0000 | [diff] [blame] | 981 | bool IsLoadCSE = false; |
Sanjay Patel | b38ad88e | 2017-01-02 23:25:28 +0000 | [diff] [blame] | 982 | if (Value *AvailableVal = FindAvailableLoadedValue( |
| 983 | &LI, LI.getParent(), BBI, DefMaxInstsToScan, AA, &IsLoadCSE)) { |
| 984 | if (IsLoadCSE) |
Florian Hahn | 406f1ff | 2018-08-24 11:40:04 +0000 | [diff] [blame] | 985 | combineMetadataForCSE(cast<LoadInst>(AvailableVal), &LI, false); |
Bjorn Steinbrink | a91fd09 | 2015-07-10 06:55:44 +0000 | [diff] [blame] | 986 | |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 987 | return replaceInstUsesWith( |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 988 | LI, Builder.CreateBitOrPointerCast(AvailableVal, LI.getType(), |
| 989 | LI.getName() + ".cast")); |
Bjorn Steinbrink | a91fd09 | 2015-07-10 06:55:44 +0000 | [diff] [blame] | 990 | } |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 991 | |
Philip Reames | 3ac0718 | 2016-04-21 17:45:05 +0000 | [diff] [blame] | 992 | // None of the following transforms are legal for volatile/ordered atomic |
| 993 | // loads. Most of them do apply for unordered atomics. |
| 994 | if (!LI.isUnordered()) return nullptr; |
Philip Reames | ac55090 | 2016-04-21 17:03:33 +0000 | [diff] [blame] | 995 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 996 | // load(gep null, ...) -> unreachable |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 997 | // load null/undef -> unreachable |
Davide Italiano | ffcb4df | 2017-04-19 17:26:57 +0000 | [diff] [blame] | 998 | // TODO: Consider a target hook for valid address spaces for these transforms.
| 999 | if (canSimplifyNullLoadOrGEP(LI, Op)) { |
| 1000 | // Insert a new store to null instruction before the load to indicate |
| 1001 | // that this code is not reachable. We do this instead of inserting |
| 1002 | // an unreachable instruction directly because we cannot modify the |
| 1003 | // CFG. |
Weiming Zhao | 984f1dc | 2017-07-19 01:27:24 +0000 | [diff] [blame] | 1004 | StoreInst *SI = new StoreInst(UndefValue::get(LI.getType()), |
| 1005 | Constant::getNullValue(Op->getType()), &LI); |
| 1006 | SI->setDebugLoc(LI.getDebugLoc()); |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1007 | return replaceInstUsesWith(LI, UndefValue::get(LI.getType())); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1008 | } |
| 1009 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1010 | if (Op->hasOneUse()) { |
| 1011 | // Change select and PHI nodes to select values instead of addresses: this |
| 1012 | // helps alias analysis out a lot, enables many other simplifications, and
| 1013 | // exposes redundancy in the code. |
| 1014 | // |
| 1015 | // Note that we cannot do the transformation unless we know that the |
| 1016 | // introduced loads cannot trap! Something like this is valid as long as |
| 1017 | // the condition is always false: load (select bool %C, int* null, int* %G), |
| 1018 | // but it would not be valid if we transformed it to load from null |
| 1019 | // unconditionally. |
| 1020 | // |
| 1021 | if (SelectInst *SI = dyn_cast<SelectInst>(Op)) { |
| 1022 | // load (select (Cond, &V1, &V2)) --> select(Cond, load &V1, load &V2). |
Guillaume Chatelet | 301b412 | 2019-10-21 15:10:26 +0000 | [diff] [blame] | 1023 | const MaybeAlign Alignment(LI.getAlignment()); |
| 1024 | if (isSafeToLoadUnconditionally(SI->getOperand(1), LI.getType(), |
| 1025 | Alignment, DL, SI) && |
| 1026 | isSafeToLoadUnconditionally(SI->getOperand(2), LI.getType(), |
| 1027 | Alignment, DL, SI)) { |
James Y Knight | 14359ef | 2019-02-01 20:44:24 +0000 | [diff] [blame] | 1028 | LoadInst *V1 = |
| 1029 | Builder.CreateLoad(LI.getType(), SI->getOperand(1), |
| 1030 | SI->getOperand(1)->getName() + ".val"); |
| 1031 | LoadInst *V2 = |
| 1032 | Builder.CreateLoad(LI.getType(), SI->getOperand(2), |
| 1033 | SI->getOperand(2)->getName() + ".val"); |
Philip Reames | a98c7ea | 2016-04-21 17:59:40 +0000 | [diff] [blame] | 1034 | assert(LI.isUnordered() && "implied by above"); |
Guillaume Chatelet | 301b412 | 2019-10-21 15:10:26 +0000 | [diff] [blame] | 1035 | V1->setAlignment(Alignment); |
Konstantin Zhuravlyov | bb80d3e | 2017-07-11 22:23:00 +0000 | [diff] [blame] | 1036 | V1->setAtomic(LI.getOrdering(), LI.getSyncScopeID()); |
Guillaume Chatelet | 301b412 | 2019-10-21 15:10:26 +0000 | [diff] [blame] | 1037 | V2->setAlignment(Alignment); |
Konstantin Zhuravlyov | bb80d3e | 2017-07-11 22:23:00 +0000 | [diff] [blame] | 1038 | V2->setAtomic(LI.getOrdering(), LI.getSyncScopeID()); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1039 | return SelectInst::Create(SI->getCondition(), V1, V2); |
| 1040 | } |
| 1041 | |
| 1042 | // load (select (cond, null, P)) -> load P |
Larisse Voufo | 532bf71 | 2015-09-18 19:14:35 +0000 | [diff] [blame] | 1043 | if (isa<ConstantPointerNull>(SI->getOperand(1)) && |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 1044 | !NullPointerIsDefined(SI->getFunction(), |
| 1045 | LI.getPointerAddressSpace())) { |
Philip Reames | 5ad26c3 | 2014-12-29 22:46:21 +0000 | [diff] [blame] | 1046 | LI.setOperand(0, SI->getOperand(2)); |
| 1047 | return &LI; |
| 1048 | } |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1049 | |
| 1050 | // load (select (cond, P, null)) -> load P |
Philip Reames | 5ad26c3 | 2014-12-29 22:46:21 +0000 | [diff] [blame] | 1051 | if (isa<ConstantPointerNull>(SI->getOperand(2)) && |
Manoj Gupta | 77eeac3 | 2018-07-09 22:27:23 +0000 | [diff] [blame] | 1052 | !NullPointerIsDefined(SI->getFunction(), |
| 1053 | LI.getPointerAddressSpace())) { |
Philip Reames | 5ad26c3 | 2014-12-29 22:46:21 +0000 | [diff] [blame] | 1054 | LI.setOperand(0, SI->getOperand(1)); |
| 1055 | return &LI; |
| 1056 | } |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1057 | } |
| 1058 | } |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1059 | return nullptr; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1060 | } |
| 1061 | |
Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 1062 | /// Look for extractelement/insertvalue sequence that acts like a bitcast. |
Arch D. Robison | be0490a | 2016-04-25 22:22:39 +0000 | [diff] [blame] | 1063 | /// |
| 1064 | /// \returns underlying value that was "cast", or nullptr otherwise. |
| 1065 | /// |
| 1066 | /// For example, if we have: |
| 1067 | /// |
| 1068 | /// %E0 = extractelement <2 x double> %U, i32 0 |
| 1069 | /// %V0 = insertvalue [2 x double] undef, double %E0, 0 |
| 1070 | /// %E1 = extractelement <2 x double> %U, i32 1 |
| 1071 | /// %V1 = insertvalue [2 x double] %V0, double %E1, 1 |
| 1072 | /// |
| 1073 | /// and the layout of a <2 x double> is isomorphic to a [2 x double], |
| 1074 | /// then %V1 can be safely approximated by a conceptual "bitcast" of %U. |
| 1075 | /// Note that %U may contain non-undef values where %V1 has undef. |
| 1076 | static Value *likeBitCastFromVector(InstCombiner &IC, Value *V) { |
| 1077 | Value *U = nullptr; |
| 1078 | while (auto *IV = dyn_cast<InsertValueInst>(V)) { |
| 1079 | auto *E = dyn_cast<ExtractElementInst>(IV->getInsertedValueOperand()); |
| 1080 | if (!E) |
| 1081 | return nullptr; |
| 1082 | auto *W = E->getVectorOperand(); |
| 1083 | if (!U) |
| 1084 | U = W; |
| 1085 | else if (U != W) |
| 1086 | return nullptr; |
| 1087 | auto *CI = dyn_cast<ConstantInt>(E->getIndexOperand()); |
| 1088 | if (!CI || IV->getNumIndices() != 1 || CI->getZExtValue() != *IV->idx_begin()) |
| 1089 | return nullptr; |
| 1090 | V = IV->getAggregateOperand(); |
| 1091 | } |
| 1092 | if (!isa<UndefValue>(V) || !U)
| 1093 | return nullptr; |
| 1094 | |
| 1095 | auto *UT = cast<VectorType>(U->getType()); |
| 1096 | auto *VT = V->getType(); |
| 1097 | // Check that types UT and VT are bitwise isomorphic. |
| 1098 | const auto &DL = IC.getDataLayout(); |
| 1099 | if (DL.getTypeStoreSizeInBits(UT) != DL.getTypeStoreSizeInBits(VT)) { |
| 1100 | return nullptr; |
| 1101 | } |
| 1102 | if (auto *AT = dyn_cast<ArrayType>(VT)) { |
| 1103 | if (AT->getNumElements() != UT->getNumElements()) |
| 1104 | return nullptr; |
| 1105 | } else { |
| 1106 | auto *ST = cast<StructType>(VT); |
| 1107 | if (ST->getNumElements() != UT->getNumElements()) |
| 1108 | return nullptr; |
| 1109 | for (const auto *EltT : ST->elements()) { |
| 1110 | if (EltT != UT->getElementType()) |
| 1111 | return nullptr; |
| 1112 | } |
| 1113 | } |
| 1114 | return U; |
| 1115 | } |
| 1116 | |
Adrian Prantl | 5f8f34e4 | 2018-05-01 15:54:18 +0000 | [diff] [blame] | 1117 | /// Combine stores to match the type of value being stored. |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1118 | /// |
| 1119 | /// The core idea here is that the memory does not have any intrinsic type and |
| 1120 | /// where we can we should match the type of a store to the type of value being |
| 1121 | /// stored. |
| 1122 | /// |
| 1123 | /// However, this routine must never change the width of a store or the number of |
| 1124 | /// stores as that would introduce a semantic change. This combine is expected to |
| 1125 | /// be a semantic no-op which just allows stores to more closely model the types |
| 1126 | /// of their incoming values. |
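| | ///
| | /// A minimal illustrative sketch (hypothetical IR):
| | ///   %b = bitcast float %v to i32
| | ///   store i32 %b, i32* %p
| | /// becomes a store of the original float value:
| | ///   %c = bitcast i32* %p to float*
| | ///   store float %v, float* %c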
| 1127 | /// |
| 1128 | /// Currently, we also refuse to change the precise type used for an atomic or |
| 1129 | /// volatile store. This is debatable, and might be reasonable to change later. |
| 1130 | /// However, it is risky in case some backend or other part of LLVM is relying |
| 1131 | /// on the exact type stored to select appropriate atomic operations. |
| 1132 | /// |
| 1133 | /// \returns true if the store was successfully combined away. This indicates |
| 1134 | /// the caller must erase the store instruction. We have to let the caller erase |
Bruce Mitchener | e9ffb45 | 2015-09-12 01:17:08 +0000 | [diff] [blame] | 1135 | /// the store instruction as otherwise there is no way to signal whether it was |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1136 | /// combined or not: IC.eraseInstFromFunction returns a null pointer.
| 1137 | static bool combineStoreToValueType(InstCombiner &IC, StoreInst &SI) { |
Philip Reames | 6f4d008 | 2016-05-06 22:17:01 +0000 | [diff] [blame] | 1138 | // FIXME: We could probably with some care handle both volatile and ordered |
| 1139 | // atomic stores here but it isn't clear that this is important. |
| 1140 | if (!SI.isUnordered()) |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1141 | return false; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1142 | |
Arnold Schwaighofer | 5d33555 | 2016-09-10 18:14:57 +0000 | [diff] [blame] | 1143 | // swifterror values can't be bitcast.
| 1144 | if (SI.getPointerOperand()->isSwiftError()) |
| 1145 | return false; |
| 1146 | |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1147 | Value *V = SI.getValueOperand(); |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1148 | |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1149 | // Fold away bit casts of the stored value by storing the original type. |
| 1150 | if (auto *BC = dyn_cast<BitCastInst>(V)) { |
Chandler Carruth | a7f247e | 2014-12-09 19:21:16 +0000 | [diff] [blame] | 1151 | V = BC->getOperand(0); |
Philip Reames | 89e92d2 | 2016-12-01 20:17:06 +0000 | [diff] [blame] | 1152 | if (!SI.isAtomic() || isSupportedAtomicType(V->getType())) { |
| 1153 | combineStoreToNewValue(IC, SI, V); |
| 1154 | return true; |
| 1155 | } |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1156 | } |
| 1157 | |
Philip Reames | 89e92d2 | 2016-12-01 20:17:06 +0000 | [diff] [blame] | 1158 | if (Value *U = likeBitCastFromVector(IC, V)) |
| 1159 | if (!SI.isAtomic() || isSupportedAtomicType(U->getType())) { |
| 1160 | combineStoreToNewValue(IC, SI, U); |
| 1161 | return true; |
| 1162 | } |
Arch D. Robison | be0490a | 2016-04-25 22:22:39 +0000 | [diff] [blame] | 1163 | |
JF Bastien | c22d299 | 2016-04-21 19:53:39 +0000 | [diff] [blame] | 1164 | // FIXME: We should also canonicalize stores of vectors when their elements |
| 1165 | // are cast to other types. |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1166 | return false; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1167 | } |
| 1168 | |
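| | // A hypothetical sketch of the struct case below (names invented; the
| | // value names follow the ".elt"/".repack" suffixes used in the code):
| | //   store { i32, i32 } %v, { i32, i32 }* %p
| | // is unpacked into per-element stores through inbounds GEPs:
| | //   %v.elt = extractvalue { i32, i32 } %v, 0
| | //   %p.repack = getelementptr inbounds { i32, i32 }* %p, i32 0, i32 0
| | //   store i32 %v.elt, i32* %p.repack
| | // and likewise for element 1, provided the struct layout has no padding.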
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1169 | static bool unpackStoreToAggregate(InstCombiner &IC, StoreInst &SI) { |
| 1170 | // FIXME: We could probably with some care handle both volatile and atomic |
| 1171 | // stores here but it isn't clear that this is important. |
| 1172 | if (!SI.isSimple()) |
| 1173 | return false; |
| 1174 | |
| 1175 | Value *V = SI.getValueOperand(); |
| 1176 | Type *T = V->getType(); |
| 1177 | |
| 1178 | if (!T->isAggregateType()) |
| 1179 | return false; |
| 1180 | |
Mehdi Amini | 2668a48 | 2015-05-07 05:52:40 +0000 | [diff] [blame] | 1181 | if (auto *ST = dyn_cast<StructType>(T)) { |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1182 | // If the struct has only one element, we unpack.
Mehdi Amini | 1c131b3 | 2015-12-15 01:44:07 +0000 | [diff] [blame] | 1183 | unsigned Count = ST->getNumElements(); |
| 1184 | if (Count == 1) { |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1185 | V = IC.Builder.CreateExtractValue(V, 0); |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1186 | combineStoreToNewValue(IC, SI, V); |
| 1187 | return true; |
| 1188 | } |
Mehdi Amini | 1c131b3 | 2015-12-15 01:44:07 +0000 | [diff] [blame] | 1189 | |
| 1190 | // We don't want to break up stores of structs with padding here, as we'd
| 1191 | // lose the knowledge that padding exists for the rest of the pipeline.
| 1192 | const DataLayout &DL = IC.getDataLayout(); |
| 1193 | auto *SL = DL.getStructLayout(ST); |
| 1194 | if (SL->hasPadding()) |
| 1195 | return false; |
| 1196 | |
Amaury Sechet | 61a7d62 | 2016-02-17 19:21:28 +0000 | [diff] [blame] | 1197 | auto Align = SI.getAlignment(); |
| 1198 | if (!Align) |
| 1199 | Align = DL.getABITypeAlignment(ST); |
| 1200 | |
NAKAMURA Takumi | ec6b1fc | 2015-12-15 09:37:31 +0000 | [diff] [blame] | 1201 | SmallString<16> EltName = V->getName(); |
| 1202 | EltName += ".elt"; |
Mehdi Amini | 1c131b3 | 2015-12-15 01:44:07 +0000 | [diff] [blame] | 1203 | auto *Addr = SI.getPointerOperand(); |
NAKAMURA Takumi | ec6b1fc | 2015-12-15 09:37:31 +0000 | [diff] [blame] | 1204 | SmallString<16> AddrName = Addr->getName(); |
| 1205 | AddrName += ".repack"; |
Amaury Sechet | 61a7d62 | 2016-02-17 19:21:28 +0000 | [diff] [blame] | 1206 | |
Mehdi Amini | 1c131b3 | 2015-12-15 01:44:07 +0000 | [diff] [blame] | 1207 | auto *IdxType = Type::getInt32Ty(ST->getContext()); |
| 1208 | auto *Zero = ConstantInt::get(IdxType, 0); |
| 1209 | for (unsigned i = 0; i < Count; i++) { |
| 1210 | Value *Indices[2] = { |
| 1211 | Zero, |
| 1212 | ConstantInt::get(IdxType, i), |
| 1213 | }; |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1214 | auto *Ptr = IC.Builder.CreateInBoundsGEP(ST, Addr, makeArrayRef(Indices), |
| 1215 | AddrName); |
| 1216 | auto *Val = IC.Builder.CreateExtractValue(V, i, EltName); |
Amaury Sechet | 61a7d62 | 2016-02-17 19:21:28 +0000 | [diff] [blame] | 1217 | auto EltAlign = MinAlign(Align, SL->getElementOffset(i)); |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1218 | llvm::Instruction *NS = IC.Builder.CreateAlignedStore(Val, Ptr, EltAlign); |
Keno Fischer | a236dae | 2017-06-28 23:36:40 +0000 | [diff] [blame] | 1219 | AAMDNodes AAMD; |
| 1220 | SI.getAAMetadata(AAMD); |
| 1221 | NS->setAAMetadata(AAMD); |
Mehdi Amini | 1c131b3 | 2015-12-15 01:44:07 +0000 | [diff] [blame] | 1222 | } |
| 1223 | |
| 1224 | return true; |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1225 | } |
| 1226 | |
David Majnemer | 7536460 | 2015-05-11 05:04:27 +0000 | [diff] [blame] | 1227 | if (auto *AT = dyn_cast<ArrayType>(T)) { |
| 1228 | // If the array has only one element, we unpack.
Amaury Sechet | 3b8b2ea | 2016-03-02 22:36:45 +0000 | [diff] [blame] | 1229 | auto NumElements = AT->getNumElements(); |
| 1230 | if (NumElements == 1) { |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1231 | V = IC.Builder.CreateExtractValue(V, 0); |
David Majnemer | 7536460 | 2015-05-11 05:04:27 +0000 | [diff] [blame] | 1232 | combineStoreToNewValue(IC, SI, V); |
| 1233 | return true; |
| 1234 | } |
Amaury Sechet | 3b8b2ea | 2016-03-02 22:36:45 +0000 | [diff] [blame] | 1235 | |
Davide Italiano | f6988d2 | 2016-10-07 21:53:09 +0000 | [diff] [blame] | 1236 | // Bail out if the array is too large. Ideally we would like to optimize |
| 1237 | // arrays of arbitrary size but this has a terrible impact on compile time. |
| 1238 | // The threshold here is chosen somewhat arbitrarily and may need tuning.
Davide Italiano | 2133bf5 | 2017-02-07 17:56:50 +0000 | [diff] [blame] | 1240 | if (NumElements > IC.MaxArraySizeForCombine) |
Davide Italiano | f6988d2 | 2016-10-07 21:53:09 +0000 | [diff] [blame] | 1241 | return false; |
| 1242 | |
Amaury Sechet | 3b8b2ea | 2016-03-02 22:36:45 +0000 | [diff] [blame] | 1243 | const DataLayout &DL = IC.getDataLayout(); |
| 1244 | auto EltSize = DL.getTypeAllocSize(AT->getElementType()); |
| 1245 | auto Align = SI.getAlignment(); |
| 1246 | if (!Align) |
| 1247 | Align = DL.getABITypeAlignment(T); |
| 1248 | |
| 1249 | SmallString<16> EltName = V->getName(); |
| 1250 | EltName += ".elt"; |
| 1251 | auto *Addr = SI.getPointerOperand(); |
| 1252 | SmallString<16> AddrName = Addr->getName(); |
| 1253 | AddrName += ".repack"; |
| 1254 | |
| 1255 | auto *IdxType = Type::getInt64Ty(T->getContext()); |
| 1256 | auto *Zero = ConstantInt::get(IdxType, 0); |
| 1257 | |
| 1258 | uint64_t Offset = 0; |
| 1259 | for (uint64_t i = 0; i < NumElements; i++) { |
| 1260 | Value *Indices[2] = { |
| 1261 | Zero, |
| 1262 | ConstantInt::get(IdxType, i), |
| 1263 | }; |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1264 | auto *Ptr = IC.Builder.CreateInBoundsGEP(AT, Addr, makeArrayRef(Indices), |
| 1265 | AddrName); |
| 1266 | auto *Val = IC.Builder.CreateExtractValue(V, i, EltName); |
Amaury Sechet | 3b8b2ea | 2016-03-02 22:36:45 +0000 | [diff] [blame] | 1267 | auto EltAlign = MinAlign(Align, Offset); |
Craig Topper | bb4069e | 2017-07-07 23:16:26 +0000 | [diff] [blame] | 1268 | Instruction *NS = IC.Builder.CreateAlignedStore(Val, Ptr, EltAlign); |
Keno Fischer | a236dae | 2017-06-28 23:36:40 +0000 | [diff] [blame] | 1269 | AAMDNodes AAMD; |
| 1270 | SI.getAAMetadata(AAMD); |
| 1271 | NS->setAAMetadata(AAMD); |
Amaury Sechet | 3b8b2ea | 2016-03-02 22:36:45 +0000 | [diff] [blame] | 1272 | Offset += EltSize; |
| 1273 | } |
| 1274 | |
| 1275 | return true; |
David Majnemer | 7536460 | 2015-05-11 05:04:27 +0000 | [diff] [blame] | 1276 | } |
| 1277 | |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1278 | return false; |
| 1279 | } |
| 1280 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1281 | /// equivalentAddressValues - Test if A and B will obviously have the same |
| 1282 | /// value. This includes recognizing that %t0 and %t1 will have the same |
| 1283 | /// value in code like this: |
| 1284 | /// %t0 = getelementptr \@a, 0, 3 |
| 1285 | /// store i32 0, i32* %t0 |
| 1286 | /// %t1 = getelementptr \@a, 0, 3 |
| 1287 | /// %t2 = load i32* %t1 |
| 1288 | /// |
| 1289 | static bool equivalentAddressValues(Value *A, Value *B) { |
| 1290 | // Test if the values are trivially equivalent. |
| 1291 | if (A == B) return true; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1292 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1293 | // Test if the values come from identical arithmetic instructions.
| 1294 | // This uses isIdenticalToWhenDefined instead of isIdenticalTo because |
| 1295 | // it's only used to compare two uses within the same basic block, which
| 1296 | // means that they'll always either have the same value or one of them |
| 1297 | // will have an undefined value. |
| 1298 | if (isa<BinaryOperator>(A) || |
| 1299 | isa<CastInst>(A) || |
| 1300 | isa<PHINode>(A) || |
| 1301 | isa<GetElementPtrInst>(A)) |
| 1302 | if (Instruction *BI = dyn_cast<Instruction>(B)) |
| 1303 | if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI)) |
| 1304 | return true; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1305 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1306 | // Otherwise they may not be equivalent. |
| 1307 | return false; |
| 1308 | } |
| 1309 | |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1310 | /// Converts store (bitcast (load (bitcast (select ...)))) to |
| 1311 | /// store (load (select ...)), where select is minmax: |
| 1312 | /// select ((cmp load V1, load V2), V1, V2). |
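| | ///
| | /// A hypothetical sketch of the matched IR (types and names invented):
| | ///   %lv1 = load float* %v1
| | ///   %lv2 = load float* %v2
| | ///   %cmp = fcmp olt float %lv1, %lv2
| | ///   %min = select i1 %cmp, float* %v1, float* %v2
| | ///   %bc = bitcast float* %min to i32*
| | ///   %li = load i32* %bc
| | ///   store i32 %li, i32* %dst   ; %dst is itself a bitcast of a float*
| | /// The load is rebuilt with type float and the stores are rewritten to
| | /// store the new value, leaving both bitcasts dead.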
Alexey Bataev | 83c15b1 | 2017-12-12 20:28:46 +0000 | [diff] [blame] | 1313 | static bool removeBitcastsFromLoadStoreOnMinMax(InstCombiner &IC, |
| 1314 | StoreInst &SI) { |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1315 | // Is the store's pointer operand a bitcast?
Alexey Bataev | 83c15b1 | 2017-12-12 20:28:46 +0000 | [diff] [blame] | 1316 | if (!match(SI.getPointerOperand(), m_BitCast(m_Value()))) |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1317 | return false; |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1318 | // Is the stored value an integer-typed load through a bitcast?
| 1319 | Value *LoadAddr; |
| 1320 | if (!match(SI.getValueOperand(), m_Load(m_BitCast(m_Value(LoadAddr))))) |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1321 | return false; |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1322 | auto *LI = cast<LoadInst>(SI.getValueOperand()); |
| 1323 | if (!LI->getType()->isIntegerTy()) |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1324 | return false; |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1325 | if (!isMinMaxWithLoads(LoadAddr)) |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1326 | return false; |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1327 | |
Alexey Bataev | 83c15b1 | 2017-12-12 20:28:46 +0000 | [diff] [blame] | 1328 | if (!all_of(LI->users(), [LI, LoadAddr](User *U) { |
| 1329 | auto *SI = dyn_cast<StoreInst>(U); |
| 1330 | return SI && SI->getPointerOperand() != LI && |
| 1331 | peekThroughBitcast(SI->getPointerOperand()) != LoadAddr && |
| 1332 | !SI->getPointerOperand()->isSwiftError(); |
| 1333 | })) |
| 1334 | return false; |
| 1335 | |
| 1336 | IC.Builder.SetInsertPoint(LI); |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1337 | LoadInst *NewLI = combineLoadToNewType( |
| 1338 | IC, *LI, LoadAddr->getType()->getPointerElementType()); |
Alexey Bataev | 83c15b1 | 2017-12-12 20:28:46 +0000 | [diff] [blame] | 1339 | // Replace all the stores with stores of the newly loaded value. |
| 1340 | for (auto *UI : LI->users()) { |
| 1341 | auto *USI = cast<StoreInst>(UI); |
| 1342 | IC.Builder.SetInsertPoint(USI); |
| 1343 | combineStoreToNewValue(IC, *USI, NewLI); |
| 1344 | } |
| 1345 | IC.replaceInstUsesWith(*LI, UndefValue::get(LI->getType())); |
| 1346 | IC.eraseInstFromFunction(*LI); |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1347 | return true; |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1348 | } |
| 1349 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1350 | Instruction *InstCombiner::visitStoreInst(StoreInst &SI) { |
| 1351 | Value *Val = SI.getOperand(0); |
| 1352 | Value *Ptr = SI.getOperand(1); |
| 1353 | |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1354 | // Try to canonicalize the stored type. |
| 1355 | if (combineStoreToValueType(*this, SI)) |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1356 | return eraseInstFromFunction(SI); |
Chandler Carruth | 816d26f | 2014-11-25 10:09:51 +0000 | [diff] [blame] | 1357 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1358 | // Attempt to improve the alignment. |
Guillaume Chatelet | d400d45 | 2019-10-03 13:17:21 +0000 | [diff] [blame] | 1359 | const Align KnownAlign = Align(getOrEnforceKnownAlignment( |
| 1360 | Ptr, DL.getPrefTypeAlignment(Val->getType()), DL, &SI, &AC, &DT)); |
| 1361 | const MaybeAlign StoreAlign = MaybeAlign(SI.getAlignment()); |
| 1362 | const Align EffectiveStoreAlign = |
| 1363 | StoreAlign ? *StoreAlign : Align(DL.getABITypeAlignment(Val->getType())); |
Dan Gohman | 3619660 | 2010-08-03 18:20:32 +0000 | [diff] [blame] | 1364 | |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1365 | if (KnownAlign > EffectiveStoreAlign) |
| 1366 | SI.setAlignment(KnownAlign); |
Guillaume Chatelet | d400d45 | 2019-10-03 13:17:21 +0000 | [diff] [blame] | 1367 | else if (!StoreAlign) |
Mehdi Amini | a28d91d | 2015-03-10 02:37:25 +0000 | [diff] [blame] | 1368 | SI.setAlignment(EffectiveStoreAlign); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1369 | |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1370 | // Try to canonicalize the stored type. |
| 1371 | if (unpackStoreToAggregate(*this, SI)) |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1372 | return eraseInstFromFunction(SI); |
Mehdi Amini | b344ac9 | 2015-03-14 22:19:33 +0000 | [diff] [blame] | 1373 | |
Alexey Bataev | fa0a76d | 2017-12-12 19:12:34 +0000 | [diff] [blame] | 1374 | if (removeBitcastsFromLoadStoreOnMinMax(*this, SI)) |
| 1375 | return eraseInstFromFunction(SI); |
Alexey Bataev | ec95c6c | 2017-12-08 15:32:10 +0000 | [diff] [blame] | 1376 | |
Hal Finkel | 847e05f | 2015-02-20 03:05:53 +0000 | [diff] [blame] | 1377 | // Replace GEP indices if possible. |
| 1378 | if (Instruction *NewGEPI = replaceGEPIdxWithZero(*this, Ptr, SI)) { |
| 1379 | Worklist.Add(NewGEPI); |
| 1380 | return &SI; |
| 1381 | } |
| 1382 | |
Philip Reames | d7a6cc8 | 2015-12-17 22:19:27 +0000 | [diff] [blame] | 1383 | // Don't hack volatile/ordered stores. |
| 1384 | // FIXME: Some bits are legal for ordered atomic stores; needs refactoring. |
| 1385 | if (!SI.isUnordered()) return nullptr; |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1386 | |
| 1387 | // If the RHS is an alloca with a single use, zapify the store, making the |
| 1388 | // alloca dead. |
| 1389 | if (Ptr->hasOneUse()) { |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1390 | if (isa<AllocaInst>(Ptr)) |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1391 | return eraseInstFromFunction(SI); |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1392 | if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr)) { |
| 1393 | if (isa<AllocaInst>(GEP->getOperand(0))) { |
| 1394 | if (GEP->getOperand(0)->hasOneUse()) |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1395 | return eraseInstFromFunction(SI); |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1396 | } |
| 1397 | } |
| 1398 | } |
| 1399 | |
Philip Reames | d748689 | 2019-04-22 20:28:19 +0000 | [diff] [blame] | 1400 | // If we have a store to a location which is known constant, we can conclude |
| 1401 | // that the store must be storing the constant value (else the memory |
| 1402 | // wouldn't be constant), and this must be a noop. |
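| | // A minimal sketch (hypothetical IR, with @g marked constant):
| | //   store i32 %x, i32* @g   ; @g is constant memory, so this must be a noop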
| 1403 | if (AA->pointsToConstantMemory(Ptr)) |
| 1404 | return eraseInstFromFunction(SI); |
| 1405 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1406 | // Do really simple DSE, to catch cases where there are several consecutive |
| 1407 | // stores to the same location, separated by a few arithmetic operations. This |
| 1408 | // situation often occurs with bitfield accesses. |
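| | // A minimal sketch of the pattern (hypothetical IR):
| | //   store i32 %a, i32* %p
| | //   %b = or i32 %a, 128       ; arithmetic only, no intervening load
| | //   store i32 %b, i32* %p     ; makes the first store dead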
Duncan P. N. Exon Smith | 9f8aaf2 | 2015-10-13 16:59:33 +0000 | [diff] [blame] | 1409 | BasicBlock::iterator BBI(SI); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1410 | for (unsigned ScanInsts = 6; BBI != SI.getParent()->begin() && ScanInsts; |
| 1411 | --ScanInsts) { |
| 1412 | --BBI; |
Victor Hernandez | 5f8c8c0 | 2010-01-22 19:05:05 +0000 | [diff] [blame] | 1413 | // Don't count debug info directives, lest they affect codegen, |
| 1414 | // and skip pointer-to-pointer bitcasts, which are NOPs.
| 1415 | if (isa<DbgInfoIntrinsic>(BBI) || |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 1416 | (isa<BitCastInst>(BBI) && BBI->getType()->isPointerTy())) { |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1417 | ScanInsts++; |
| 1418 | continue; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1419 | } |
| 1420 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1421 | if (StoreInst *PrevSI = dyn_cast<StoreInst>(BBI)) { |
| 1422 | // Is the previous store unordered and to the same location?
Philip Reames | d7a6cc8 | 2015-12-17 22:19:27 +0000 | [diff] [blame] | 1423 | if (PrevSI->isUnordered() && equivalentAddressValues(PrevSI->getOperand(1), |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1424 | SI.getOperand(1))) { |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1425 | ++NumDeadStore; |
| 1426 | ++BBI; |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1427 | eraseInstFromFunction(*PrevSI); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1428 | continue; |
| 1429 | } |
| 1430 | break; |
| 1431 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1432 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1433 | // If this is a load, we have to stop. However, if the loaded value is from |
| 1434 | // the pointer we're loading and is producing the pointer we're storing, |
| 1435 | // then *this* store is dead (X = load P; store X -> P). |
| 1436 | if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) { |
Philip Reames | d7a6cc8 | 2015-12-17 22:19:27 +0000 | [diff] [blame] | 1437 | if (LI == Val && equivalentAddressValues(LI->getOperand(0), Ptr)) { |
| 1438 | assert(SI.isUnordered() && "can't eliminate ordering operation"); |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1439 | return eraseInstFromFunction(SI); |
Philip Reames | d7a6cc8 | 2015-12-17 22:19:27 +0000 | [diff] [blame] | 1440 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1441 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1442 | // Otherwise, this is a load from some other location. Stores before it |
| 1443 | // may not be dead. |
| 1444 | break; |
| 1445 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1446 | |
Sanjoy Das | 679bc32 | 2017-01-17 05:45:09 +0000 | [diff] [blame] | 1447 | // Don't skip over loads, throws or things that can modify memory. |
| 1448 | if (BBI->mayWriteToMemory() || BBI->mayReadFromMemory() || BBI->mayThrow()) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1449 | break; |
| 1450 | } |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1451 | |
| 1452 | // store X, null -> turns into 'unreachable' in SimplifyCFG |
Anna Thomas | 2dd9835 | 2017-12-12 14:12:33 +0000 | [diff] [blame] | 1453 | // store X, GEP(null, Y) -> turns into 'unreachable' in SimplifyCFG |
| 1454 | if (canSimplifyNullStoreOrGEP(SI)) { |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1455 | if (!isa<UndefValue>(Val)) { |
| 1456 | SI.setOperand(0, UndefValue::get(Val->getType())); |
| 1457 | if (Instruction *U = dyn_cast<Instruction>(Val)) |
| 1458 | Worklist.Add(U); // Dropped a use. |
| 1459 | } |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1460 | return nullptr; // Do not modify these! |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1461 | } |
| 1462 | |
| 1463 | // store undef, Ptr -> noop |
| 1464 | if (isa<UndefValue>(Val)) |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1465 | return eraseInstFromFunction(SI); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1466 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1467 | // If this store is the second-to-last instruction in the basic block |
| 1468 | // (excluding debug info and bitcasts of pointers) and if the block ends with |
| 1469 | // an unconditional branch, try to move the store to the successor block. |
Duncan P. N. Exon Smith | 9f8aaf2 | 2015-10-13 16:59:33 +0000 | [diff] [blame] | 1470 | BBI = SI.getIterator(); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1471 | do { |
| 1472 | ++BBI; |
Victor Hernandez | 5f8c8c0 | 2010-01-22 19:05:05 +0000 | [diff] [blame] | 1473 | } while (isa<DbgInfoIntrinsic>(BBI) || |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 1474 | (isa<BitCastInst>(BBI) && BBI->getType()->isPointerTy())); |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1475 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1476 | if (BranchInst *BI = dyn_cast<BranchInst>(BBI)) |
| 1477 | if (BI->isUnconditional()) |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1478 | mergeStoreIntoSuccessor(SI); |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1479 | |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1480 | return nullptr; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1481 | } |
| 1482 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1483 | /// Try to transform: |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1484 | /// if () { *P = v1; } else { *P = v2 } |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1485 | /// or: |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1486 | /// *P = v1; if () { *P = v2; } |
| 1487 | /// into a phi node with a store in the successor. |
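| | ///
| | /// A hypothetical sketch of the result for the if/then/else form: both
| | /// original stores are erased and the common successor gets
| | ///   %storemerge = phi i32 [ %v1, %then ], [ %v2, %else ]
| | ///   store i32 %storemerge, i32* %P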
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1488 | bool InstCombiner::mergeStoreIntoSuccessor(StoreInst &SI) { |
Philip Reames | 5f0e369 | 2016-04-22 20:53:32 +0000 | [diff] [blame] | 1489 | assert(SI.isUnordered() && |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1490 | "This code has not been audited for volatile or ordered store case."); |
Justin Bogner | c7e4fbe | 2016-08-05 01:09:48 +0000 | [diff] [blame] | 1491 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1492 | // Check if the successor block has exactly 2 incoming edges. |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1493 | BasicBlock *StoreBB = SI.getParent(); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1494 | BasicBlock *DestBB = StoreBB->getTerminator()->getSuccessor(0); |
Vedant Kumar | 4de31bb | 2018-11-19 19:54:27 +0000 | [diff] [blame] | 1495 | if (!DestBB->hasNPredecessors(2)) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1496 | return false; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1497 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1498 | // Capture the other block (the block that doesn't contain our store). |
| 1499 | pred_iterator PredIter = pred_begin(DestBB); |
| 1500 | if (*PredIter == StoreBB) |
| 1501 | ++PredIter; |
| 1502 | BasicBlock *OtherBB = *PredIter; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1503 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1504 | // Bail out if the relevant blocks aren't all distinct. This can happen,
| 1505 | // for example, if SI is in an infinite loop. |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1506 | if (StoreBB == DestBB || OtherBB == DestBB) |
| 1507 | return false; |
| 1508 | |
| 1509 | // Verify that the other block ends in a branch and is not otherwise empty. |
Duncan P. N. Exon Smith | 9f8aaf2 | 2015-10-13 16:59:33 +0000 | [diff] [blame] | 1510 | BasicBlock::iterator BBI(OtherBB->getTerminator()); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1511 | BranchInst *OtherBr = dyn_cast<BranchInst>(BBI); |
| 1512 | if (!OtherBr || BBI == OtherBB->begin()) |
| 1513 | return false; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1514 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1515 | // If the other block ends in an unconditional branch, check for the 'if then |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1516 | // else' case. There is an instruction before the branch. |
Craig Topper | f40110f | 2014-04-25 05:29:35 +0000 | [diff] [blame] | 1517 | StoreInst *OtherStore = nullptr; |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1518 | if (OtherBr->isUnconditional()) { |
| 1519 | --BBI; |
| 1520 | // Skip over debugging info. |
Victor Hernandez | 5f8c8c0 | 2010-01-22 19:05:05 +0000 | [diff] [blame] | 1521 | while (isa<DbgInfoIntrinsic>(BBI) || |
Duncan Sands | 19d0b47 | 2010-02-16 11:11:14 +0000 | [diff] [blame] | 1522 | (isa<BitCastInst>(BBI) && BBI->getType()->isPointerTy())) { |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1523 | if (BBI==OtherBB->begin()) |
| 1524 | return false; |
| 1525 | --BBI; |
| 1526 | } |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1527 | // If this isn't a store, isn't a store to the same location, or is not the |
| 1528 | // right kind of store, bail out. |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1529 | OtherStore = dyn_cast<StoreInst>(BBI); |
| 1530 | if (!OtherStore || OtherStore->getOperand(1) != SI.getOperand(1) || |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1531 | !SI.isSameOperationAs(OtherStore)) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1532 | return false; |
| 1533 | } else { |
| 1534 | // Otherwise, the other block ended with a conditional branch. If one of the |
| 1535 | // destinations is StoreBB, then we have the if/then case. |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1536 | if (OtherBr->getSuccessor(0) != StoreBB && |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1537 | OtherBr->getSuccessor(1) != StoreBB) |
| 1538 | return false; |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1539 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1540 | // Okay, we know that OtherBr now goes to Dest and StoreBB, so this is an |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1541 | // if/then triangle. See if there is a store to the same ptr as SI that |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1542 | // lives in OtherBB. |
| 1543 | for (;; --BBI) { |
| 1544 | // Check to see if we find the matching store. |
| 1545 | if ((OtherStore = dyn_cast<StoreInst>(BBI))) { |
| 1546 | if (OtherStore->getOperand(1) != SI.getOperand(1) || |
Eli Friedman | 8bc586e | 2011-08-15 22:09:40 +0000 | [diff] [blame] | 1547 | !SI.isSameOperationAs(OtherStore)) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1548 | return false; |
| 1549 | break; |
| 1550 | } |
| 1551 | // If we find something that may be using or overwriting the stored |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1552 | // value, or if we run out of instructions, we can't do the transform. |
Sanjoy Das | 679bc32 | 2017-01-17 05:45:09 +0000 | [diff] [blame] | 1553 | if (BBI->mayReadFromMemory() || BBI->mayThrow() || |
| 1554 | BBI->mayWriteToMemory() || BBI == OtherBB->begin()) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1555 | return false; |
| 1556 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1557 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1558 | // In order to eliminate the store in OtherBr, we have to make sure nothing |
| 1559 | // reads or overwrites the stored value in StoreBB. |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1560 | for (BasicBlock::iterator I = StoreBB->begin(); &*I != &SI; ++I) { |
| 1561 | // FIXME: This should really be AA driven. |
Sanjoy Das | 679bc32 | 2017-01-17 05:45:09 +0000 | [diff] [blame] | 1562 | if (I->mayReadFromMemory() || I->mayThrow() || I->mayWriteToMemory()) |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1563 | return false; |
| 1564 | } |
| 1565 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1566 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1567 | // Insert a PHI node now if we need it. |
| 1568 | Value *MergedVal = OtherStore->getOperand(0); |
Vedant Kumar | 238533e | 2018-11-19 19:55:02 +0000 | [diff] [blame] | 1569 | // The debug locations of the original instructions might differ. Merge them. |
| 1570 | DebugLoc MergedLoc = DILocation::getMergedLocation(SI.getDebugLoc(), |
| 1571 | OtherStore->getDebugLoc()); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1572 | if (MergedVal != SI.getOperand(0)) { |
Jay Foad | 5213134 | 2011-03-30 11:28:46 +0000 | [diff] [blame] | 1573 | PHINode *PN = PHINode::Create(MergedVal->getType(), 2, "storemerge"); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1574 | PN->addIncoming(SI.getOperand(0), SI.getParent()); |
| 1575 | PN->addIncoming(OtherStore->getOperand(0), OtherBB); |
| 1576 | MergedVal = InsertNewInstBefore(PN, DestBB->front()); |
Vedant Kumar | 238533e | 2018-11-19 19:55:02 +0000 | [diff] [blame] | 1577 | PN->setDebugLoc(MergedLoc); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1578 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1579 | |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1580 | // Advance to a place where it is safe to insert the new store and insert it. |
Bill Wendling | 8ddfc09 | 2011-08-16 20:45:24 +0000 | [diff] [blame] | 1581 | BBI = DestBB->getFirstInsertionPt(); |
Guillaume Chatelet | 5b99c18 | 2019-10-22 12:55:32 +0000 | [diff] [blame] | 1582 | StoreInst *NewSI = new StoreInst(MergedVal, SI.getOperand(1), SI.isVolatile(), |
| 1583 | MaybeAlign(SI.getAlignment()), |
Sanjay Patel | 4a12aa9 | 2018-11-10 20:29:25 +0000 | [diff] [blame] | 1584 | SI.getOrdering(), SI.getSyncScopeID()); |
Eli Friedman | 35211c6 | 2011-05-27 00:19:40 +0000 | [diff] [blame] | 1585 | InsertNewInstBefore(NewSI, *BBI); |
Vedant Kumar | 238533e | 2018-11-19 19:55:02 +0000 | [diff] [blame] | 1586 | NewSI->setDebugLoc(MergedLoc); |
Eli Friedman | 35211c6 | 2011-05-27 00:19:40 +0000 | [diff] [blame] | 1587 | |
Hal Finkel | cc39b67 | 2014-07-24 12:16:19 +0000 | [diff] [blame] | 1588 | // If the two stores had AA tags, merge them. |
| 1589 | AAMDNodes AATags; |
| 1590 | SI.getAAMetadata(AATags); |
| 1591 | if (AATags) { |
| 1592 | OtherStore->getAAMetadata(AATags, /* Merge = */ true); |
| 1593 | NewSI->setAAMetadata(AATags); |
| 1594 | } |
Jim Grosbach | bdbd734 | 2013-04-05 21:20:12 +0000 | [diff] [blame] | 1595 | |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1596 | // Nuke the old stores. |
Sanjay Patel | 4b19880 | 2016-02-01 22:23:39 +0000 | [diff] [blame] | 1597 | eraseInstFromFunction(SI); |
| 1598 | eraseInstFromFunction(*OtherStore); |
Chris Lattner | a65e2f7 | 2010-01-05 05:57:49 +0000 | [diff] [blame] | 1599 | return true; |
| 1600 | } |