//===-- MemorySSAUpdater.cpp - Memory SSA Updater--------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------===//
//
// This file implements the MemorySSAUpdater class.
//
//===----------------------------------------------------------------===//
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/IteratedDominanceFrontier.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/FormattedStream.h"
#include <algorithm>

#define DEBUG_TYPE "memoryssa"
using namespace llvm;

// This is the marker algorithm from "Simple and Efficient Construction of
// Static Single Assignment Form".
// The simple, non-marker algorithm places phi nodes at any join point.
// Here, we place markers, and only place phi nodes if they end up necessary.
// They are only necessary if they break a cycle (i.e. we recursively visit
// ourselves again), or we discover, while getting the value of the operands,
// that there are two or more definitions needing to be merged.
// This will still leave non-minimal form in the case of irreducible control
// flow, where phi nodes may be in cycles with themselves, but unnecessary.
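//
// For example (illustrative): in a diamond CFG
//
//        A
//       / \
//      B   C
//       \ /
//        D
//
// where only B contains a store, the non-marker algorithm would place a
// MemoryPhi at D unconditionally, while the marker algorithm only
// materializes the phi at D once the recursive walk actually sees two
// distinct incoming definitions (B's def and live-on-entry through C).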
MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  // First, do a cache lookup. Without this cache, certain CFG structures
  // (like a series of if statements) take exponential time to visit.
  auto Cached = CachedPreviousDef.find(BB);
  if (Cached != CachedPreviousDef.end()) {
    return Cached->second;
  }

  if (BasicBlock *Pred = BB->getSinglePredecessor()) {
    // Single predecessor case; just recurse, since we can only have one
    // definition.
    MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.count(BB)) {
    // We hit our node again, meaning we had a cycle; we must insert a phi
    // node to break it so we have an operand. The only case this will
    // insert useless phis is if we have irreducible control flow.
    MemoryAccess *Result = MSSA->createMemoryPhi(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.insert(BB).second) {
    // Mark us visited so we can detect a cycle.
    SmallVector<TrackingVH<MemoryAccess>, 8> PhiOps;

    // Recurse to get the values in our predecessors for placement of a
    // potential phi node. This will insert phi nodes if we cycle in order to
    // break the cycle and have an operand.
    for (auto *Pred : predecessors(BB))
      if (MSSA->DT->isReachableFromEntry(Pred))
        PhiOps.push_back(getPreviousDefFromEnd(Pred, CachedPreviousDef));
      else
        PhiOps.push_back(MSSA->getLiveOnEntryDef());

    // Now try to simplify the ops to avoid placing a phi.
    // This may return null if we never created a phi yet; that's okay.
    MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB));

    // See if we can avoid the phi by simplifying it.
    auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
    // If we couldn't simplify, we may have to create a phi.
    if (Result == Phi) {
      if (!Phi)
        Phi = MSSA->createMemoryPhi(BB);

      // See if the existing phi operands match what we need.
      // Unlike normal SSA, we only allow one phi node per block, so we can't
      // just create a new one.
      if (Phi->getNumOperands() != 0) {
        // FIXME: Figure out whether this is dead code and if so remove it.
        if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
          // These will have been filled in by the recursive read we did above.
          llvm::copy(PhiOps, Phi->op_begin());
          std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin());
        }
      } else {
        unsigned i = 0;
        for (auto *Pred : predecessors(BB))
          Phi->addIncoming(&*PhiOps[i++], Pred);
        InsertedPHIs.push_back(Phi);
      }
      Result = Phi;
    }

    // Set ourselves up for the next variable by resetting visited state.
    VisitedBlocks.erase(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }
  llvm_unreachable("Should have hit one of the three cases above");
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If a definition is not found in the block of the
// access, it continues globally, creating phi nodes to ensure we have a single
// definition.
MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) {
  if (auto *LocalResult = getPreviousDefInBlock(MA))
    return LocalResult;
  DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
  return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If the definition is not found in the block of the
// access, it returns nullptr.
MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) {
  auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock());

  // It's possible there are no defs, or we got handed the first def to start.
  if (Defs) {
    // If this is a def, we can just use the def iterators.
    if (!isa<MemoryUse>(MA)) {
      auto Iter = MA->getReverseDefsIterator();
      ++Iter;
      if (Iter != Defs->rend())
        return &*Iter;
    } else {
      // Otherwise, we have to walk the all-access iterator.
      auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend();
      for (auto &U : make_range(++MA->getReverseIterator(), End))
        if (!isa<MemoryUse>(U))
          return cast<MemoryAccess>(&U);
      // Note that if MA comes before Defs->begin(), we won't hit a def.
      return nullptr;
    }
  }
  return nullptr;
}

// This starts at the end of the block.
MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  auto *Defs = MSSA->getWritableBlockDefs(BB);

  if (Defs) {
    CachedPreviousDef.insert({BB, &*Defs->rbegin()});
    return &*Defs->rbegin();
  }

  return getPreviousDefRecursive(BB, CachedPreviousDef);
}
// Recurse over a set of phi uses to eliminate the trivial ones.
MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) {
  if (!Phi)
    return nullptr;
  TrackingVH<MemoryAccess> Res(Phi);
  SmallVector<TrackingVH<Value>, 8> Uses;
  std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
  for (auto &U : Uses)
    if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U))
      tryRemoveTrivialPhi(UsePhi);
  return Res;
}

// Eliminate trivial phis.
// Phis are trivial if they are defined either by themselves, or by all the
// same argument.
// I.e., phi(a, a), or b = phi(a, b), or c = phi(a, a, c).
// We recursively try to remove them.
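// For example (illustrative): with b = phi(a, b) and c = phi(a, b), removing
// b as trivial rewrites its uses to a, turning c into phi(a, a), which the
// recursePhi call at the end of tryRemoveTrivialPhi then removes as trivial
// as well.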
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi) {
  assert(Phi && "Can only remove concrete Phi.");
  auto OperRange = Phi->operands();
  return tryRemoveTrivialPhi(Phi, OperRange);
}
template <class RangeType>
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi,
                                                    RangeType &Operands) {
  // Bail out on non-opt Phis.
  if (NonOptPhis.count(Phi))
    return Phi;

  // Detect equal or self arguments.
  MemoryAccess *Same = nullptr;
  for (auto &Op : Operands) {
    // If the same or self, good so far.
    if (Op == Phi || Op == Same)
      continue;
    // Not the same; return the phi, since it's not eliminatable by us.
    if (Same)
      return Phi;
    Same = cast<MemoryAccess>(&*Op);
  }
  // We never found a non-self reference; the phi is undef.
  if (Same == nullptr)
    return MSSA->getLiveOnEntryDef();
  if (Phi) {
    Phi->replaceAllUsesWith(Same);
    removeMemoryAccess(Phi);
  }

  // We should only end up recursing in case we replaced something, in which
  // case, we may have made other Phis trivial.
  return recursePhi(Same);
}

void MemorySSAUpdater::insertUse(MemoryUse *MU, bool RenameUses) {
  InsertedPHIs.clear();
  MU->setDefiningAccess(getPreviousDef(MU));
  // In cases without unreachable blocks, because uses do not create new
  // may-defs, there are only two cases:
  // 1. There was a def already below us, and therefore, we should not have
  // created a phi node because it was already needed for the def.
  //
  // 2. There is no def below us, and therefore, there is no extra renaming work
  // to do.

  // In cases with unreachable blocks, where the unnecessary Phis were
  // optimized out, adding the Use may re-insert those Phis. Hence, when
  // inserting Uses outside of the MSSA creation process, and new Phis were
  // added, rename all uses if we are asked.

  if (!RenameUses && !InsertedPHIs.empty()) {
    auto *Defs = MSSA->getBlockDefs(MU->getBlock());
    (void)Defs;
    assert((!Defs || (++Defs->begin() == Defs->end())) &&
           "Block may have only a Phi or no defs");
  }

  if (RenameUses && InsertedPHIs.size()) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    BasicBlock *StartBlock = MU->getBlock();

    if (auto *Defs = MSSA->getWritableBlockDefs(StartBlock)) {
      MemoryAccess *FirstDef = &*Defs->begin();
      // Convert to incoming value if it's a memorydef. A phi *is* already an
      // incoming value.
      if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
        FirstDef = MD->getDefiningAccess();

      MSSA->renamePass(MU->getBlock(), FirstDef, Visited);
    }
    // We just inserted a phi into this block, so the incoming value will
    // become the phi anyway, so it does not matter what we pass.
    for (auto &MP : InsertedPHIs)
      if (MemoryPhi *Phi = cast_or_null<MemoryPhi>(MP))
        MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
  }
}

// Set every incoming edge {BB, MP->getBlock()} of MemoryPhi MP to NewDef.
static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB,
                                      MemoryAccess *NewDef) {
  // Replace any operand whose incoming block is BB with the new defining
  // access.
  int i = MP->getBasicBlockIndex(BB);
  assert(i != -1 && "Should have found the basic block in the phi");
  // We can't just compare i against getNumOperands since one is signed and the
  // other not. So use it to index into the block iterator.
  for (auto BBIter = MP->block_begin() + i; BBIter != MP->block_end();
       ++BBIter) {
    if (*BBIter != BB)
      break;
    MP->setIncomingValue(i, NewDef);
    ++i;
  }
}

// A brief description of the algorithm:
// First, we compute what should define the new def, using the SSA
// construction algorithm.
// Then, we update the defs below us (and any new phi nodes) in the graph to
// point to the correct new defs, to ensure we only have one variable, and no
// disconnected stores.
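//
// A sketch of typical client usage (illustrative; NewSI and MSSAU are
// hypothetical names): after inserting a new store instruction NewSI into the
// IR, a pass creates its access and wires it in with something like
//   auto *NewAccess = cast<MemoryDef>(MSSAU.createMemoryAccessInBB(
//       NewSI, /*Definition=*/nullptr, NewSI->getParent(), MemorySSA::End));
//   MSSAU.insertDef(NewAccess, /*RenameUses=*/true);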
void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) {
  InsertedPHIs.clear();

  // See if we had a local def, and if not, go hunting.
  MemoryAccess *DefBefore = getPreviousDef(MD);
  bool DefBeforeSameBlock = DefBefore->getBlock() == MD->getBlock();

  // There is a def before us, which means we can replace any store/phi uses
  // of that thing with us, since we are in the way of whatever was there
  // before.
  // We now define that def's memorydefs and memoryphis.
  if (DefBeforeSameBlock) {
    DefBefore->replaceUsesWithIf(MD, [MD](Use &U) {
      // Leave the MemoryUses alone.
      // Also make sure we skip ourselves to avoid self references.
      User *Usr = U.getUser();
      return !isa<MemoryUse>(Usr) && Usr != MD;
      // Defs are automatically unoptimized when the user is set to MD below,
      // because the isOptimized() call will fail to find the same ID.
    });
  }

  // and that def is now our defining access.
  MD->setDefiningAccess(DefBefore);

  // Remember the index where we may insert new phis below.
  unsigned NewPhiIndex = InsertedPHIs.size();

  SmallVector<WeakVH, 8> FixupList(InsertedPHIs.begin(), InsertedPHIs.end());
  if (!DefBeforeSameBlock) {
    // If there was a local def before us, we must have the same effect it
    // did. Because every may-def is the same, any phis/etc we would create, it
    // would also have created. If there was no local def before us, we
    // performed a global update, and have to search all successors and make
    // sure we update the first def in each of them (following all paths until
    // we hit the first def along each path). This may also insert phi nodes.
    // TODO: There are other cases where we can skip this work, such as when we
    // have a single successor, and only used a straight line of single pred
    // blocks backwards to find the def. To make that work, we'd have to track
    // whether getDefRecursive only ever used the single predecessor case.
    // These types of paths also only exist in between CFG simplifications.

    // If this is the first def in the block and this insert is in an arbitrary
    // place, compute IDF and place phis.
    auto Iter = MD->getDefsIterator();
    ++Iter;
    auto IterEnd = MSSA->getBlockDefs(MD->getBlock())->end();
    if (Iter == IterEnd) {
      ForwardIDFCalculator IDFs(*MSSA->DT);
      SmallVector<BasicBlock *, 32> IDFBlocks;
      SmallPtrSet<BasicBlock *, 2> DefiningBlocks;
      for (const auto &VH : InsertedPHIs)
        if (const auto *RealPHI = cast_or_null<MemoryPhi>(VH))
          DefiningBlocks.insert(RealPHI->getBlock());
      DefiningBlocks.insert(MD->getBlock());
      IDFs.setDefiningBlocks(DefiningBlocks);
      IDFs.calculate(IDFBlocks);
      SmallVector<AssertingVH<MemoryPhi>, 4> NewInsertedPHIs;
      for (auto *BBIDF : IDFBlocks) {
        auto *MPhi = MSSA->getMemoryAccess(BBIDF);
        if (!MPhi) {
          MPhi = MSSA->createMemoryPhi(BBIDF);
          NewInsertedPHIs.push_back(MPhi);
        }
        // Add the phis created into the IDF blocks to NonOptPhis, so they are
        // not optimized out as trivial by the call to getPreviousDefFromEnd
        // below. Once they are complete, all these Phis are added to the
        // FixupList, and removed from NonOptPhis inside fixupDefs().
        // Existing Phis in IDF may need fixing as well, and potentially be
        // trivial before this insertion, hence add all IDF Phis. See PR43044.
        NonOptPhis.insert(MPhi);
      }

      for (auto &MPhi : NewInsertedPHIs) {
        auto *BBIDF = MPhi->getBlock();
        for (auto *Pred : predecessors(BBIDF)) {
          DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
          MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef),
                            Pred);
        }
      }

      // Re-take the index where we're adding the new phis, because the above
      // call to getPreviousDefFromEnd may have inserted into InsertedPHIs.
      NewPhiIndex = InsertedPHIs.size();
      for (auto &MPhi : NewInsertedPHIs) {
        InsertedPHIs.push_back(&*MPhi);
        FixupList.push_back(&*MPhi);
      }
    }

    FixupList.push_back(MD);
  }

  // Remember the index where we stopped inserting new phis above, since the
  // fixupDefs call in the loop below may insert more that are already minimal.
  unsigned NewPhiIndexEnd = InsertedPHIs.size();

  while (!FixupList.empty()) {
    unsigned StartingPHISize = InsertedPHIs.size();
    fixupDefs(FixupList);
    FixupList.clear();
    // Put any new phis on the fixup list, and process them.
    FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end());
  }

  // Optimize potentially non-minimal phis added in this method.
  unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex;
  if (NewPhiSize)
    tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize));

  // Now that all fixups are done, rename all uses if we are asked.
  if (RenameUses) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    BasicBlock *StartBlock = MD->getBlock();
    // We are guaranteed there is a def in the block, because we just got it
    // handed to us in this function.
    MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin();
    // Convert to incoming value if it's a memorydef. A phi *is* already an
    // incoming value.
    if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
      FirstDef = MD->getDefiningAccess();

    MSSA->renamePass(MD->getBlock(), FirstDef, Visited);
    // We just inserted a phi into this block, so the incoming value will become
    // the phi anyway, so it does not matter what we pass.
    for (auto &MP : InsertedPHIs) {
      MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP);
      if (Phi)
        MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
    }
  }
}

void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<WeakVH> &Vars) {
  SmallPtrSet<const BasicBlock *, 8> Seen;
  SmallVector<const BasicBlock *, 16> Worklist;
  for (auto &Var : Vars) {
    MemoryAccess *NewDef = dyn_cast_or_null<MemoryAccess>(Var);
    if (!NewDef)
      continue;
    // First, see if there is a local def after the operand.
    auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock());
    auto DefIter = NewDef->getDefsIterator();

    // The temporary Phi is being fixed; unmark it so it is no longer excluded
    // from optimization.
    if (MemoryPhi *Phi = dyn_cast<MemoryPhi>(NewDef))
      NonOptPhis.erase(Phi);

    // If there is a local def after us, we only have to rename that.
    if (++DefIter != Defs->end()) {
      cast<MemoryDef>(DefIter)->setDefiningAccess(NewDef);
      continue;
    }

    // Otherwise, we need to search down through the CFG.
    // For each of our successors, handle it directly if there is a phi, or
    // place it on the fixup worklist.
    for (const auto *S : successors(NewDef->getBlock())) {
      if (auto *MP = MSSA->getMemoryAccess(S))
        setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef);
      else
        Worklist.push_back(S);
    }

    while (!Worklist.empty()) {
      const BasicBlock *FixupBlock = Worklist.back();
      Worklist.pop_back();

      // Get the first def in the block that isn't a phi node.
      if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) {
        auto *FirstDef = &*Defs->begin();
        // The loops above and below should have taken care of phi nodes.
        assert(!isa<MemoryPhi>(FirstDef) &&
               "Should have already handled phi nodes!");
        // We are now this def's defining access; make sure we actually
        // dominate it.
        assert(MSSA->dominates(NewDef, FirstDef) &&
               "Should have dominated the new access");

        // This may insert new phi nodes, because we are not guaranteed the
        // block we are processing has a single pred, and depending where the
        // store was inserted, it may require phi nodes below it.
        cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
        return;
      }
      // We didn't find a def, so we must continue.
      for (const auto *S : successors(FixupBlock)) {
        // If there is a phi node, handle it.
        // Otherwise, put the block on the worklist.
        if (auto *MP = MSSA->getMemoryAccess(S))
          setMemoryPhiValueForBlock(MP, FixupBlock, NewDef);
        else {
          // If we cycle, we should have ended up at a phi node that we already
          // processed. FIXME: Double check this
          if (!Seen.insert(S).second)
            continue;
          Worklist.push_back(S);
        }
      }
    }
  }
}

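// Sketch of intended use (illustrative): a client that deletes the CFG edge
// From->To, e.g. after folding a conditional branch, calls
// removeEdge(From, To); this drops From's incoming value from To's MemoryPhi
// and removes the phi entirely if that leaves it trivial.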
void MemorySSAUpdater::removeEdge(BasicBlock *From, BasicBlock *To) {
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
    MPhi->unorderedDeleteIncomingBlock(From);
    tryRemoveTrivialPhi(MPhi);
  }
}

void MemorySSAUpdater::removeDuplicatePhiEdgesBetween(const BasicBlock *From,
                                                      const BasicBlock *To) {
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
    bool Found = false;
    MPhi->unorderedDeleteIncomingIf([&](const MemoryAccess *, BasicBlock *B) {
      if (From != B)
        return false;
      if (Found)
        return true;
      Found = true;
      return false;
    });
    tryRemoveTrivialPhi(MPhi);
  }
}

static MemoryAccess *getNewDefiningAccessForClone(MemoryAccess *MA,
                                                  const ValueToValueMapTy &VMap,
                                                  PhiToDefMap &MPhiMap,
                                                  bool CloneWasSimplified,
                                                  MemorySSA *MSSA) {
  MemoryAccess *InsnDefining = MA;
  if (MemoryDef *DefMUD = dyn_cast<MemoryDef>(InsnDefining)) {
    if (!MSSA->isLiveOnEntryDef(DefMUD)) {
      Instruction *DefMUDI = DefMUD->getMemoryInst();
      assert(DefMUDI && "Found MemoryUseOrDef with no Instruction.");
      if (Instruction *NewDefMUDI =
              cast_or_null<Instruction>(VMap.lookup(DefMUDI))) {
        InsnDefining = MSSA->getMemoryAccess(NewDefMUDI);
        if (!CloneWasSimplified)
          assert(InsnDefining && "Defining instruction cannot be nullptr.");
        else if (!InsnDefining || isa<MemoryUse>(InsnDefining)) {
          // The clone was simplified and is no longer a MemoryDef; look up the
          // previous definition instead.
          auto DefIt = DefMUD->getDefsIterator();
          // Since simplified clones only occur in single block cloning, a
          // previous definition must exist, otherwise NewDefMUDI would not
          // have been found in VMap.
          assert(DefIt != MSSA->getBlockDefs(DefMUD->getBlock())->begin() &&
                 "Previous def must exist");
          InsnDefining = getNewDefiningAccessForClone(
              &*(--DefIt), VMap, MPhiMap, CloneWasSimplified, MSSA);
        }
      }
    }
  } else {
    MemoryPhi *DefPhi = cast<MemoryPhi>(InsnDefining);
    if (MemoryAccess *NewDefPhi = MPhiMap.lookup(DefPhi))
      InsnDefining = NewDefPhi;
  }
  assert(InsnDefining && "Defining instruction cannot be nullptr.");
  return InsnDefining;
}

void MemorySSAUpdater::cloneUsesAndDefs(BasicBlock *BB, BasicBlock *NewBB,
                                        const ValueToValueMapTy &VMap,
                                        PhiToDefMap &MPhiMap,
                                        bool CloneWasSimplified) {
  const MemorySSA::AccessList *Acc = MSSA->getBlockAccesses(BB);
  if (!Acc)
    return;
  for (const MemoryAccess &MA : *Acc) {
    if (const MemoryUseOrDef *MUD = dyn_cast<MemoryUseOrDef>(&MA)) {
      Instruction *Insn = MUD->getMemoryInst();
      // Entry does not exist if the clone of the block did not clone all
      // instructions. This occurs in LoopRotate when cloning instructions
      // from the old header to the old preheader. The cloned instruction may
      // also be a simplified Value, not an Instruction (see LoopRotate).
      // Also in LoopRotate, even when it's an instruction, due to it being
      // simplified, it may be a Use rather than a Def, so we cannot use MUD as
      // a template. Calls coming from updateForClonedBlockIntoPred ensure
      // this.
      if (Instruction *NewInsn =
              dyn_cast_or_null<Instruction>(VMap.lookup(Insn))) {
        MemoryAccess *NewUseOrDef = MSSA->createDefinedAccess(
            NewInsn,
            getNewDefiningAccessForClone(MUD->getDefiningAccess(), VMap,
                                         MPhiMap, CloneWasSimplified, MSSA),
            /*Template=*/CloneWasSimplified ? nullptr : MUD,
            /*CreationMustSucceed=*/CloneWasSimplified ? false : true);
        if (NewUseOrDef)
          MSSA->insertIntoListsForBlock(NewUseOrDef, NewBB, MemorySSA::End);
      }
    }
  }
}

void MemorySSAUpdater::updatePhisWhenInsertingUniqueBackedgeBlock(
    BasicBlock *Header, BasicBlock *Preheader, BasicBlock *BEBlock) {
  auto *MPhi = MSSA->getMemoryAccess(Header);
  if (!MPhi)
    return;

  // Create phi node in the backedge block and populate it with the same
  // incoming values as MPhi. Skip incoming values coming from Preheader.
  auto *NewMPhi = MSSA->createMemoryPhi(BEBlock);
  bool HasUniqueIncomingValue = true;
  MemoryAccess *UniqueValue = nullptr;
  for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) {
    BasicBlock *IBB = MPhi->getIncomingBlock(I);
    MemoryAccess *IV = MPhi->getIncomingValue(I);
    if (IBB != Preheader) {
      NewMPhi->addIncoming(IV, IBB);
      if (HasUniqueIncomingValue) {
        if (!UniqueValue)
          UniqueValue = IV;
        else if (UniqueValue != IV)
          HasUniqueIncomingValue = false;
      }
    }
  }

  // Update incoming edges into MPhi. Remove all but the incoming edge from
  // Preheader. Add an edge from NewMPhi.
  auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader);
  MPhi->setIncomingValue(0, AccFromPreheader);
  MPhi->setIncomingBlock(0, Preheader);
  for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I)
    MPhi->unorderedDeleteIncoming(I);
  MPhi->addIncoming(NewMPhi, BEBlock);

  // If NewMPhi is a trivial phi, remove it. Its use in the header MPhi will be
  // replaced with the unique value.
  tryRemoveTrivialPhi(NewMPhi);
}

void MemorySSAUpdater::updateForClonedLoop(const LoopBlocksRPO &LoopBlocks,
                                           ArrayRef<BasicBlock *> ExitBlocks,
                                           const ValueToValueMapTy &VMap,
                                           bool IgnoreIncomingWithNoClones) {
  PhiToDefMap MPhiMap;

  auto FixPhiIncomingValues = [&](MemoryPhi *Phi, MemoryPhi *NewPhi) {
    assert(Phi && NewPhi && "Invalid Phi nodes.");
    BasicBlock *NewPhiBB = NewPhi->getBlock();
    SmallPtrSet<BasicBlock *, 4> NewPhiBBPreds(pred_begin(NewPhiBB),
                                               pred_end(NewPhiBB));
    for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) {
      MemoryAccess *IncomingAccess = Phi->getIncomingValue(It);
      BasicBlock *IncBB = Phi->getIncomingBlock(It);

      if (BasicBlock *NewIncBB = cast_or_null<BasicBlock>(VMap.lookup(IncBB)))
        IncBB = NewIncBB;
      else if (IgnoreIncomingWithNoClones)
        continue;

      // Now we have IncBB, and will need to add incoming from it to NewPhi.

      // If IncBB is not a predecessor of NewPhiBB, then do not add it.
      // NewPhiBB was cloned without that edge.
      if (!NewPhiBBPreds.count(IncBB))
        continue;

      // Determine incoming value and add it as incoming from IncBB.
      if (MemoryUseOrDef *IncMUD = dyn_cast<MemoryUseOrDef>(IncomingAccess)) {
        if (!MSSA->isLiveOnEntryDef(IncMUD)) {
          Instruction *IncI = IncMUD->getMemoryInst();
          assert(IncI && "Found MemoryUseOrDef with no Instruction.");
          if (Instruction *NewIncI =
                  cast_or_null<Instruction>(VMap.lookup(IncI))) {
            IncMUD = MSSA->getMemoryAccess(NewIncI);
            assert(IncMUD &&
                   "MemoryUseOrDef cannot be null, all preds processed.");
          }
        }
        NewPhi->addIncoming(IncMUD, IncBB);
      } else {
        MemoryPhi *IncPhi = cast<MemoryPhi>(IncomingAccess);
        if (MemoryAccess *NewDefPhi = MPhiMap.lookup(IncPhi))
          NewPhi->addIncoming(NewDefPhi, IncBB);
        else
          NewPhi->addIncoming(IncPhi, IncBB);
      }
    }
  };

  auto ProcessBlock = [&](BasicBlock *BB) {
    BasicBlock *NewBlock = cast_or_null<BasicBlock>(VMap.lookup(BB));
    if (!NewBlock)
      return;

    assert(!MSSA->getWritableBlockAccesses(NewBlock) &&
           "Cloned block should have no accesses");

    // Add MemoryPhi.
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) {
      MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock);
      MPhiMap[MPhi] = NewPhi;
    }
    // Update Uses and Defs.
    cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap);
  };

  for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
    ProcessBlock(BB);

  for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
      if (MemoryAccess *NewPhi = MPhiMap.lookup(MPhi))
        FixPhiIncomingValues(MPhi, cast<MemoryPhi>(NewPhi));
}

void MemorySSAUpdater::updateForClonedBlockIntoPred(
    BasicBlock *BB, BasicBlock *P1, const ValueToValueMapTy &VM) {
  // All defs/phis from outside BB that are used in BB are valid uses in P1,
  // since those defs/phis must have dominated BB, and so also dominate P1.
  // Defs from BB being used in BB will be replaced with the cloned defs from
  // VM. The uses of BB's Phi (if it exists) in BB will be replaced by the
  // incoming def into the Phi from P1.
  // Instructions cloned into the predecessor are in practice sometimes
  // simplified, so disable the use of the template, and create an access from
  // scratch.
  PhiToDefMap MPhiMap;
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
    MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1);
  cloneUsesAndDefs(BB, P1, VM, MPhiMap, /*CloneWasSimplified=*/true);
}

template <typename Iter>
void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks, Iter ValuesBegin, Iter ValuesEnd,
    DominatorTree &DT) {
  SmallVector<CFGUpdate, 4> Updates;
  // Update/insert phis in all successors of exit blocks.
  for (auto *Exit : ExitBlocks)
    for (const ValueToValueMapTy *VMap : make_range(ValuesBegin, ValuesEnd))
      if (BasicBlock *NewExit = cast_or_null<BasicBlock>(VMap->lookup(Exit))) {
        BasicBlock *ExitSucc = NewExit->getTerminator()->getSuccessor(0);
        Updates.push_back({DT.Insert, NewExit, ExitSucc});
      }
  applyInsertUpdates(Updates, DT);
}

void MemorySSAUpdater::updateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks, const ValueToValueMapTy &VMap,
    DominatorTree &DT) {
  const ValueToValueMapTy *const Arr[] = {&VMap};
  privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr),
                                       std::end(Arr), DT);
}

void MemorySSAUpdater::updateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks,
    ArrayRef<std::unique_ptr<ValueToValueMapTy>> VMaps, DominatorTree &DT) {
  auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) {
    return I.get();
  };
  using MappedIteratorType =
      mapped_iterator<const std::unique_ptr<ValueToValueMapTy> *,
                      decltype(GetPtr)>;
  auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr);
  auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr);
  privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT);
}

void MemorySSAUpdater::applyUpdates(ArrayRef<CFGUpdate> Updates,
                                    DominatorTree &DT) {
  SmallVector<CFGUpdate, 4> RevDeleteUpdates;
  SmallVector<CFGUpdate, 4> InsertUpdates;
  for (auto &Update : Updates) {
    if (Update.getKind() == DT.Insert)
      InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
    else
      RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
  }

  if (!RevDeleteUpdates.empty()) {
    // Update for inserted edges: use NewDT and snapshot CFG as if deletes had
    // not occurred.
    // FIXME: This creates a new DT, so it's more expensive to do a mix of
    // deletes/inserts than just inserts. We can do an incremental update on
    // the DT to revert deletes, then re-delete the edges. Teaching DT to do
    // this is part of a pending cleanup.
    DominatorTree NewDT(DT, RevDeleteUpdates);
    GraphDiff<BasicBlock *> GD(RevDeleteUpdates);
    applyInsertUpdates(InsertUpdates, NewDT, &GD);
  } else {
    GraphDiff<BasicBlock *> GD;
    applyInsertUpdates(InsertUpdates, DT, &GD);
  }

  // Update for deleted edges.
  for (auto &Update : RevDeleteUpdates)
    removeEdge(Update.getFrom(), Update.getTo());
}
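
// Example of intended use (illustrative; block names are hypothetical): a
// pass that redirects the edge From->To through a new block Mid would mirror
// its DominatorTree updates here with something like
//   SmallVector<MemorySSAUpdater::CFGUpdate, 4> Updates = {
//       {DT.Delete, From, To}, {DT.Insert, From, Mid}, {DT.Insert, Mid, To}};
//   MSSAU.applyUpdates(Updates, DT);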

void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates,
                                          DominatorTree &DT) {
  GraphDiff<BasicBlock *> GD;
  applyInsertUpdates(Updates, DT, &GD);
}

void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates,
                                          DominatorTree &DT,
                                          const GraphDiff<BasicBlock *> *GD) {
  // Get recursive last Def, assuming well formed MSSA and updated DT.
  auto GetLastDef = [&](BasicBlock *BB) -> MemoryAccess * {
    while (true) {
      MemorySSA::DefsList *Defs = MSSA->getWritableBlockDefs(BB);
      // Return last Def or Phi in BB, if it exists.
      if (Defs)
        return &*(--Defs->end());

      // Check number of predecessors, we only care if there's more than one.
      unsigned Count = 0;
      BasicBlock *Pred = nullptr;
      for (auto &Pair : children<GraphDiffInvBBPair>({GD, BB})) {
        Pred = Pair.second;
        Count++;
        if (Count == 2)
          break;
      }

      // If BB has multiple predecessors, get last definition from IDom.
      if (Count != 1) {
        // [SimpleLoopUnswitch] If BB is a dead block, about to be deleted, its
        // DT is invalidated. Return LoE as its last def. This will be added to
        // MemoryPhi node, and later deleted when the block is deleted.
        if (!DT.getNode(BB))
          return MSSA->getLiveOnEntryDef();
        if (auto *IDom = DT.getNode(BB)->getIDom())
          if (IDom->getBlock() != BB) {
            BB = IDom->getBlock();
            continue;
          }
        return MSSA->getLiveOnEntryDef();
      } else {
        // Single predecessor, BB cannot be dead. GetLastDef of Pred.
        assert(Count == 1 && Pred && "Single predecessor expected.");
        BB = Pred;
      }
    };
    llvm_unreachable("Unable to get last definition.");
  };

  // Get nearest IDom given a set of blocks.
  // TODO: this can be optimized by starting the search at the node with the
  // lowest level (highest in the tree).
  auto FindNearestCommonDominator =
      [&](const SmallSetVector<BasicBlock *, 2> &BBSet) -> BasicBlock * {
    BasicBlock *PrevIDom = *BBSet.begin();
    for (auto *BB : BBSet)
      PrevIDom = DT.findNearestCommonDominator(PrevIDom, BB);
    return PrevIDom;
  };

  // Get all blocks that dominate PrevIDom, stop when reaching CurrIDom. Do not
  // include CurrIDom.
  auto GetNoLongerDomBlocks =
      [&](BasicBlock *PrevIDom, BasicBlock *CurrIDom,
          SmallVectorImpl<BasicBlock *> &BlocksPrevDom) {
        if (PrevIDom == CurrIDom)
          return;
        BlocksPrevDom.push_back(PrevIDom);
        BasicBlock *NextIDom = PrevIDom;
        while (BasicBlock *UpIDom =
                   DT.getNode(NextIDom)->getIDom()->getBlock()) {
          if (UpIDom == CurrIDom)
            break;
          BlocksPrevDom.push_back(UpIDom);
          NextIDom = UpIDom;
        }
      };

  // Map a BB to its predecessors: added + previously existing. To get a
  // deterministic order, store predecessors as SetVectors. The order in each
  // will be defined by the order in Updates (fixed) and the order given by
  // children<> (also fixed). Since we further iterate over these ordered sets,
  // we lose the information of multiple edges possibly existing between two
  // blocks, so we'll keep an EdgeCount map for that.
  // An alternate implementation could keep an unordered set for the
  // predecessors, traverse either Updates or children<> each time to get the
  // deterministic order, and drop the usage of EdgeCount. This alternate
  // approach would still require querying the maps for each predecessor, and
  // the children<> call has additional computation inside for creating the
  // snapshot-graph predecessors. As such, we favor using a little additional
  // storage and less compute time. This decision can be revisited if we find
  // the alternative more favorable.

  struct PredInfo {
    SmallSetVector<BasicBlock *, 2> Added;
    SmallSetVector<BasicBlock *, 2> Prev;
  };
  SmallDenseMap<BasicBlock *, PredInfo> PredMap;

  for (auto &Edge : Updates) {
    BasicBlock *BB = Edge.getTo();
    auto &AddedBlockSet = PredMap[BB].Added;
    AddedBlockSet.insert(Edge.getFrom());
  }

  // Store all existing predecessors for each BB; at least one must exist.
  SmallDenseMap<std::pair<BasicBlock *, BasicBlock *>, int> EdgeCountMap;
  SmallPtrSet<BasicBlock *, 2> NewBlocks;
  for (auto &BBPredPair : PredMap) {
    auto *BB = BBPredPair.first;
    const auto &AddedBlockSet = BBPredPair.second.Added;
    auto &PrevBlockSet = BBPredPair.second.Prev;
    for (auto &Pair : children<GraphDiffInvBBPair>({GD, BB})) {
      BasicBlock *Pi = Pair.second;
      if (!AddedBlockSet.count(Pi))
        PrevBlockSet.insert(Pi);
      EdgeCountMap[{Pi, BB}]++;
    }

    if (PrevBlockSet.empty()) {
      assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added.");
      LLVM_DEBUG(
          dbgs()
          << "Adding a predecessor to a block with no predecessors. "
             "This must be an edge added to a new, likely cloned, block. "
             "Its memory accesses must be already correct, assuming completed "
             "via the updateExitBlocksForClonedLoop API. "
             "Assert a single such edge is added so no phi addition or "
             "additional processing is required.\n");
      assert(AddedBlockSet.size() == 1 &&
             "Can only handle adding one predecessor to a new block.");
      // Need to remove new blocks from PredMap. Remove below to not invalidate
      // the iterator here.
      NewBlocks.insert(BB);
    }
  }
  // Nothing to process for new/cloned blocks.
  for (auto *BB : NewBlocks)
    PredMap.erase(BB);

  SmallVector<BasicBlock *, 16> BlocksWithDefsToReplace;
  SmallVector<WeakVH, 8> InsertedPhis;

  // First create MemoryPhis in all blocks that don't have one. Create in the
  // order found in Updates, not in PredMap, to get deterministic numbering.
  for (auto &Edge : Updates) {
    BasicBlock *BB = Edge.getTo();
    if (PredMap.count(BB) && !MSSA->getMemoryAccess(BB))
      InsertedPhis.push_back(MSSA->createMemoryPhi(BB));
  }

  // Now we'll fill in the MemoryPhis with the right incoming values.
  for (auto &BBPredPair : PredMap) {
    auto *BB = BBPredPair.first;
    const auto &PrevBlockSet = BBPredPair.second.Prev;
    const auto &AddedBlockSet = BBPredPair.second.Added;
    assert(!PrevBlockSet.empty() &&
           "At least one previous predecessor must exist.");

    // TODO: if this becomes a bottleneck, we can save on GetLastDef calls by
    // keeping this map before the loop. We can reuse already populated entries
    // if an edge is added from the same predecessor to two different blocks,
    // and this does happen in rotate. Note that the map needs to be updated
    // when deleting non-necessary phis below, if the phi is in the map, by
    // replacing the value with DefP1.
    SmallDenseMap<BasicBlock *, MemoryAccess *> LastDefAddedPred;
    for (auto *AddedPred : AddedBlockSet) {
      auto *DefPn = GetLastDef(AddedPred);
      assert(DefPn != nullptr && "Unable to find last definition.");
      LastDefAddedPred[AddedPred] = DefPn;
    }

    MemoryPhi *NewPhi = MSSA->getMemoryAccess(BB);
    // If Phi is not empty, add an incoming edge from each added pred. Must
    // still compute blocks with defs to replace for this block below.
    if (NewPhi->getNumOperands()) {
      for (auto *Pred : AddedBlockSet) {
        auto *LastDefForPred = LastDefAddedPred[Pred];
        for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
          NewPhi->addIncoming(LastDefForPred, Pred);
      }
    } else {
      // Pick any existing predecessor and get its definition. All other
      // existing predecessors should have the same one, since no phi existed.
      auto *P1 = *PrevBlockSet.begin();
      MemoryAccess *DefP1 = GetLastDef(P1);

      // Check DefP1 against all Defs in LastDefPredPair. If all the same,
      // nothing to add.
      bool InsertPhi = false;
      for (auto LastDefPredPair : LastDefAddedPred)
        if (DefP1 != LastDefPredPair.second) {
          InsertPhi = true;
          break;
        }
      if (!InsertPhi) {
        // Since NewPhi may be used in other newly added Phis, replace all uses
        // of NewPhi with the definition coming from all predecessors (DefP1),
        // before deleting it.
        NewPhi->replaceAllUsesWith(DefP1);
        removeMemoryAccess(NewPhi);
        continue;
      }

      // Update Phi with new values for new predecessors and old value for all
      // other predecessors. Since AddedBlockSet and PrevBlockSet are ordered
      // sets, the order of entries in NewPhi is deterministic.
      for (auto *Pred : AddedBlockSet) {
        auto *LastDefForPred = LastDefAddedPred[Pred];
        for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
          NewPhi->addIncoming(LastDefForPred, Pred);
      }
      for (auto *Pred : PrevBlockSet)
        for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
          NewPhi->addIncoming(DefP1, Pred);
    }

    // Get all blocks that used to dominate BB and no longer do after adding
    // AddedBlockSet, where PrevBlockSet are the previously known predecessors.
    assert(DT.getNode(BB)->getIDom() && "BB does not have valid idom");
    BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet);
    assert(PrevIDom && "Previous IDom should exist");
    BasicBlock *NewIDom = DT.getNode(BB)->getIDom()->getBlock();
    assert(NewIDom && "BB should have a new valid idom");
    assert(DT.dominates(NewIDom, PrevIDom) &&
           "New idom should dominate old idom");
    GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace);
  }

  tryRemoveTrivialPhis(InsertedPhis);
  // Create the set of blocks that now have a definition. We'll use this to
  // compute IDF and add Phis there next.
  SmallVector<BasicBlock *, 8> BlocksToProcess;
  for (auto &VH : InsertedPhis)
    if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
      BlocksToProcess.push_back(MPhi->getBlock());

  // Compute IDF and add Phis in all IDF blocks that do not have one.
  SmallVector<BasicBlock *, 32> IDFBlocks;
  if (!BlocksToProcess.empty()) {
    ForwardIDFCalculator IDFs(DT, GD);
    SmallPtrSet<BasicBlock *, 16> DefiningBlocks(BlocksToProcess.begin(),
                                                 BlocksToProcess.end());
    IDFs.setDefiningBlocks(DefiningBlocks);
    IDFs.calculate(IDFBlocks);

    SmallSetVector<MemoryPhi *, 4> PhisToFill;
    // First create all needed Phis.
    for (auto *BBIDF : IDFBlocks)
      if (!MSSA->getMemoryAccess(BBIDF)) {
        auto *IDFPhi = MSSA->createMemoryPhi(BBIDF);
        InsertedPhis.push_back(IDFPhi);
        PhisToFill.insert(IDFPhi);
      }
    // Then update or insert their correct incoming values.
    for (auto *BBIDF : IDFBlocks) {
      auto *IDFPhi = MSSA->getMemoryAccess(BBIDF);
      assert(IDFPhi && "Phi must exist");
      if (!PhisToFill.count(IDFPhi)) {
        // Update existing Phi.
        // FIXME: some updates may be redundant, try to optimize and skip some.
        for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I)
          IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I)));
      } else {
        for (auto &Pair : children<GraphDiffInvBBPair>({GD, BBIDF})) {
          BasicBlock *Pi = Pair.second;
          IDFPhi->addIncoming(GetLastDef(Pi), Pi);
        }
      }
    }
  }

  // Now for all defs in BlocksWithDefsToReplace, if there are uses they no
  // longer dominate, replace those with the closest dominating def.
  // This will also update optimized accesses, as they're also uses.
  for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) {
    if (auto DefsList = MSSA->getWritableBlockDefs(BlockWithDefsToReplace)) {
      for (auto &DefToReplaceUses : *DefsList) {
        BasicBlock *DominatingBlock = DefToReplaceUses.getBlock();
        Value::use_iterator UI = DefToReplaceUses.use_begin(),
                            E = DefToReplaceUses.use_end();
        for (; UI != E;) {
          Use &U = *UI;
          ++UI;
          MemoryAccess *Usr = dyn_cast<MemoryAccess>(U.getUser());
          if (MemoryPhi *UsrPhi = dyn_cast<MemoryPhi>(Usr)) {
            BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U);
            if (!DT.dominates(DominatingBlock, DominatedBlock))
              U.set(GetLastDef(DominatedBlock));
          } else {
            BasicBlock *DominatedBlock = Usr->getBlock();
            if (!DT.dominates(DominatingBlock, DominatedBlock)) {
              if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock))
                U.set(DomBlPhi);
              else {
                auto *IDom = DT.getNode(DominatedBlock)->getIDom();
                assert(IDom && "Block must have a valid IDom.");
                U.set(GetLastDef(IDom->getBlock()));
              }
              cast<MemoryUseOrDef>(Usr)->resetOptimized();
            }
          }
        }
      }
    }
  }
  tryRemoveTrivialPhis(InsertedPhis);
}

// Move What before Where in the MemorySSA IR.
template <class WhereType>
void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB,
                              WhereType Where) {
  // Mark MemoryPhi users of What not to be optimized.
  for (auto *U : What->users())
    if (MemoryPhi *PhiUser = dyn_cast<MemoryPhi>(U))
      NonOptPhis.insert(PhiUser);

  // Replace all our users with our defining access.
  What->replaceAllUsesWith(What->getDefiningAccess());

  // Let MemorySSA take care of moving it around in the lists.
  MSSA->moveTo(What, BB, Where);

  // Now reinsert it into the IR and do whatever fixups are needed.
  if (auto *MD = dyn_cast<MemoryDef>(What))
    insertDef(MD, /*RenameUses=*/true);
  else
    insertUse(cast<MemoryUse>(What), /*RenameUses=*/true);

  // Clear dangling pointers. We added all MemoryPhi users, but not all
  // of them are removed by fixupDefs().
  NonOptPhis.clear();
}

// Move What before Where in the MemorySSA IR.
void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), Where->getIterator());
}

// Move What after Where in the MemorySSA IR.
void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), ++Where->getIterator());
}
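
// Sketch of typical use (illustrative; names are hypothetical): a pass that
// hoists an instruction I so it executes just before another memory
// instruction whose access is Where would update MemorySSA with something
// like
//   MSSAU.moveBefore(MSSA.getMemoryAccess(I), Where);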

void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
                                   MemorySSA::InsertionPlace Where) {
  return moveTo(What, BB, Where);
}

// All accesses in To used to be in From. Move to end and update access lists.
void MemorySSAUpdater::moveAllAccesses(BasicBlock *From, BasicBlock *To,
                                       Instruction *Start) {

  MemorySSA::AccessList *Accs = MSSA->getWritableBlockAccesses(From);
  if (!Accs)
    return;

  MemoryAccess *FirstInNew = nullptr;
  for (Instruction &I : make_range(Start->getIterator(), To->end()))
    if ((FirstInNew = MSSA->getMemoryAccess(&I)))
      break;
  if (!FirstInNew)
    return;

  auto *MUD = cast<MemoryUseOrDef>(FirstInNew);
  do {
    auto NextIt = ++MUD->getIterator();
    MemoryUseOrDef *NextMUD = (!Accs || NextIt == Accs->end())
                                  ? nullptr
                                  : cast<MemoryUseOrDef>(&*NextIt);
    MSSA->moveTo(MUD, To, MemorySSA::End);
    // Moving MUD from Accs in the moveTo above may delete Accs, so we need to
    // retrieve it again.
    Accs = MSSA->getWritableBlockAccesses(From);
    MUD = NextMUD;
  } while (MUD);
}

void MemorySSAUpdater::moveAllAfterSpliceBlocks(BasicBlock *From,
                                                BasicBlock *To,
                                                Instruction *Start) {
  assert(MSSA->getBlockAccesses(To) == nullptr &&
         "To block is expected to be free of MemoryAccesses.");
  moveAllAccesses(From, To, Start);
  for (BasicBlock *Succ : successors(To))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
      MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
}

void MemorySSAUpdater::moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To,
                                               Instruction *Start) {
  assert(From->getSinglePredecessor() == To &&
         "From block is expected to have a single predecessor (To).");
  moveAllAccesses(From, To, Start);
  for (BasicBlock *Succ : successors(From))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
      MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
}

/// If all arguments of a MemoryPHI are defined by the same incoming
/// argument, return that argument.
static MemoryAccess *onlySingleValue(MemoryPhi *MP) {
  MemoryAccess *MA = nullptr;

  for (auto &Arg : MP->operands()) {
    if (!MA)
      MA = cast<MemoryAccess>(Arg);
    else if (MA != Arg)
      return nullptr;
  }
  return MA;
}

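// Note (illustrative context): this is the MemorySSA counterpart of splitting
// a block's predecessors, e.g. as done by utilities like
// SplitBlockPredecessors when asked to preserve MemorySSA; Old keeps a phi
// for the predecessors that remain, while New receives a phi for the
// predecessors that were rerouted through it.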
Alina Sbirlea20c29622018-07-20 17:13:05 +00001199void MemorySSAUpdater::wireOldPredecessorsToNewImmediatePredecessor(
Alina Sbirleaf98c2c52018-09-07 21:14:48 +00001200 BasicBlock *Old, BasicBlock *New, ArrayRef<BasicBlock *> Preds,
1201 bool IdenticalEdgesWereMerged) {
Alina Sbirlea20c29622018-07-20 17:13:05 +00001202 assert(!MSSA->getWritableBlockAccesses(New) &&
1203 "Access list should be null for a new block.");
1204 MemoryPhi *Phi = MSSA->getMemoryAccess(Old);
1205 if (!Phi)
1206 return;
  if (Old->hasNPredecessors(1)) {
    assert(pred_size(New) == Preds.size() &&
           "Should have moved all predecessors.");
    MSSA->moveTo(Phi, New, MemorySSA::Beginning);
  } else {
    assert(!Preds.empty() && "Must be moving at least one predecessor to the "
                             "new immediate predecessor.");
    MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
    SmallPtrSet<BasicBlock *, 16> PredsSet(Preds.begin(), Preds.end());
    // We currently only support the case of removing a single incoming edge
    // when identical edges were not merged.
    if (!IdenticalEdgesWereMerged)
      assert(PredsSet.size() == Preds.size() &&
             "If identical edges were not merged, we cannot have duplicate "
             "blocks in the predecessors");
    Phi->unorderedDeleteIncomingIf([&](MemoryAccess *MA, BasicBlock *B) {
      if (PredsSet.count(B)) {
        NewPhi->addIncoming(MA, B);
        if (!IdenticalEdgesWereMerged)
          PredsSet.erase(B);
        return true;
      }
      return false;
    });
    Phi->addIncoming(NewPhi, New);
    tryRemoveTrivialPhi(NewPhi);
  }
}

void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA, bool OptimizePhis) {
  assert(!MSSA->isLiveOnEntryDef(MA) &&
         "Trying to remove the live on entry def");
  // We can only delete phi nodes if they have no uses, or we can replace all
  // uses with a single definition.
  MemoryAccess *NewDefTarget = nullptr;
  if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
    // Note that it is sufficient to know that all edges of the phi node have
    // the same argument. If they do, by the definition of dominance frontiers
    // (which we used to place this phi), that argument must dominate this phi,
    // and thus, must dominate the phi's uses, and so we will not hit the assert
    // below.
    NewDefTarget = onlySingleValue(MP);
    assert((NewDefTarget || MP->use_empty()) &&
           "We can't delete this memory phi");
  } else {
    NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
  }
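  // At this point NewDefTarget can be null only if MA is a MemoryPhi with no
  // uses, in which case the re-pointing loop below has nothing to do.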

  SmallSetVector<MemoryPhi *, 4> PhisToCheck;

  // Re-point the uses at our defining access.
  if (!isa<MemoryUse>(MA) && !MA->use_empty()) {
    // Reset optimized on users of this store, and reset the uses.
    // A few notes:
    // 1. This is a slightly modified version of RAUW to avoid walking the
    // uses twice here.
    // 2. If we wanted to be complete, we would have to reset the optimized
    // flags on users of phi nodes if doing the below makes a phi node have all
    // the same arguments. Instead, we expect callers to call removeMemoryAccess
    // on those phi nodes themselves, because doing it here would be O(N^3).
    if (MA->hasValueHandle())
      ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget);
    // Note: We assume MemorySSA is not used in metadata since it's not really
    // part of the IR.

    while (!MA->use_empty()) {
      Use &U = *MA->use_begin();
      if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser()))
        MUD->resetOptimized();
      if (OptimizePhis)
        if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U.getUser()))
          PhisToCheck.insert(MP);
      U.set(NewDefTarget);
    }
  }

  // The call to removeFromLists below will destroy MA, so we must finish all
  // bookkeeping that touches it first; the order of operations here matters.
  MSSA->removeFromLookups(MA);
  MSSA->removeFromLists(MA);

  // Optionally optimize Phi uses. This will recursively remove trivial phis.
  if (!PhisToCheck.empty()) {
    SmallVector<WeakVH, 16> PhisToOptimize{PhisToCheck.begin(),
                                           PhisToCheck.end()};
    PhisToCheck.clear();

    unsigned PhisSize = PhisToOptimize.size();
    while (PhisSize-- > 0)
      if (MemoryPhi *MP =
              cast_or_null<MemoryPhi>(PhisToOptimize.pop_back_val()))
        tryRemoveTrivialPhi(MP);
  }
}

void MemorySSAUpdater::removeBlocks(
    const SmallSetVector<BasicBlock *, 8> &DeadBlocks) {
  // First delete all uses of BB in MemoryPhis.
  for (BasicBlock *BB : DeadBlocks) {
    Instruction *TI = BB->getTerminator();
    assert(TI && "Basic block expected to have a terminator instruction");
    for (BasicBlock *Succ : successors(TI))
      if (!DeadBlocks.count(Succ))
        if (MemoryPhi *MP = MSSA->getMemoryAccess(Succ)) {
          MP->unorderedDeleteIncomingBlock(BB);
          tryRemoveTrivialPhi(MP);
        }
    // Drop all references held by the accesses in BB.
    if (MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB))
      for (MemoryAccess &MA : *Acc)
        MA.dropAllReferences();
  }

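  // With all cross-references dropped above, the dead accesses no longer use
  // one another, so they can be erased below in any order.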
  // Next, delete all memory accesses in each block.
  for (BasicBlock *BB : DeadBlocks) {
    MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB);
    if (!Acc)
      continue;
    for (auto AB = Acc->begin(), AE = Acc->end(); AB != AE;) {
      MemoryAccess *MA = &*AB;
      ++AB;
      MSSA->removeFromLookups(MA);
      MSSA->removeFromLists(MA);
    }
  }
}

void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs) {
  for (auto &VH : UpdatedPHIs)
    if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
      tryRemoveTrivialPhi(MPhi);
}

void MemorySSAUpdater::changeToUnreachable(const Instruction *I) {
  const BasicBlock *BB = I->getParent();
  // Remove memory accesses in BB for I and all following instructions.
  auto BBI = I->getIterator(), BBE = BB->end();
  // FIXME: If this becomes too expensive, iterate until the first instruction
  // with a memory access, then iterate over MemoryAccesses.
  while (BBI != BBE)
    removeMemoryAccess(&*(BBI++));
  // Update phis in BB's successors to remove BB.
  SmallVector<WeakVH, 16> UpdatedPHIs;
  for (const BasicBlock *Successor : successors(BB)) {
    removeDuplicatePhiEdgesBetween(BB, Successor);
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Successor)) {
      MPhi->unorderedDeleteIncomingBlock(BB);
      UpdatedPHIs.push_back(MPhi);
    }
  }
  // Optimize trivial phis.
  tryRemoveTrivialPhis(UpdatedPHIs);
}

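// BI is being turned into an unconditional branch to To: the edges from BI's
// block to its other successors disappear, so the MemoryPhis in those
// successors must drop their incoming entries for this block.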
void MemorySSAUpdater::changeCondBranchToUnconditionalTo(const BranchInst *BI,
                                                         const BasicBlock *To) {
  const BasicBlock *BB = BI->getParent();
  SmallVector<WeakVH, 16> UpdatedPHIs;
  for (const BasicBlock *Succ : successors(BB)) {
    removeDuplicatePhiEdgesBetween(BB, Succ);
    if (Succ != To)
      if (auto *MPhi = MSSA->getMemoryAccess(Succ)) {
        MPhi->unorderedDeleteIncomingBlock(BB);
        UpdatedPHIs.push_back(MPhi);
      }
  }
  // Optimize trivial phis.
  tryRemoveTrivialPhis(UpdatedPHIs);
}

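// A sketch of typical client usage (the names are illustrative, not from this
// file): a pass that has cloned a store NewSI into BB and knows its defining
// access Def might register it with
//   Updater.createMemoryAccessInBB(NewSI, Def, BB, MemorySSA::End);
// The three helpers below only differ in how the insertion point is named.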
MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB(
    Instruction *I, MemoryAccess *Definition, const BasicBlock *BB,
    MemorySSA::InsertionPlace Point) {
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsForBlock(NewAccess, BB, Point);
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore(
    Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              InsertPt->getIterator());
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter(
    Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              ++InsertPt->getIterator());
  return NewAccess;
}