//===- Local.cpp - Functions to perform local transformations ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This family of functions performs various local transformations to the
// program.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/Local.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/TinyPtrVector.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LazyValueInfo.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalObject.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/KnownBits.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <climits>
#include <cstdint>
#include <iterator>
#include <map>
#include <utility>

using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "local"

STATISTIC(NumRemoved, "Number of unreachable basic blocks removed");

//===----------------------------------------------------------------------===//
//  Local constant propagation.
//

/// ConstantFoldTerminator - If a terminator instruction is predicated on a
/// constant value, convert it into an unconditional branch to the constant
/// destination. This is a nontrivial operation because the successors of this
/// basic block must have their PHI nodes updated.
/// Also calls RecursivelyDeleteTriviallyDeadInstructions() on any branch/switch
/// conditions and indirectbr addresses this might make dead if
/// DeleteDeadConditions is true.
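///
/// Illustrative sketch (block names here are invented, not taken from any
/// particular test case): once the condition of
///     br i1 true, label %taken, label %dead
/// is known to be constant, the branch is rewritten to
///     br label %taken
/// and BB is removed from %dead's PHI-node predecessor lists.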
bool llvm::ConstantFoldTerminator(BasicBlock *BB, bool DeleteDeadConditions,
                                  const TargetLibraryInfo *TLI) {
  TerminatorInst *T = BB->getTerminator();
  IRBuilder<> Builder(T);

  // Branch - See if we are conditional jumping on constant
  if (auto *BI = dyn_cast<BranchInst>(T)) {
    if (BI->isUnconditional()) return false;  // Can't optimize uncond branch
    BasicBlock *Dest1 = BI->getSuccessor(0);
    BasicBlock *Dest2 = BI->getSuccessor(1);

    if (auto *Cond = dyn_cast<ConstantInt>(BI->getCondition())) {
      // Are we branching on constant?
      // YES.  Change to unconditional branch...
      BasicBlock *Destination = Cond->getZExtValue() ? Dest1 : Dest2;
      BasicBlock *OldDest = Cond->getZExtValue() ? Dest2 : Dest1;

      // Let the basic block know that we are letting go of it.  Based on this,
      // it will adjust its PHI nodes.
      OldDest->removePredecessor(BB);

      // Replace the conditional branch with an unconditional one.
      Builder.CreateBr(Destination);
      BI->eraseFromParent();
      return true;
    }

    if (Dest2 == Dest1) {       // Conditional branch to same location?
      // This branch matches something like this:
      //     br bool %cond, label %Dest, label %Dest
      // and changes it into:  br label %Dest

      // Let the basic block know that we are letting go of one copy of it.
      assert(BI->getParent() && "Terminator not inserted in block!");
      Dest1->removePredecessor(BI->getParent());

      // Replace the conditional branch with an unconditional one.
      Builder.CreateBr(Dest1);
      Value *Cond = BI->getCondition();
      BI->eraseFromParent();
      if (DeleteDeadConditions)
        RecursivelyDeleteTriviallyDeadInstructions(Cond, TLI);
      return true;
    }
    return false;
  }

  if (auto *SI = dyn_cast<SwitchInst>(T)) {
    // If we are switching on a constant, we can convert the switch to an
    // unconditional branch.
    auto *CI = dyn_cast<ConstantInt>(SI->getCondition());
    BasicBlock *DefaultDest = SI->getDefaultDest();
    BasicBlock *TheOnlyDest = DefaultDest;

    // If the default is unreachable, ignore it when searching for TheOnlyDest.
    if (isa<UnreachableInst>(DefaultDest->getFirstNonPHIOrDbg()) &&
        SI->getNumCases() > 0) {
      TheOnlyDest = SI->case_begin()->getCaseSuccessor();
    }

    // Figure out which case it goes to.
    for (auto i = SI->case_begin(), e = SI->case_end(); i != e;) {
      // Found case matching a constant operand?
      if (i->getCaseValue() == CI) {
        TheOnlyDest = i->getCaseSuccessor();
        break;
      }

      // Check to see if this branch is going to the same place as the default
      // dest.  If so, eliminate it as an explicit compare.
      if (i->getCaseSuccessor() == DefaultDest) {
        MDNode *MD = SI->getMetadata(LLVMContext::MD_prof);
        unsigned NCases = SI->getNumCases();
        // Fold the case metadata into the default if there will be any branches
        // left, unless the metadata doesn't match the switch.
        if (NCases > 1 && MD && MD->getNumOperands() == 2 + NCases) {
          // Collect branch weights into a vector.
          SmallVector<uint32_t, 8> Weights;
          for (unsigned MD_i = 1, MD_e = MD->getNumOperands(); MD_i < MD_e;
               ++MD_i) {
            auto *CI = mdconst::extract<ConstantInt>(MD->getOperand(MD_i));
            Weights.push_back(CI->getValue().getZExtValue());
          }
          // Merge weight of this case to the default weight.
          unsigned idx = i->getCaseIndex();
          Weights[0] += Weights[idx+1];
          // Remove weight for this case.
          std::swap(Weights[idx+1], Weights.back());
          Weights.pop_back();
          SI->setMetadata(LLVMContext::MD_prof,
                          MDBuilder(BB->getContext()).
                          createBranchWeights(Weights));
        }
        // Remove this entry.
        DefaultDest->removePredecessor(SI->getParent());
        i = SI->removeCase(i);
        e = SI->case_end();
        continue;
      }

      // Otherwise, check to see if the switch only branches to one destination.
      // We do this by resetting "TheOnlyDest" to null when we find two non-equal
      // destinations.
      if (i->getCaseSuccessor() != TheOnlyDest)
        TheOnlyDest = nullptr;

      // Increment this iterator as we haven't removed the case.
      ++i;
    }

    if (CI && !TheOnlyDest) {
      // Branching on a constant, but not any of the cases, go to the default
      // successor.
      TheOnlyDest = SI->getDefaultDest();
    }

    // If we found a single destination that we can fold the switch into, do so
    // now.
    if (TheOnlyDest) {
      // Insert the new branch.
      Builder.CreateBr(TheOnlyDest);
      BasicBlock *BB = SI->getParent();

      // Remove entries from PHI nodes which we no longer branch to...
      for (BasicBlock *Succ : SI->successors()) {
        // Found case matching a constant operand?
        if (Succ == TheOnlyDest)
          TheOnlyDest = nullptr; // Don't modify the first branch to TheOnlyDest
        else
          Succ->removePredecessor(BB);
      }

      // Delete the old switch.
      Value *Cond = SI->getCondition();
      SI->eraseFromParent();
      if (DeleteDeadConditions)
        RecursivelyDeleteTriviallyDeadInstructions(Cond, TLI);
      return true;
    }

    if (SI->getNumCases() == 1) {
      // Otherwise, we can fold this switch into a conditional branch
      // instruction if it has only one non-default destination.
      auto FirstCase = *SI->case_begin();
      Value *Cond = Builder.CreateICmpEQ(SI->getCondition(),
                                         FirstCase.getCaseValue(), "cond");

      // Insert the new branch.
      BranchInst *NewBr = Builder.CreateCondBr(Cond,
                                               FirstCase.getCaseSuccessor(),
                                               SI->getDefaultDest());
      MDNode *MD = SI->getMetadata(LLVMContext::MD_prof);
      if (MD && MD->getNumOperands() == 3) {
        ConstantInt *SICase =
            mdconst::dyn_extract<ConstantInt>(MD->getOperand(2));
        ConstantInt *SIDef =
            mdconst::dyn_extract<ConstantInt>(MD->getOperand(1));
        assert(SICase && SIDef);
        // The TrueWeight should be the weight for the single case of SI.
        NewBr->setMetadata(LLVMContext::MD_prof,
                           MDBuilder(BB->getContext()).
                           createBranchWeights(SICase->getValue().getZExtValue(),
                                               SIDef->getValue().getZExtValue()));
      }

      // Update make.implicit metadata to the newly-created conditional branch.
      MDNode *MakeImplicitMD = SI->getMetadata(LLVMContext::MD_make_implicit);
      if (MakeImplicitMD)
        NewBr->setMetadata(LLVMContext::MD_make_implicit, MakeImplicitMD);

      // Delete the old switch.
      SI->eraseFromParent();
      return true;
    }
    return false;
  }

  if (auto *IBI = dyn_cast<IndirectBrInst>(T)) {
    // indirectbr blockaddress(@F, @BB) -> br label @BB
    if (auto *BA =
            dyn_cast<BlockAddress>(IBI->getAddress()->stripPointerCasts())) {
      BasicBlock *TheOnlyDest = BA->getBasicBlock();
      // Insert the new branch.
      Builder.CreateBr(TheOnlyDest);

      for (unsigned i = 0, e = IBI->getNumDestinations(); i != e; ++i) {
        if (IBI->getDestination(i) == TheOnlyDest)
          TheOnlyDest = nullptr;
        else
          IBI->getDestination(i)->removePredecessor(IBI->getParent());
      }
      Value *Address = IBI->getAddress();
      IBI->eraseFromParent();
      if (DeleteDeadConditions)
        RecursivelyDeleteTriviallyDeadInstructions(Address, TLI);

      // If we didn't find our destination in the IBI successor list, then we
      // have undefined behavior.  Replace the unconditional branch with an
      // 'unreachable' instruction.
      if (TheOnlyDest) {
        BB->getTerminator()->eraseFromParent();
        new UnreachableInst(BB->getContext(), BB);
      }

      return true;
    }
  }

  return false;
}

//===----------------------------------------------------------------------===//
//  Local dead code elimination.
//

/// isInstructionTriviallyDead - Return true if the result produced by the
/// instruction is not used, and the instruction has no side effects.
///
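/// For illustration (hypothetical IR, not from the source tree): an
///     %unused = add i32 %a, %b
/// whose result has no uses is trivially dead, whereas a store or a call with
/// side effects is not, even if its own result is unused.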
bool llvm::isInstructionTriviallyDead(Instruction *I,
                                      const TargetLibraryInfo *TLI) {
  if (!I->use_empty())
    return false;
  return wouldInstructionBeTriviallyDead(I, TLI);
}

bool llvm::wouldInstructionBeTriviallyDead(Instruction *I,
                                           const TargetLibraryInfo *TLI) {
  if (isa<TerminatorInst>(I))
    return false;

  // We don't want the landingpad-like instructions removed by anything this
  // general.
  if (I->isEHPad())
    return false;

  // We don't want debug info removed by anything this general, unless
  // debug info is empty.
  if (DbgDeclareInst *DDI = dyn_cast<DbgDeclareInst>(I)) {
    if (DDI->getAddress())
      return false;
    return true;
  }
  if (DbgValueInst *DVI = dyn_cast<DbgValueInst>(I)) {
    if (DVI->getValue())
      return false;
    return true;
  }

  if (!I->mayHaveSideEffects())
    return true;

  // Special case intrinsics that "may have side effects" but can be deleted
  // when dead.
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    // Safe to delete llvm.stacksave if dead.
    if (II->getIntrinsicID() == Intrinsic::stacksave)
      return true;

    // Lifetime intrinsics are dead when their right-hand operand is undef.
    if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
        II->getIntrinsicID() == Intrinsic::lifetime_end)
      return isa<UndefValue>(II->getArgOperand(1));

    // Assumptions are dead if their condition is trivially true.  Guards on
    // true are operationally no-ops.  In the future we can consider more
    // sophisticated tradeoffs for guards considering potential for check
    // widening, but for now we keep things simple.
    if (II->getIntrinsicID() == Intrinsic::assume ||
        II->getIntrinsicID() == Intrinsic::experimental_guard) {
      if (ConstantInt *Cond = dyn_cast<ConstantInt>(II->getArgOperand(0)))
        return !Cond->isZero();

      return false;
    }
  }

  if (isAllocLikeFn(I, TLI))
    return true;

  if (CallInst *CI = isFreeCall(I, TLI))
    if (Constant *C = dyn_cast<Constant>(CI->getArgOperand(0)))
      return C->isNullValue() || isa<UndefValue>(C);

  if (CallSite CS = CallSite(I))
    if (isMathLibCallNoop(CS, TLI))
      return true;

  return false;
}

/// RecursivelyDeleteTriviallyDeadInstructions - If the specified value is a
/// trivially dead instruction, delete it.  If that makes any of its operands
/// trivially dead, delete them too, recursively.  Return true if any
/// instructions were deleted.
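///
/// A sketch with made-up values: if %sum below has no other uses,
///     %tmp = add i32 %x, 1
///     %sum = add i32 %tmp, %y
/// then deleting %sum leaves %tmp unused, and %tmp is deleted on a later
/// iteration of the same worklist.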
bool
llvm::RecursivelyDeleteTriviallyDeadInstructions(Value *V,
                                                 const TargetLibraryInfo *TLI) {
  Instruction *I = dyn_cast<Instruction>(V);
  if (!I || !I->use_empty() || !isInstructionTriviallyDead(I, TLI))
    return false;

  SmallVector<Instruction*, 16> DeadInsts;
  DeadInsts.push_back(I);

  do {
    I = DeadInsts.pop_back_val();

    // Null out all of the instruction's operands to see if any operand becomes
    // dead as we go.
    for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) {
      Value *OpV = I->getOperand(i);
      I->setOperand(i, nullptr);

      if (!OpV->use_empty()) continue;

      // If the operand is an instruction that became dead as we nulled out the
      // operand, and if it is 'trivially' dead, delete it in a future loop
      // iteration.
      if (Instruction *OpI = dyn_cast<Instruction>(OpV))
        if (isInstructionTriviallyDead(OpI, TLI))
          DeadInsts.push_back(OpI);
    }

    I->eraseFromParent();
  } while (!DeadInsts.empty());

  return true;
}

/// areAllUsesEqual - Check whether the uses of a value are all the same.
/// This is similar to Instruction::hasOneUse() except this will also return
/// true when there are no uses or multiple uses that all refer to the same
/// value.
static bool areAllUsesEqual(Instruction *I) {
  Value::user_iterator UI = I->user_begin();
  Value::user_iterator UE = I->user_end();
  if (UI == UE)
    return true;

  User *TheUse = *UI;
  for (++UI; UI != UE; ++UI) {
    if (*UI != TheUse)
      return false;
  }
  return true;
}

/// RecursivelyDeleteDeadPHINode - If the specified value is an effectively
/// dead PHI node, due to being a def-use chain of single-use nodes that
/// either forms a cycle or is terminated by a trivially dead instruction,
/// delete it.  If that makes any of its operands trivially dead, delete them
/// too, recursively.  Return true if a change was made.
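///
/// A hypothetical instance of the cycle case: two single-use PHI nodes that
/// only feed each other (%p = phi ... [ %q, ... ] and %q = phi ... [ %p, ... ])
/// are effectively dead; the cycle is broken by replacing the uses with undef
/// and then deleting the nodes.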
bool llvm::RecursivelyDeleteDeadPHINode(PHINode *PN,
                                        const TargetLibraryInfo *TLI) {
  SmallPtrSet<Instruction*, 4> Visited;
  for (Instruction *I = PN; areAllUsesEqual(I) && !I->mayHaveSideEffects();
       I = cast<Instruction>(*I->user_begin())) {
    if (I->use_empty())
      return RecursivelyDeleteTriviallyDeadInstructions(I, TLI);

    // If we find an instruction more than once, we're on a cycle that
    // won't prove fruitful.
    if (!Visited.insert(I).second) {
      // Break the cycle and delete the instruction and its operands.
      I->replaceAllUsesWith(UndefValue::get(I->getType()));
      (void)RecursivelyDeleteTriviallyDeadInstructions(I, TLI);
      return true;
    }
  }
  return false;
}

static bool
simplifyAndDCEInstruction(Instruction *I,
                          SmallSetVector<Instruction *, 16> &WorkList,
                          const DataLayout &DL,
                          const TargetLibraryInfo *TLI) {
  if (isInstructionTriviallyDead(I, TLI)) {
    // Null out all of the instruction's operands to see if any operand becomes
    // dead as we go.
    for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i) {
      Value *OpV = I->getOperand(i);
      I->setOperand(i, nullptr);

      if (!OpV->use_empty() || I == OpV)
        continue;

      // If the operand is an instruction that became dead as we nulled out the
      // operand, and if it is 'trivially' dead, delete it in a future loop
      // iteration.
      if (Instruction *OpI = dyn_cast<Instruction>(OpV))
        if (isInstructionTriviallyDead(OpI, TLI))
          WorkList.insert(OpI);
    }

    I->eraseFromParent();

    return true;
  }

  if (Value *SimpleV = SimplifyInstruction(I, DL)) {
    // Add the users to the worklist. CAREFUL: an instruction can use itself,
    // in the case of a phi node.
    for (User *U : I->users()) {
      if (U != I) {
        WorkList.insert(cast<Instruction>(U));
      }
    }

    // Replace the instruction with its simplified value.
    bool Changed = false;
    if (!I->use_empty()) {
      I->replaceAllUsesWith(SimpleV);
      Changed = true;
    }
    if (isInstructionTriviallyDead(I, TLI)) {
      I->eraseFromParent();
      Changed = true;
    }
    return Changed;
  }
  return false;
}

/// SimplifyInstructionsInBlock - Scan the specified basic block and try to
/// simplify any instructions in it and recursively delete dead instructions.
///
/// This returns true if it changed the code.  Note that it can delete
/// instructions in other blocks as well as in this block.
bool llvm::SimplifyInstructionsInBlock(BasicBlock *BB,
                                       const TargetLibraryInfo *TLI) {
  bool MadeChange = false;
  const DataLayout &DL = BB->getModule()->getDataLayout();

#ifndef NDEBUG
  // In debug builds, ensure that the terminator of the block is never replaced
  // or deleted by these simplifications. The idea of simplification is that it
  // cannot introduce new instructions, and there is no way to replace the
  // terminator of a block without introducing a new instruction.
  AssertingVH<Instruction> TerminatorVH(&BB->back());
#endif

  SmallSetVector<Instruction *, 16> WorkList;
  // Iterate over the original function, only adding insts to the worklist
  // if they actually need to be revisited. This avoids having to pre-init
  // the worklist with the entire function's worth of instructions.
  for (BasicBlock::iterator BI = BB->begin(), E = std::prev(BB->end());
       BI != E;) {
    assert(!BI->isTerminator());
    Instruction *I = &*BI;
    ++BI;

    // We're visiting this instruction now, so make sure it's not in the
    // worklist from an earlier visit.
    if (!WorkList.count(I))
      MadeChange |= simplifyAndDCEInstruction(I, WorkList, DL, TLI);
  }

  while (!WorkList.empty()) {
    Instruction *I = WorkList.pop_back_val();
    MadeChange |= simplifyAndDCEInstruction(I, WorkList, DL, TLI);
  }
  return MadeChange;
}

//===----------------------------------------------------------------------===//
//  Control Flow Graph Restructuring.
//

/// RemovePredecessorAndSimplify - Like BasicBlock::removePredecessor, this
/// method is called when we're about to delete Pred as a predecessor of BB. If
/// BB contains any PHI nodes, this drops the entries in the PHI nodes for Pred.
///
/// Unlike the removePredecessor method, this attempts to simplify uses of PHI
/// nodes that collapse into identity values.  For example, if we have:
///   x = phi(1, 0, 0, 0)
///   y = and x, z
///
/// .. and delete the predecessor corresponding to the '1', this will attempt to
/// recursively fold the and to 0.
void llvm::RemovePredecessorAndSimplify(BasicBlock *BB, BasicBlock *Pred) {
  // This only adjusts blocks with PHI nodes.
  if (!isa<PHINode>(BB->begin()))
    return;

  // Remove the entries for Pred from the PHI nodes in BB, but do not simplify
  // them down.  This will leave us with single entry phi nodes and other phis
  // that can be removed.
  BB->removePredecessor(Pred, true);

  WeakTrackingVH PhiIt = &BB->front();
  while (PHINode *PN = dyn_cast<PHINode>(PhiIt)) {
    PhiIt = &*++BasicBlock::iterator(cast<Instruction>(PhiIt));
    Value *OldPhiIt = PhiIt;

    if (!recursivelySimplifyInstruction(PN))
      continue;

    // If recursive simplification ended up deleting the next PHI node we would
    // iterate to, then our iterator is invalid; restart scanning from the top
    // of the block.
    if (PhiIt != OldPhiIt) PhiIt = &BB->front();
  }
}

/// MergeBasicBlockIntoOnlyPred - DestBB is a block with one predecessor and its
/// predecessor is known to have one successor (DestBB!).  Eliminate the edge
/// between them, moving the instructions in the predecessor into DestBB and
/// deleting the predecessor block.
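///
/// Schematically (names invented): when PredBB ends in 'br label %DestBB' and
/// DestBB has no other predecessors, PredBB's instructions are spliced into
/// DestBB, every remaining use of PredBB is redirected to DestBB, and PredBB
/// is erased.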
void llvm::MergeBasicBlockIntoOnlyPred(BasicBlock *DestBB, DominatorTree *DT) {
  // If BB has single-entry PHI nodes, fold them.
  while (PHINode *PN = dyn_cast<PHINode>(DestBB->begin())) {
    Value *NewVal = PN->getIncomingValue(0);
    // Replace a self-referencing PHI with undef; it must be dead.
    if (NewVal == PN) NewVal = UndefValue::get(PN->getType());
    PN->replaceAllUsesWith(NewVal);
    PN->eraseFromParent();
  }

  BasicBlock *PredBB = DestBB->getSinglePredecessor();
  assert(PredBB && "Block doesn't have a single predecessor!");

  // Zap anything that took the address of DestBB.  Not doing this will give the
  // address an invalid value.
  if (DestBB->hasAddressTaken()) {
    BlockAddress *BA = BlockAddress::get(DestBB);
    Constant *Replacement =
        ConstantInt::get(Type::getInt32Ty(BA->getContext()), 1);
    BA->replaceAllUsesWith(ConstantExpr::getIntToPtr(Replacement,
                                                     BA->getType()));
    BA->destroyConstant();
  }

  // Anything that branched to PredBB now branches to DestBB.
  PredBB->replaceAllUsesWith(DestBB);

  // Splice all the instructions from PredBB to DestBB.
  PredBB->getTerminator()->eraseFromParent();
  DestBB->getInstList().splice(DestBB->begin(), PredBB->getInstList());

  // If the PredBB is the entry block of the function, move DestBB up to
  // become the entry block after we erase PredBB.
  if (PredBB == &DestBB->getParent()->getEntryBlock())
    DestBB->moveAfter(PredBB);

  if (DT) {
    // For some irreducible CFGs we end up having forward-unreachable blocks,
    // so check if getNode returns a valid node before updating the domtree.
    if (DomTreeNode *DTN = DT->getNode(PredBB)) {
      BasicBlock *PredBBIDom = DTN->getIDom()->getBlock();
      DT->changeImmediateDominator(DestBB, PredBBIDom);
      DT->eraseNode(PredBB);
    }
  }
  // Nuke PredBB.
  PredBB->eraseFromParent();
}

/// CanMergeValues - Return true if we can choose one of these values to use
/// in place of the other. Note that we will always choose the non-undef
/// value to keep.
static bool CanMergeValues(Value *First, Value *Second) {
  return First == Second || isa<UndefValue>(First) || isa<UndefValue>(Second);
}

/// CanPropagatePredecessorsForPHIs - Return true if we can fold BB, an
/// almost-empty BB ending in an unconditional branch to Succ, into Succ.
///
/// Assumption: Succ is the single successor for BB.
static bool CanPropagatePredecessorsForPHIs(BasicBlock *BB, BasicBlock *Succ) {
  assert(*succ_begin(BB) == Succ && "Succ is not successor of BB!");

  DEBUG(dbgs() << "Looking to fold " << BB->getName() << " into "
               << Succ->getName() << "\n");
  // Shortcut: if there is only a single predecessor, it must be BB and merging
  // is always safe.
  if (Succ->getSinglePredecessor()) return true;

  // Make a list of the predecessors of BB.
  SmallPtrSet<BasicBlock*, 16> BBPreds(pred_begin(BB), pred_end(BB));

  // Look at all the phi nodes in Succ, to see if they present a conflict when
  // merging these blocks.
  for (BasicBlock::iterator I = Succ->begin(); isa<PHINode>(I); ++I) {
    PHINode *PN = cast<PHINode>(I);

    // If the incoming value from BB is again a PHINode in
    // BB which has the same incoming value for *PI as PN does, we can
    // merge the phi nodes and then the blocks can still be merged.
    PHINode *BBPN = dyn_cast<PHINode>(PN->getIncomingValueForBlock(BB));
    if (BBPN && BBPN->getParent() == BB) {
      for (unsigned PI = 0, PE = PN->getNumIncomingValues(); PI != PE; ++PI) {
        BasicBlock *IBB = PN->getIncomingBlock(PI);
        if (BBPreds.count(IBB) &&
            !CanMergeValues(BBPN->getIncomingValueForBlock(IBB),
                            PN->getIncomingValue(PI))) {
          DEBUG(dbgs() << "Can't fold, phi node " << PN->getName() << " in "
                       << Succ->getName() << " is conflicting with "
                       << BBPN->getName() << " with regard to common predecessor "
                       << IBB->getName() << "\n");
          return false;
        }
      }
    } else {
      Value* Val = PN->getIncomingValueForBlock(BB);
      for (unsigned PI = 0, PE = PN->getNumIncomingValues(); PI != PE; ++PI) {
        // See if the incoming value for the common predecessor is equal to the
        // one for BB, in which case this phi node will not prevent the merging
        // of the block.
        BasicBlock *IBB = PN->getIncomingBlock(PI);
        if (BBPreds.count(IBB) &&
            !CanMergeValues(Val, PN->getIncomingValue(PI))) {
          DEBUG(dbgs() << "Can't fold, phi node " << PN->getName() << " in "
                       << Succ->getName() << " is conflicting with regard to common "
                       << "predecessor " << IBB->getName() << "\n");
          return false;
        }
      }
    }
  }

  return true;
}

using PredBlockVector = SmallVector<BasicBlock *, 16>;
using IncomingValueMap = DenseMap<BasicBlock *, Value *>;

/// \brief Determines the value to use as the phi node input for a block.
///
/// Select between \p OldVal and any value that we know flows from \p BB
/// to a particular phi on the basis of which one (if either) is not
/// undef. Update IncomingValues based on the selected value.
///
/// \param OldVal The value we are considering selecting.
/// \param BB The block that the value flows in from.
/// \param IncomingValues A map from block-to-value for other phi inputs
/// that we have examined.
///
/// \returns the selected value.
static Value *selectIncomingValueForBlock(Value *OldVal, BasicBlock *BB,
                                          IncomingValueMap &IncomingValues) {
  if (!isa<UndefValue>(OldVal)) {
    assert((!IncomingValues.count(BB) ||
            IncomingValues.find(BB)->second == OldVal) &&
           "Expected OldVal to match incoming value from BB!");

    IncomingValues.insert(std::make_pair(BB, OldVal));
    return OldVal;
  }

  IncomingValueMap::const_iterator It = IncomingValues.find(BB);
  if (It != IncomingValues.end()) return It->second;

  return OldVal;
}

/// \brief Create a map from block to value for the operands of a
/// given phi.
///
/// Create a map from block to value for each non-undef value flowing
/// into \p PN.
///
/// \param PN The phi we are collecting the map for.
/// \param IncomingValues [out] The map from block to value for this phi.
static void gatherIncomingValuesToPhi(PHINode *PN,
                                      IncomingValueMap &IncomingValues) {
  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
    BasicBlock *BB = PN->getIncomingBlock(i);
    Value *V = PN->getIncomingValue(i);

    if (!isa<UndefValue>(V))
      IncomingValues.insert(std::make_pair(BB, V));
  }
}

/// \brief Replace the incoming undef values to a phi with the values
/// from a block-to-value map.
///
/// \param PN The phi we are replacing the undefs in.
/// \param IncomingValues A map from block to value.
static void replaceUndefValuesInPhi(PHINode *PN,
                                    const IncomingValueMap &IncomingValues) {
  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
    Value *V = PN->getIncomingValue(i);

    if (!isa<UndefValue>(V)) continue;

    BasicBlock *BB = PN->getIncomingBlock(i);
    IncomingValueMap::const_iterator It = IncomingValues.find(BB);
    if (It == IncomingValues.end()) continue;

    PN->setIncomingValue(i, It->second);
  }
}

/// \brief Replace a value flowing from a block to a phi with
/// potentially multiple instances of that value flowing from the
/// block's predecessors to the phi.
///
/// \param BB The block with the value flowing into the phi.
/// \param BBPreds The predecessors of BB.
/// \param PN The phi that we are updating.
static void redirectValuesFromPredecessorsToPhi(BasicBlock *BB,
                                                const PredBlockVector &BBPreds,
                                                PHINode *PN) {
  Value *OldVal = PN->removeIncomingValue(BB, false);
  assert(OldVal && "No entry in PHI for Pred BB!");

  IncomingValueMap IncomingValues;

  // We are merging two blocks - BB, and the block containing PN - and
  // as a result we need to redirect edges from the predecessors of BB
  // to go to the block containing PN, and update PN
  // accordingly. Since we allow merging blocks in the case where the
  // predecessor and successor blocks both share some predecessors,
  // and where some of those common predecessors might have undef
  // values flowing into PN, we want to rewrite those values to be
  // consistent with the non-undef values.

  gatherIncomingValuesToPhi(PN, IncomingValues);

  // If this incoming value is one of the PHI nodes in BB, the new entries
  // in the PHI node are the entries from the old PHI.
  if (isa<PHINode>(OldVal) && cast<PHINode>(OldVal)->getParent() == BB) {
    PHINode *OldValPN = cast<PHINode>(OldVal);
    for (unsigned i = 0, e = OldValPN->getNumIncomingValues(); i != e; ++i) {
      // Note that, since we are merging phi nodes and BB and Succ might
      // have common predecessors, we could end up with a phi node with
      // identical incoming branches. This will be cleaned up later (and
      // will trigger asserts if we try to clean it up now, without also
      // simplifying the corresponding conditional branch).
      BasicBlock *PredBB = OldValPN->getIncomingBlock(i);
      Value *PredVal = OldValPN->getIncomingValue(i);
      Value *Selected = selectIncomingValueForBlock(PredVal, PredBB,
                                                    IncomingValues);

      // And add a new incoming value for this predecessor for the
      // newly retargeted branch.
      PN->addIncoming(Selected, PredBB);
    }
  } else {
    for (unsigned i = 0, e = BBPreds.size(); i != e; ++i) {
      // Update existing incoming values in PN for this
      // predecessor of BB.
      BasicBlock *PredBB = BBPreds[i];
      Value *Selected = selectIncomingValueForBlock(OldVal, PredBB,
                                                    IncomingValues);

      // And add a new incoming value for this predecessor for the
      // newly retargeted branch.
      PN->addIncoming(Selected, PredBB);
    }
  }

  replaceUndefValuesInPhi(PN, IncomingValues);
}

/// TryToSimplifyUncondBranchFromEmptyBlock - BB is known to contain an
/// unconditional branch, and contains no instructions other than PHI nodes,
/// potentially side-effect-free intrinsics, and the branch.  If possible,
/// eliminate BB by rewriting all the predecessors to branch to the successor
/// block and return true.  If we can't transform, return false.
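///
/// Rough picture (hypothetical labels): a block %empty containing nothing but
/// 'br label %succ' can normally be removed by making every predecessor of
/// %empty branch straight to %succ and updating %succ's PHI nodes; the checks
/// below reject the PHI-conflict cases where that rewrite would be unsound.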
bool llvm::TryToSimplifyUncondBranchFromEmptyBlock(BasicBlock *BB) {
  assert(BB != &BB->getParent()->getEntryBlock() &&
         "TryToSimplifyUncondBranchFromEmptyBlock called on entry block!");

  // We can't eliminate infinite loops.
  BasicBlock *Succ = cast<BranchInst>(BB->getTerminator())->getSuccessor(0);
  if (BB == Succ) return false;

  // Check to see if merging these blocks would cause conflicts for any of the
  // phi nodes in BB or Succ. If not, we can safely merge.
  if (!CanPropagatePredecessorsForPHIs(BB, Succ)) return false;

  // Check for cases where Succ has multiple predecessors and a PHI node in BB
  // has uses which will not disappear when the PHI nodes are merged.  It is
  // possible to handle such cases, but difficult: it requires checking whether
  // BB dominates Succ, which is non-trivial to calculate in the case where
  // Succ has multiple predecessors.  Also, it requires checking whether
  // constructing the necessary self-referential PHI node doesn't introduce any
  // conflicts; this isn't too difficult, but the previous code for doing this
  // was incorrect.
  //
  // Note that if this check finds a live use, BB dominates Succ, so BB is
  // something like a loop pre-header (or rarely, a part of an irreducible CFG);
  // folding the branch isn't profitable in that case anyway.
  if (!Succ->getSinglePredecessor()) {
    BasicBlock::iterator BBI = BB->begin();
    while (isa<PHINode>(*BBI)) {
      for (Use &U : BBI->uses()) {
        if (PHINode* PN = dyn_cast<PHINode>(U.getUser())) {
          if (PN->getIncomingBlock(U) != BB)
            return false;
        } else {
          return false;
        }
      }
      ++BBI;
    }
  }

  DEBUG(dbgs() << "Killing Trivial BB: \n" << *BB);

  if (isa<PHINode>(Succ->begin())) {
    // If there is more than one pred of succ, and there are PHI nodes in
    // the successor, then we need to add incoming edges for the PHI nodes
    //
    const PredBlockVector BBPreds(pred_begin(BB), pred_end(BB));

    // Loop over all of the PHI nodes in the successor of BB.
    for (BasicBlock::iterator I = Succ->begin(); isa<PHINode>(I); ++I) {
      PHINode *PN = cast<PHINode>(I);

      redirectValuesFromPredecessorsToPhi(BB, BBPreds, PN);
    }
  }

  if (Succ->getSinglePredecessor()) {
    // BB is the only predecessor of Succ, so Succ will end up with exactly
    // the same predecessors BB had.

    // Copy over any phi, debug or lifetime instruction.
    BB->getTerminator()->eraseFromParent();
    Succ->getInstList().splice(Succ->getFirstNonPHI()->getIterator(),
                               BB->getInstList());
  } else {
    while (PHINode *PN = dyn_cast<PHINode>(&BB->front())) {
      // We explicitly check for such uses in CanPropagatePredecessorsForPHIs.
      assert(PN->use_empty() && "There shouldn't be any uses here!");
      PN->eraseFromParent();
    }
  }

  // If the unconditional branch we replaced contains llvm.loop metadata, we
  // add the metadata to the branch instructions in the predecessors.
  unsigned LoopMDKind = BB->getContext().getMDKindID("llvm.loop");
  Instruction *TI = BB->getTerminator();
  if (TI)
    if (MDNode *LoopMD = TI->getMetadata(LoopMDKind))
      for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI) {
        BasicBlock *Pred = *PI;
        Pred->getTerminator()->setMetadata(LoopMDKind, LoopMD);
      }

  // Everything that jumped to BB now goes to Succ.
  BB->replaceAllUsesWith(Succ);
  if (!Succ->hasName()) Succ->takeName(BB);
  BB->eraseFromParent(); // Delete the old basic block.
  return true;
}

/// EliminateDuplicatePHINodes - Check for and eliminate duplicate PHI
/// nodes in this block. This doesn't try to be clever about PHI nodes
/// which differ only in the order of the incoming values, but instcombine
/// orders them so it usually won't matter.
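///
/// For example (made-up IR), given
///     %a = phi i32 [ 0, %bb0 ], [ 1, %bb1 ]
///     %b = phi i32 [ 0, %bb0 ], [ 1, %bb1 ]
/// the second PHI is replaced by the first and erased.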
bool llvm::EliminateDuplicatePHINodes(BasicBlock *BB) {
  // This implementation doesn't currently consider undef operands
  // specially. Theoretically, two phis which are identical except for
  // one having an undef where the other doesn't could be collapsed.

  struct PHIDenseMapInfo {
    static PHINode *getEmptyKey() {
      return DenseMapInfo<PHINode *>::getEmptyKey();
    }

    static PHINode *getTombstoneKey() {
      return DenseMapInfo<PHINode *>::getTombstoneKey();
    }

    static unsigned getHashValue(PHINode *PN) {
      // Compute a hash value on the operands. Instcombine will likely have
      // sorted them, which helps expose duplicates, but we have to check all
      // the operands to be safe in case instcombine hasn't run.
      return static_cast<unsigned>(hash_combine(
          hash_combine_range(PN->value_op_begin(), PN->value_op_end()),
          hash_combine_range(PN->block_begin(), PN->block_end())));
    }

    static bool isEqual(PHINode *LHS, PHINode *RHS) {
      if (LHS == getEmptyKey() || LHS == getTombstoneKey() ||
          RHS == getEmptyKey() || RHS == getTombstoneKey())
        return LHS == RHS;
      return LHS->isIdenticalTo(RHS);
    }
  };

  // Set of unique PHINodes.
  DenseSet<PHINode *, PHIDenseMapInfo> PHISet;

  // Examine each PHI.
  bool Changed = false;
  for (auto I = BB->begin(); PHINode *PN = dyn_cast<PHINode>(I++);) {
    auto Inserted = PHISet.insert(PN);
    if (!Inserted.second) {
      // A duplicate. Replace this PHI with its duplicate.
      PN->replaceAllUsesWith(*Inserted.first);
      PN->eraseFromParent();
      Changed = true;

      // The RAUW can change PHIs that we already visited. Start over from the
      // beginning.
      PHISet.clear();
      I = BB->begin();
    }
  }

  return Changed;
}

/// enforceKnownAlignment - If the specified pointer points to an object that
/// we control, modify the object's alignment to PrefAlign. This isn't
/// often possible though. If alignment is important, a more reliable approach
/// is to simply align all global variables and allocation instructions to
/// their preferred alignment from the beginning.
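///
/// For instance (hypothetical numbers), an 'alloca i32, align 4' may have its
/// alignment raised to 16 here when 16 is the preferred alignment, provided
/// that does not exceed the target's natural stack alignment.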
static unsigned enforceKnownAlignment(Value *V, unsigned Align,
                                      unsigned PrefAlign,
                                      const DataLayout &DL) {
  assert(PrefAlign > Align);

  V = V->stripPointerCasts();

  if (AllocaInst *AI = dyn_cast<AllocaInst>(V)) {
    // TODO: ideally, computeKnownBits ought to have used
    // AllocaInst::getAlignment() in its computation already, making
    // the below max redundant. But, as it turns out,
    // stripPointerCasts recurses through infinite layers of bitcasts,
    // while computeKnownBits is not allowed to traverse more than 6
    // levels.
    Align = std::max(AI->getAlignment(), Align);
    if (PrefAlign <= Align)
      return Align;

    // If the preferred alignment is greater than the natural stack alignment
    // then don't round up. This avoids dynamic stack realignment.
    if (DL.exceedsNaturalStackAlignment(PrefAlign))
      return Align;
    AI->setAlignment(PrefAlign);
    return PrefAlign;
  }

  if (auto *GO = dyn_cast<GlobalObject>(V)) {
    // TODO: as above, this shouldn't be necessary.
    Align = std::max(GO->getAlignment(), Align);
    if (PrefAlign <= Align)
      return Align;

    // If there is a large requested alignment and we can, bump up the alignment
    // of the global.  If the memory we set aside for the global may not be the
    // memory used by the final program then it is impossible for us to reliably
    // enforce the preferred alignment.
    if (!GO->canIncreaseAlignment())
      return Align;

    GO->setAlignment(PrefAlign);
    return PrefAlign;
  }

  return Align;
}

unsigned llvm::getOrEnforceKnownAlignment(Value *V, unsigned PrefAlign,
                                          const DataLayout &DL,
                                          const Instruction *CxtI,
                                          AssumptionCache *AC,
                                          const DominatorTree *DT) {
  assert(V->getType()->isPointerTy() &&
         "getOrEnforceKnownAlignment expects a pointer!");

  KnownBits Known = computeKnownBits(V, DL, 0, AC, CxtI, DT);
  unsigned TrailZ = Known.countMinTrailingZeros();

  // Avoid trouble with ridiculously large TrailZ values, such as
  // those computed from a null pointer.
  TrailZ = std::min(TrailZ, unsigned(sizeof(unsigned) * CHAR_BIT - 1));

Craig Topper8205a1a2017-05-24 16:53:07 +00001077 unsigned Align = 1u << std::min(Known.getBitWidth() - 1, TrailZ);
Jakub Staszak8e1a6e72013-07-22 23:16:36 +00001078
Chris Lattner6fcd32e2010-12-25 20:37:57 +00001079 // LLVM doesn't support alignments larger than this currently.
1080 Align = std::min(Align, +Value::MaximumAlignment);
Jakub Staszak8e1a6e72013-07-22 23:16:36 +00001081
Chris Lattner6fcd32e2010-12-25 20:37:57 +00001082 if (PrefAlign > Align)
Matt Arsenault87dc6072013-08-01 22:42:18 +00001083 Align = enforceKnownAlignment(V, Align, PrefAlign, DL);
Jakub Staszak8e1a6e72013-07-22 23:16:36 +00001084
Chris Lattner6fcd32e2010-12-25 20:37:57 +00001085 // We don't need to make any adjustment.
1086 return Align;
1087}
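
// Example (illustrative sketch; the helper name is hypothetical): a transform
// that wants to widen a memory transfer might query - and opportunistically
// raise - the known alignment of both pointer operands, then use the smaller
// of the two results.
static unsigned getKnownCopyAlignmentExample(Value *Dst, Value *Src,
                                             unsigned PrefAlign,
                                             const DataLayout &DL,
                                             const Instruction *CxtI) {
  unsigned DstAlign = getOrEnforceKnownAlignment(Dst, PrefAlign, DL, CxtI,
                                                 /*AC=*/nullptr, /*DT=*/nullptr);
  unsigned SrcAlign = getOrEnforceKnownAlignment(Src, PrefAlign, DL, CxtI,
                                                 /*AC=*/nullptr, /*DT=*/nullptr);
  return std::min(DstAlign, SrcAlign);
}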
1088
Devang Patel8c0b16b2011-03-17 21:58:19 +00001089///===---------------------------------------------------------------------===//
1090/// Dbg Intrinsic utilities
1091///
1092
Adrian Prantl29b9de72013-04-26 17:48:33 +00001093/// See if there is a dbg.value intrinsic for DIVar before I.
Adrian Prantla5b2a642016-02-17 20:02:25 +00001094static bool LdStHasDebugValue(DILocalVariable *DIVar, DIExpression *DIExpr,
1095 Instruction *I) {
Adrian Prantl29b9de72013-04-26 17:48:33 +00001096 // Since we can't guarantee that the original dbg.declare intrinsic
1097 // is removed by LowerDbgDeclare(), we need to make sure that we are
1098 // not inserting the same dbg.value intrinsic over and over.
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001099 BasicBlock::InstListType::iterator PrevI(I);
Adrian Prantl29b9de72013-04-26 17:48:33 +00001100 if (PrevI != I->getParent()->getInstList().begin()) {
1101 --PrevI;
1102 if (DbgValueInst *DVI = dyn_cast<DbgValueInst>(PrevI))
1103 if (DVI->getValue() == I->getOperand(0) &&
Adrian Prantla5b2a642016-02-17 20:02:25 +00001104 DVI->getVariable() == DIVar &&
1105 DVI->getExpression() == DIExpr)
Adrian Prantl29b9de72013-04-26 17:48:33 +00001106 return true;
1107 }
1108 return false;
1109}
1110
Keith Walkerba159892016-09-22 14:13:25 +00001111/// See if there is a dbg.value intrinsic for DIVar for the PHI node.
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001112static bool PhiHasDebugValue(DILocalVariable *DIVar,
Keith Walkerba159892016-09-22 14:13:25 +00001113 DIExpression *DIExpr,
1114 PHINode *APN) {
1115 // Since we can't guarantee that the original dbg.declare intrinsic
1116 // is removed by LowerDbgDeclare(), we need to make sure that we are
1117 // not inserting the same dbg.value intrinsic over and over.
Adrian Prantlfa9e84e2017-03-16 20:11:54 +00001118 SmallVector<DbgValueInst *, 1> DbgValues;
1119 findDbgValues(DbgValues, APN);
1120 for (auto *DVI : DbgValues) {
1121 assert(DVI->getValue() == APN);
Adrian Prantlfa9e84e2017-03-16 20:11:54 +00001122 if ((DVI->getVariable() == DIVar) && (DVI->getExpression() == DIExpr))
1123 return true;
1124 }
1125 return false;
Keith Walkerba159892016-09-22 14:13:25 +00001126}
1127
Adrian Prantld00333a2013-04-26 18:10:50 +00001128/// Inserts an llvm.dbg.value intrinsic before a store to an alloca'd value
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001129/// that has an associated llvm.dbg.declare or llvm.dbg.addr intrinsic.
1130void llvm::ConvertDebugDeclareToDebugValue(DbgInfoIntrinsic *DII,
Devang Patel8c0b16b2011-03-17 21:58:19 +00001131 StoreInst *SI, DIBuilder &Builder) {
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001132 assert(DII->isAddressOfVariable());
1133 auto *DIVar = DII->getVariable();
Duncan P. N. Exon Smithd4a19a32015-04-21 18:24:23 +00001134 assert(DIVar && "Missing variable");
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001135 auto *DIExpr = DII->getExpression();
David Blaikie441cfee2017-05-15 21:34:01 +00001136 Value *DV = SI->getOperand(0);
Devang Patel8c0b16b2011-03-17 21:58:19 +00001137
Devang Patel8e60ff12011-05-16 21:24:05 +00001138 // If an argument is zero extended then use argument directly. The ZExt
1139 // may be zapped by an optimization pass in future.
Craig Topperf40110f2014-04-25 05:29:35 +00001140 Argument *ExtendedArg = nullptr;
Devang Patel8e60ff12011-05-16 21:24:05 +00001141 if (ZExtInst *ZExt = dyn_cast<ZExtInst>(SI->getOperand(0)))
1142 ExtendedArg = dyn_cast<Argument>(ZExt->getOperand(0));
1143 if (SExtInst *SExt = dyn_cast<SExtInst>(SI->getOperand(0)))
1144 ExtendedArg = dyn_cast<Argument>(SExt->getOperand(0));
Keno Fischer9aae4452016-01-12 22:46:09 +00001145 if (ExtendedArg) {
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001146 // If this DII was already describing only a fragment of a variable, ensure
David Blaikie441cfee2017-05-15 21:34:01 +00001147 // that fragment is appropriately narrowed here.
1148 // But if a fragment wasn't used, describe the value as the original
1149 // argument (rather than the zext or sext) so that it remains described even
1150 // if the sext/zext is optimized away. This widens the variable description,
1151 // leaving it up to the consumer to know how the smaller value may be
1152 // represented in a larger register.
1153 if (auto Fragment = DIExpr->getFragmentInfo()) {
1154 unsigned FragmentOffset = Fragment->OffsetInBits;
1155 SmallVector<uint64_t, 3> Ops(DIExpr->elements_begin(),
1156 DIExpr->elements_end() - 3);
1157 Ops.push_back(dwarf::DW_OP_LLVM_fragment);
1158 Ops.push_back(FragmentOffset);
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001159 const DataLayout &DL = DII->getModule()->getDataLayout();
David Blaikie441cfee2017-05-15 21:34:01 +00001160 Ops.push_back(DL.getTypeSizeInBits(ExtendedArg->getType()));
1161 DIExpr = Builder.createExpression(Ops);
Keno Fischer9aae4452016-01-12 22:46:09 +00001162 }
David Blaikie441cfee2017-05-15 21:34:01 +00001163 DV = ExtendedArg;
1164 }
1165 if (!LdStHasDebugValue(DIVar, DIExpr, SI))
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001166 Builder.insertDbgValueIntrinsic(DV, DIVar, DIExpr, DII->getDebugLoc(),
David Blaikie441cfee2017-05-15 21:34:01 +00001167 SI);
Devang Patel8c0b16b2011-03-17 21:58:19 +00001168}
1169
Adrian Prantld00333a2013-04-26 18:10:50 +00001170/// Inserts an llvm.dbg.value intrinsic before a load of an alloca'd value
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001171/// that has an associated llvm.dbg.declare or llvm.dbg.addr intrinsic.
1172void llvm::ConvertDebugDeclareToDebugValue(DbgInfoIntrinsic *DII,
Devang Patel2c7ee272011-03-18 23:45:43 +00001173 LoadInst *LI, DIBuilder &Builder) {
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001174 auto *DIVar = DII->getVariable();
1175 auto *DIExpr = DII->getExpression();
Duncan P. N. Exon Smithd4a19a32015-04-21 18:24:23 +00001176 assert(DIVar && "Missing variable");
Devang Patel2c7ee272011-03-18 23:45:43 +00001177
Adrian Prantla5b2a642016-02-17 20:02:25 +00001178 if (LdStHasDebugValue(DIVar, DIExpr, LI))
Keith Walkerba159892016-09-22 14:13:25 +00001179 return;
Adrian Prantl29b9de72013-04-26 17:48:33 +00001180
Keno Fischer00cbf9a2015-12-19 02:02:44 +00001181 // We are now tracking the loaded value instead of the address. In the
1182 // future if multi-location support is added to the IR, it might be
1183 // preferable to keep tracking both the loaded value and the original
1184 // address in case the alloca cannot be elided.
1185 Instruction *DbgValue = Builder.insertDbgValueIntrinsic(
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001186 LI, DIVar, DIExpr, DII->getDebugLoc(), (Instruction *)nullptr);
Keno Fischer00cbf9a2015-12-19 02:02:44 +00001187 DbgValue->insertAfter(LI);
Keith Walkerba159892016-09-22 14:13:25 +00001188}
1189
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001190/// Inserts an llvm.dbg.value intrinsic after a phi that has an associated
1191/// llvm.dbg.declare or llvm.dbg.addr intrinsic.
1192void llvm::ConvertDebugDeclareToDebugValue(DbgInfoIntrinsic *DII,
Keith Walkerba159892016-09-22 14:13:25 +00001193 PHINode *APN, DIBuilder &Builder) {
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001194 auto *DIVar = DII->getVariable();
1195 auto *DIExpr = DII->getExpression();
Keith Walkerba159892016-09-22 14:13:25 +00001196 assert(DIVar && "Missing variable");
1197
1198 if (PhiHasDebugValue(DIVar, DIExpr, APN))
1199 return;
1200
Reid Kleckner64818222016-09-27 18:45:31 +00001201 BasicBlock *BB = APN->getParent();
Keith Walkerba159892016-09-22 14:13:25 +00001202 auto InsertionPt = BB->getFirstInsertionPt();
Reid Kleckner64818222016-09-27 18:45:31 +00001203
1204 // The block may be a catchswitch block, which does not have a valid
1205 // insertion point.
1206 // FIXME: Insert dbg.value markers in the successors when appropriate.
1207 if (InsertionPt != BB->end())
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001208 Builder.insertDbgValueIntrinsic(APN, DIVar, DIExpr, DII->getDebugLoc(),
Reid Kleckner64818222016-09-27 18:45:31 +00001209 &*InsertionPt);
Keith Walkerc9412522016-09-19 09:49:30 +00001210}
1211
Adrian Prantl232897f2014-04-25 23:00:25 +00001212/// Determine whether this alloca is either a VLA or an array.
1213static bool isArray(AllocaInst *AI) {
1214 return AI->isArrayAllocation() ||
1215 AI->getType()->getElementType()->isArrayTy();
1216}
1217
Devang Patelaad34d82011-03-17 22:18:16 +00001218/// LowerDbgDeclare - Lowers llvm.dbg.declare intrinsics into an appropriate set
1219/// of llvm.dbg.value intrinsics.
1220bool llvm::LowerDbgDeclare(Function &F) {
Duncan P. N. Exon Smith5bf8fef2014-12-09 18:38:53 +00001221 DIBuilder DIB(*F.getParent(), /*AllowUnresolved*/ false);
Devang Patelaad34d82011-03-17 22:18:16 +00001222 SmallVector<DbgDeclareInst *, 4> Dbgs;
Adrian Prantl79c8e8f2014-03-27 23:30:04 +00001223 for (auto &FI : F)
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001224 for (Instruction &BI : FI)
1225 if (auto DDI = dyn_cast<DbgDeclareInst>(&BI))
Devang Patelaad34d82011-03-17 22:18:16 +00001226 Dbgs.push_back(DDI);
Adrian Prantl79c8e8f2014-03-27 23:30:04 +00001227
Devang Patelaad34d82011-03-17 22:18:16 +00001228 if (Dbgs.empty())
1229 return false;
1230
Adrian Prantl79c8e8f2014-03-27 23:30:04 +00001231 for (auto &I : Dbgs) {
1232 DbgDeclareInst *DDI = I;
Adrian Prantl8e10fdb2013-11-18 23:04:38 +00001233 AllocaInst *AI = dyn_cast_or_null<AllocaInst>(DDI->getAddress());
1234 // If this is an alloca for a scalar variable, insert a dbg.value
1235 // at each load and store to the alloca and erase the dbg.declare.
Adrian Prantl32da8892014-04-25 20:49:25 +00001236 // The dbg.values allow tracking a variable even if it is not
1237 // stored on the stack, while the dbg.declare can only describe
1238 // the stack slot (and at a lexical-scope granularity). Later
1239 // passes will attempt to elide the stack slot.
Adrian Prantl232897f2014-04-25 23:00:25 +00001240 if (AI && !isArray(AI)) {
Keno Fischer1dd319f2016-01-14 19:12:27 +00001241 for (auto &AIUse : AI->uses()) {
1242 User *U = AIUse.getUser();
1243 if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
1244 if (AIUse.getOperandNo() == 1)
1245 ConvertDebugDeclareToDebugValue(DDI, SI, DIB);
1246 } else if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
Devang Patel2c7ee272011-03-18 23:45:43 +00001247 ConvertDebugDeclareToDebugValue(DDI, LI, DIB);
Keno Fischer1dd319f2016-01-14 19:12:27 +00001248 } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
NAKAMURA Takumi335a7bc2014-10-28 11:53:30 +00001249 // This is a call by-value or some other instruction that
1250 // takes a pointer to the variable. Insert a *value*
1251 // intrinsic that describes the alloca.
Adrian Prantlabe04752017-07-28 20:21:02 +00001252 DIB.insertDbgValueIntrinsic(AI, DDI->getVariable(),
Adrian Prantl6825fb62017-04-18 01:21:53 +00001253 DDI->getExpression(), DDI->getDebugLoc(),
1254 CI);
Adrian Prantl87b7eb92014-10-01 18:55:02 +00001255 }
Keno Fischer1dd319f2016-01-14 19:12:27 +00001256 }
Adrian Prantl32da8892014-04-25 20:49:25 +00001257 DDI->eraseFromParent();
Devang Patelaad34d82011-03-17 22:18:16 +00001258 }
Devang Patelaad34d82011-03-17 22:18:16 +00001259 }
1260 return true;
1261}
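
// Example (illustrative sketch; the wrapper name is hypothetical): a pass that
// is about to promote allocas to SSA registers can lower dbg.declare first, so
// the variables remain described by dbg.value at each load and store once the
// allocas disappear.
static bool lowerDeclaresBeforePromotionExample(Function &F) {
  return LowerDbgDeclare(F);
}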
Cameron Zwarich843bc7d2011-05-24 03:10:43 +00001262
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001263/// Finds all intrinsics declaring local variables as living in the memory that
1264/// 'V' points to. This may include a mix of dbg.declare and
1265/// dbg.addr intrinsics.
1266TinyPtrVector<DbgInfoIntrinsic *> llvm::FindDbgAddrUses(Value *V) {
1267 auto *L = LocalAsMetadata::getIfExists(V);
1268 if (!L)
1269 return {};
1270 auto *MDV = MetadataAsValue::getIfExists(V->getContext(), L);
1271 if (!MDV)
1272 return {};
Cameron Zwarich843bc7d2011-05-24 03:10:43 +00001273
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001274 TinyPtrVector<DbgInfoIntrinsic *> Declares;
1275 for (User *U : MDV->users()) {
1276 if (auto *DII = dyn_cast<DbgInfoIntrinsic>(U))
1277 if (DII->isAddressOfVariable())
1278 Declares.push_back(DII);
1279 }
1280
1281 return Declares;
Cameron Zwarich843bc7d2011-05-24 03:10:43 +00001282}
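
// Example (illustrative sketch; the helper name is hypothetical): before
// deleting an alloca, a pass can check whether any dbg.declare or dbg.addr
// intrinsic still describes a source variable as living at that address.
static bool allocaStillDescribesVariableExample(AllocaInst *AI) {
  return !FindDbgAddrUses(AI).empty();
}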
Alexey Samsonov3d43b632012-12-12 14:31:53 +00001283
Adrian Prantlfa9e84e2017-03-16 20:11:54 +00001284void llvm::findDbgValues(SmallVectorImpl<DbgValueInst *> &DbgValues, Value *V) {
Keith Walkerba159892016-09-22 14:13:25 +00001285 if (auto *L = LocalAsMetadata::getIfExists(V))
1286 if (auto *MDV = MetadataAsValue::getIfExists(V->getContext(), L))
1287 for (User *U : MDV->users())
1288 if (DbgValueInst *DVI = dyn_cast<DbgValueInst>(U))
Adrian Prantlfa9e84e2017-03-16 20:11:54 +00001289 DbgValues.push_back(DVI);
Keith Walkerba159892016-09-22 14:13:25 +00001290}
1291
Reid Kleckner29a5c032017-11-14 21:49:06 +00001292static void findDbgUsers(SmallVectorImpl<DbgInfoIntrinsic *> &DbgUsers,
1293 Value *V) {
1294 if (auto *L = LocalAsMetadata::getIfExists(V))
1295 if (auto *MDV = MetadataAsValue::getIfExists(V->getContext(), L))
1296 for (User *U : MDV->users())
1297 if (DbgInfoIntrinsic *DII = dyn_cast<DbgInfoIntrinsic>(U))
1298 DbgUsers.push_back(DII);
1299}
1300
Evgeniy Stepanov42f3b122015-12-01 00:40:05 +00001301bool llvm::replaceDbgDeclare(Value *Address, Value *NewAddress,
1302 Instruction *InsertBefore, DIBuilder &Builder,
Adrian Prantld1317012017-12-08 21:58:18 +00001303 bool DerefBefore, int Offset, bool DerefAfter) {
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001304 auto DbgAddrs = FindDbgAddrUses(Address);
1305 for (DbgInfoIntrinsic *DII : DbgAddrs) {
1306 DebugLoc Loc = DII->getDebugLoc();
1307 auto *DIVar = DII->getVariable();
1308 auto *DIExpr = DII->getExpression();
1309 assert(DIVar && "Missing variable");
Adrian Prantld1317012017-12-08 21:58:18 +00001310 DIExpr = DIExpression::prepend(DIExpr, DerefBefore, Offset, DerefAfter);
Reid Kleckner0fe506b2017-09-21 19:52:03 +00001311 // Insert llvm.dbg.declare immediately after InsertBefore, and remove old
1312 // llvm.dbg.declare.
1313 Builder.insertDeclare(NewAddress, DIVar, DIExpr, Loc, InsertBefore);
1314 if (DII == InsertBefore)
1315 InsertBefore = &*std::next(InsertBefore->getIterator());
1316 DII->eraseFromParent();
1317 }
1318 return !DbgAddrs.empty();
Alexey Samsonov3d43b632012-12-12 14:31:53 +00001319}
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001320
Evgeniy Stepanov42f3b122015-12-01 00:40:05 +00001321bool llvm::replaceDbgDeclareForAlloca(AllocaInst *AI, Value *NewAllocaAddress,
Adrian Prantld1317012017-12-08 21:58:18 +00001322 DIBuilder &Builder, bool DerefBefore,
1323 int Offset, bool DerefAfter) {
Evgeniy Stepanov42f3b122015-12-01 00:40:05 +00001324 return replaceDbgDeclare(AI, NewAllocaAddress, AI->getNextNode(), Builder,
Adrian Prantld1317012017-12-08 21:58:18 +00001325 DerefBefore, Offset, DerefAfter);
Evgeniy Stepanov42f3b122015-12-01 00:40:05 +00001326}
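
// Example (illustrative sketch; the function name and the deref/offset choices
// are assumptions, not taken from this file): a pass that moves an alloca into
// a frame object at a fixed byte offset can retarget the associated
// dbg.declare/dbg.addr intrinsics to the new base address.
static void retargetDeclareToFrameSlotExample(AllocaInst *Old, Value *NewBase,
                                              int ByteOffset, DIBuilder &DIB) {
  replaceDbgDeclareForAlloca(Old, NewBase, DIB, /*DerefBefore=*/false,
                             ByteOffset, /*DerefAfter=*/false);
}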
1327
Evgeniy Stepanov72d961a2016-06-16 22:34:00 +00001328static void replaceOneDbgValueForAlloca(DbgValueInst *DVI, Value *NewAddress,
1329 DIBuilder &Builder, int Offset) {
1330 DebugLoc Loc = DVI->getDebugLoc();
1331 auto *DIVar = DVI->getVariable();
1332 auto *DIExpr = DVI->getExpression();
1333 assert(DIVar && "Missing variable");
1334
1335 // This is an alloca-based llvm.dbg.value. The first thing it should do with
1336 // the alloca pointer is dereference it. Otherwise we don't know how to handle
1337 // it and give up.
1338 if (!DIExpr || DIExpr->getNumElements() < 1 ||
1339 DIExpr->getElement(0) != dwarf::DW_OP_deref)
1340 return;
1341
1342 // Insert the offset immediately after the first deref.
1343 // We could just change the offset argument of dbg.value, but it's unsigned...
1344 if (Offset) {
Adrian Prantl47ea6472017-03-16 21:14:09 +00001345 SmallVector<uint64_t, 4> Ops;
1346 Ops.push_back(dwarf::DW_OP_deref);
Andrew Ng03e35b62017-04-28 08:44:30 +00001347 DIExpression::appendOffset(Ops, Offset);
Adrian Prantl47ea6472017-03-16 21:14:09 +00001348 Ops.append(DIExpr->elements_begin() + 1, DIExpr->elements_end());
1349 DIExpr = Builder.createExpression(Ops);
Evgeniy Stepanov72d961a2016-06-16 22:34:00 +00001350 }
1351
Adrian Prantlabe04752017-07-28 20:21:02 +00001352 Builder.insertDbgValueIntrinsic(NewAddress, DIVar, DIExpr, Loc, DVI);
Evgeniy Stepanov72d961a2016-06-16 22:34:00 +00001353 DVI->eraseFromParent();
1354}
1355
1356void llvm::replaceDbgValueForAlloca(AllocaInst *AI, Value *NewAllocaAddress,
1357 DIBuilder &Builder, int Offset) {
1358 if (auto *L = LocalAsMetadata::getIfExists(AI))
1359 if (auto *MDV = MetadataAsValue::getIfExists(AI->getContext(), L))
1360 for (auto UI = MDV->use_begin(), UE = MDV->use_end(); UI != UE;) {
1361 Use &U = *UI++;
1362 if (auto *DVI = dyn_cast<DbgValueInst>(U.getUser()))
1363 replaceOneDbgValueForAlloca(DVI, NewAllocaAddress, Builder, Offset);
1364 }
1365}
1366
Adrian Prantl47ea6472017-03-16 21:14:09 +00001367void llvm::salvageDebugInfo(Instruction &I) {
1368 SmallVector<DbgValueInst *, 1> DbgValues;
1369 auto &M = *I.getModule();
1370
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001371 auto wrapMD = [&](Value *V) {
Adrian Prantl47ea6472017-03-16 21:14:09 +00001372 return MetadataAsValue::get(I.getContext(), ValueAsMetadata::get(V));
1373 };
1374
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001375 auto applyOffset = [&](DbgValueInst *DVI, uint64_t Offset) {
1376 auto *DIExpr = DVI->getExpression();
1377 DIExpr = DIExpression::prepend(DIExpr, DIExpression::NoDeref, Offset,
Adrian Prantld1317012017-12-08 21:58:18 +00001378 DIExpression::NoDeref,
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001379 DIExpression::WithStackValue);
1380 DVI->setOperand(0, wrapMD(I.getOperand(0)));
1381 DVI->setOperand(2, MetadataAsValue::get(I.getContext(), DIExpr));
1382 DEBUG(dbgs() << "SALVAGE: " << *DVI << '\n');
1383 };
1384
Adrian Prantl261ac8b2017-11-03 21:55:03 +00001385 if (isa<BitCastInst>(&I) || isa<IntToPtrInst>(&I)) {
Reid Kleckner29a5c032017-11-14 21:49:06 +00001386 // Bitcasts are entirely irrelevant for debug info. Rewrite dbg.value,
1387 // dbg.addr, and dbg.declare to use the cast's source.
1388 SmallVector<DbgInfoIntrinsic *, 1> DbgUsers;
1389 findDbgUsers(DbgUsers, &I);
1390 for (auto *DII : DbgUsers) {
1391 DII->setOperand(0, wrapMD(I.getOperand(0)));
1392 DEBUG(dbgs() << "SALVAGE: " << *DII << '\n');
Adrian Prantl47ea6472017-03-16 21:14:09 +00001393 }
1394 } else if (auto *GEP = dyn_cast<GetElementPtrInst>(&I)) {
Adrian Prantl6d80a262017-03-20 16:39:41 +00001395 findDbgValues(DbgValues, &I);
Adrian Prantl47ea6472017-03-16 21:14:09 +00001396 for (auto *DVI : DbgValues) {
1397 unsigned BitWidth =
1398 M.getDataLayout().getPointerSizeInBits(GEP->getPointerAddressSpace());
1399 APInt Offset(BitWidth, 0);
Adrian Prantlf2c79972017-04-24 18:11:42 +00001400 // Rewrite a constant GEP into a DIExpression. Since we are performing
1401 // arithmetic to compute the variable's *value* in the DIExpression, we
1402 // need to mark the expression with a DW_OP_stack_value.
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001403 if (GEP->accumulateConstantOffset(M.getDataLayout(), Offset))
Adrian Prantl1a18f1a2017-04-21 20:06:41 +00001404 // GEP offsets are i32 and thus always fit into an int64_t.
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001405 applyOffset(DVI, Offset.getSExtValue());
Adrian Prantl47ea6472017-03-16 21:14:09 +00001406 }
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001407 } else if (auto *BI = dyn_cast<BinaryOperator>(&I)) {
1408 if (BI->getOpcode() == Instruction::Add)
1409 if (auto *ConstInt = dyn_cast<ConstantInt>(I.getOperand(1)))
1410 if (ConstInt->getBitWidth() <= 64) {
1411 APInt Offset = ConstInt->getValue();
1412 findDbgValues(DbgValues, &I);
1413 for (auto *DVI : DbgValues)
1414 applyOffset(DVI, Offset.getSExtValue());
1415 }
Adrian Prantl6d80a262017-03-20 16:39:41 +00001416 } else if (isa<LoadInst>(&I)) {
1417 findDbgValues(DbgValues, &I);
Adrian Prantl47ea6472017-03-16 21:14:09 +00001418 for (auto *DVI : DbgValues) {
1419 // Rewrite the load into DW_OP_deref.
1420 auto *DIExpr = DVI->getExpression();
Adrian Prantl109b2362017-04-28 17:51:05 +00001421 DIExpr = DIExpression::prepend(DIExpr, DIExpression::WithDeref);
Adrian Prantl182f9fe2017-11-06 22:49:39 +00001422 DVI->setOperand(0, wrapMD(I.getOperand(0)));
Adrian Prantlabe04752017-07-28 20:21:02 +00001423 DVI->setOperand(2, MetadataAsValue::get(I.getContext(), DIExpr));
Adrian Prantl47ea6472017-03-16 21:14:09 +00001424 DEBUG(dbgs() << "SALVAGE: " << *DVI << '\n');
1425 }
1426 }
1427}
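
// Example (illustrative sketch; the helper name is hypothetical): dead-code
// elimination typically calls salvageDebugInfo right before erasing an
// instruction so that dbg.value users are rewritten in terms of the
// instruction's operands instead of losing their location entirely.
static void eraseInstructionButSalvageDebugInfoExample(Instruction *I) {
  assert(I->use_empty() && "expected a dead instruction");
  salvageDebugInfo(*I);
  I->eraseFromParent();
}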
1428
David Majnemer35c46d32016-01-24 05:26:18 +00001429unsigned llvm::removeAllNonTerminatorAndEHPadInstructions(BasicBlock *BB) {
1430 unsigned NumDeadInst = 0;
1431 // Delete the instructions backwards, as it has a reduced likelihood of
1432 // having to update as many def-use and use-def chains.
1433 Instruction *EndInst = BB->getTerminator(); // Last not to be deleted.
Duncan P. N. Exon Smithe9bc5792016-02-21 20:39:50 +00001434 while (EndInst != &BB->front()) {
David Majnemer35c46d32016-01-24 05:26:18 +00001435 // Delete the next to last instruction.
1436 Instruction *Inst = &*--EndInst->getIterator();
1437 if (!Inst->use_empty() && !Inst->getType()->isTokenTy())
1438 Inst->replaceAllUsesWith(UndefValue::get(Inst->getType()));
1439 if (Inst->isEHPad() || Inst->getType()->isTokenTy()) {
1440 EndInst = Inst;
1441 continue;
1442 }
1443 if (!isa<DbgInfoIntrinsic>(Inst))
1444 ++NumDeadInst;
1445 Inst->eraseFromParent();
1446 }
1447 return NumDeadInst;
1448}
1449
Michael Zolotukhin5020c992016-11-18 21:01:12 +00001450unsigned llvm::changeToUnreachable(Instruction *I, bool UseLLVMTrap,
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001451 bool PreserveLCSSA) {
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001452 BasicBlock *BB = I->getParent();
1453 // Loop over all of the successors, removing BB's entry from any PHI
1454 // nodes.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001455 for (BasicBlock *Successor : successors(BB))
Michael Zolotukhin5020c992016-11-18 21:01:12 +00001456 Successor->removePredecessor(BB, PreserveLCSSA);
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001457
David Majnemere14e7bc2016-06-25 08:19:55 +00001458 // Insert a call to llvm.trap right before this. This turns the undefined
1459 // behavior into a hard fail instead of falling through into random code.
1460 if (UseLLVMTrap) {
1461 Function *TrapFn =
1462 Intrinsic::getDeclaration(BB->getParent()->getParent(), Intrinsic::trap);
1463 CallInst *CallTrap = CallInst::Create(TrapFn, "", I);
1464 CallTrap->setDebugLoc(I->getDebugLoc());
1465 }
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001466 new UnreachableInst(I->getContext(), I);
1467
1468 // All instructions after this are dead.
David Majnemer88542a02016-01-24 06:26:47 +00001469 unsigned NumInstrsRemoved = 0;
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001470 BasicBlock::iterator BBI = I->getIterator(), BBE = BB->end();
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001471 while (BBI != BBE) {
1472 if (!BBI->use_empty())
1473 BBI->replaceAllUsesWith(UndefValue::get(BBI->getType()));
1474 BB->getInstList().erase(BBI++);
David Majnemer88542a02016-01-24 06:26:47 +00001475 ++NumInstrsRemoved;
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001476 }
David Majnemer88542a02016-01-24 06:26:47 +00001477 return NumInstrsRemoved;
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001478}
1479
1480/// changeToCall - Convert the specified invoke into a normal call.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001481static void changeToCall(InvokeInst *II) {
Sanjoy Dasccd14562015-12-10 06:39:02 +00001482 SmallVector<Value*, 8> Args(II->arg_begin(), II->arg_end());
Sanjoy Das8a954a02015-12-08 22:26:08 +00001483 SmallVector<OperandBundleDef, 1> OpBundles;
1484 II->getOperandBundlesAsDefs(OpBundles);
1485 CallInst *NewCall = CallInst::Create(II->getCalledValue(), Args, OpBundles,
1486 "", II);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001487 NewCall->takeName(II);
1488 NewCall->setCallingConv(II->getCallingConv());
1489 NewCall->setAttributes(II->getAttributes());
1490 NewCall->setDebugLoc(II->getDebugLoc());
1491 II->replaceAllUsesWith(NewCall);
1492
1493 // Follow the call by a branch to the normal destination.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001494 BranchInst::Create(II->getNormalDest(), II);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001495
1496 // Update PHI nodes in the unwind destination
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001497 II->getUnwindDest()->removePredecessor(II->getParent());
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001498 II->eraseFromParent();
1499}
1500
Kuba Breckaddfdba32016-11-14 21:41:13 +00001501BasicBlock *llvm::changeToInvokeAndSplitBasicBlock(CallInst *CI,
1502 BasicBlock *UnwindEdge) {
1503 BasicBlock *BB = CI->getParent();
1504
1505 // Convert this function call into an invoke instruction. First, split the
1506 // basic block.
1507 BasicBlock *Split =
1508 BB->splitBasicBlock(CI->getIterator(), CI->getName() + ".noexc");
1509
1510 // Delete the unconditional branch inserted by splitBasicBlock
1511 BB->getInstList().pop_back();
1512
1513 // Create the new invoke instruction.
1514 SmallVector<Value *, 8> InvokeArgs(CI->arg_begin(), CI->arg_end());
1515 SmallVector<OperandBundleDef, 1> OpBundles;
1516
1517 CI->getOperandBundlesAsDefs(OpBundles);
1518
1519 // Note: we're round tripping operand bundles through memory here, and that
1520 // can potentially be avoided with a cleverer API design that we do not have
1521 // as of this time.
1522
1523 InvokeInst *II = InvokeInst::Create(CI->getCalledValue(), Split, UnwindEdge,
1524 InvokeArgs, OpBundles, CI->getName(), BB);
1525 II->setDebugLoc(CI->getDebugLoc());
1526 II->setCallingConv(CI->getCallingConv());
1527 II->setAttributes(CI->getAttributes());
1528
1529 // Make sure that anything using the call now uses the invoke! This also
Sanjoy Dase6bca0e2017-05-01 17:07:49 +00001530 // updates the CallGraph if present, because it uses a WeakTrackingVH.
Kuba Breckaddfdba32016-11-14 21:41:13 +00001531 CI->replaceAllUsesWith(II);
1532
1533 // Delete the original call
1534 Split->getInstList().pop_front();
1535 return Split;
1536}
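
// Example (illustrative sketch; the names below are hypothetical): sanitizer
// style instrumentation uses this helper to give an ordinary call an unwind
// edge into a caller-provided cleanup block. Note that the original CallInst
// is destroyed by the call; the returned block holds everything that followed
// it.
static BasicBlock *addUnwindEdgeToCallExample(CallInst *CI,
                                              BasicBlock *CleanupBB) {
  return changeToInvokeAndSplitBasicBlock(CI, CleanupBB);
}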
1537
David Majnemer7fddecc2015-06-17 20:52:32 +00001538static bool markAliveBlocks(Function &F,
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001539 SmallPtrSetImpl<BasicBlock*> &Reachable) {
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001540 SmallVector<BasicBlock*, 128> Worklist;
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001541 BasicBlock *BB = &F.front();
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001542 Worklist.push_back(BB);
1543 Reachable.insert(BB);
1544 bool Changed = false;
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001545 do {
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001546 BB = Worklist.pop_back_val();
1547
1548 // Do a quick scan of the basic block, turning any obviously unreachable
1549 // instructions into LLVM unreachable insts. The instruction combining pass
1550 // canonicalizes unreachable insts into stores to null or undef.
David Majnemer9f506252016-06-25 08:34:38 +00001551 for (Instruction &I : *BB) {
Hal Finkel93046912014-07-25 21:13:35 +00001552 // Assumptions that are known to be false are equivalent to unreachable.
1553 // Also, if the condition is undefined, then we make the choice most
1554 // beneficial to the optimizer, and choose that to also be unreachable.
David Majnemer9f506252016-06-25 08:34:38 +00001555 if (auto *II = dyn_cast<IntrinsicInst>(&I)) {
Hal Finkel93046912014-07-25 21:13:35 +00001556 if (II->getIntrinsicID() == Intrinsic::assume) {
David Majnemer9f506252016-06-25 08:34:38 +00001557 if (match(II->getArgOperand(0), m_CombineOr(m_Zero(), m_Undef()))) {
David Majnemere14e7bc2016-06-25 08:19:55 +00001558 // Don't insert a call to llvm.trap right before the unreachable.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001559 changeToUnreachable(II, false);
Hal Finkel93046912014-07-25 21:13:35 +00001560 Changed = true;
1561 break;
1562 }
1563 }
1564
Sanjoy Das54a3a002016-04-21 05:09:12 +00001565 if (II->getIntrinsicID() == Intrinsic::experimental_guard) {
1566 // A call to the guard intrinsic bails out of the current compilation
1567 // unit if the predicate passed to it is false. If the predicate is a
1568 // constant false, then we know the guard will bail out of the current
1569 // compile unconditionally, so all code following it is dead.
1570 //
1571 // Note: unlike in llvm.assume, it is not "obviously profitable" for
1572 // guards to treat `undef` as `false` since a guard on `undef` can
1573 // still be useful for widening.
David Majnemer9f506252016-06-25 08:34:38 +00001574 if (match(II->getArgOperand(0), m_Zero()))
1575 if (!isa<UnreachableInst>(II->getNextNode())) {
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001576 changeToUnreachable(II->getNextNode(), /*UseLLVMTrap=*/ false);
Sanjoy Das54a3a002016-04-21 05:09:12 +00001577 Changed = true;
1578 break;
1579 }
1580 }
1581 }
1582
David Majnemer9f506252016-06-25 08:34:38 +00001583 if (auto *CI = dyn_cast<CallInst>(&I)) {
David Majnemer1fea77c2016-06-25 07:37:27 +00001584 Value *Callee = CI->getCalledValue();
1585 if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001586 changeToUnreachable(CI, /*UseLLVMTrap=*/false);
David Majnemer1fea77c2016-06-25 07:37:27 +00001587 Changed = true;
1588 break;
1589 }
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001590 if (CI->doesNotReturn()) {
1591 // If we found a call to a no-return function, insert an unreachable
1592 // instruction after it. Make sure there isn't *already* one there
1593 // though.
David Majnemer9f506252016-06-25 08:34:38 +00001594 if (!isa<UnreachableInst>(CI->getNextNode())) {
David Majnemere14e7bc2016-06-25 08:19:55 +00001595 // Don't insert a call to llvm.trap right before the unreachable.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001596 changeToUnreachable(CI->getNextNode(), false);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001597 Changed = true;
1598 }
1599 break;
1600 }
1601 }
1602
1603 // Store to undef and store to null are undefined and used to signal that
1604 // they should be changed to unreachable by passes that can't modify the
1605 // CFG.
David Majnemer9f506252016-06-25 08:34:38 +00001606 if (auto *SI = dyn_cast<StoreInst>(&I)) {
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001607 // Don't touch volatile stores.
1608 if (SI->isVolatile()) continue;
1609
1610 Value *Ptr = SI->getOperand(1);
1611
1612 if (isa<UndefValue>(Ptr) ||
1613 (isa<ConstantPointerNull>(Ptr) &&
1614 SI->getPointerAddressSpace() == 0)) {
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001615 changeToUnreachable(SI, true);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001616 Changed = true;
1617 break;
1618 }
1619 }
1620 }
1621
David Majnemer2fa86512016-01-05 06:27:50 +00001622 TerminatorInst *Terminator = BB->getTerminator();
1623 if (auto *II = dyn_cast<InvokeInst>(Terminator)) {
1624 // Turn invokes that call 'nounwind' functions into ordinary calls.
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001625 Value *Callee = II->getCalledValue();
1626 if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001627 changeToUnreachable(II, true);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001628 Changed = true;
David Majnemer7fddecc2015-06-17 20:52:32 +00001629 } else if (II->doesNotThrow() && canSimplifyInvokeNoUnwind(&F)) {
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001630 if (II->use_empty() && II->onlyReadsMemory()) {
1631 // jump to the normal destination branch.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001632 BranchInst::Create(II->getNormalDest(), II);
1633 II->getUnwindDest()->removePredecessor(II->getParent());
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001634 II->eraseFromParent();
1635 } else
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001636 changeToCall(II);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001637 Changed = true;
1638 }
David Majnemer2fa86512016-01-05 06:27:50 +00001639 } else if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(Terminator)) {
1640 // Remove catchpads which cannot be reached.
David Majnemer59eb7332016-01-05 07:42:17 +00001641 struct CatchPadDenseMapInfo {
1642 static CatchPadInst *getEmptyKey() {
1643 return DenseMapInfo<CatchPadInst *>::getEmptyKey();
1644 }
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001645
David Majnemer59eb7332016-01-05 07:42:17 +00001646 static CatchPadInst *getTombstoneKey() {
1647 return DenseMapInfo<CatchPadInst *>::getTombstoneKey();
1648 }
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001649
David Majnemer59eb7332016-01-05 07:42:17 +00001650 static unsigned getHashValue(CatchPadInst *CatchPad) {
1651 return static_cast<unsigned>(hash_combine_range(
1652 CatchPad->value_op_begin(), CatchPad->value_op_end()));
1653 }
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001654
David Majnemer59eb7332016-01-05 07:42:17 +00001655 static bool isEqual(CatchPadInst *LHS, CatchPadInst *RHS) {
1656 if (LHS == getEmptyKey() || LHS == getTombstoneKey() ||
1657 RHS == getEmptyKey() || RHS == getTombstoneKey())
1658 return LHS == RHS;
1659 return LHS->isIdenticalTo(RHS);
1660 }
1661 };
1662
1663 // Set of unique CatchPads.
1664 SmallDenseMap<CatchPadInst *, detail::DenseSetEmpty, 4,
1665 CatchPadDenseMapInfo, detail::DenseSetPair<CatchPadInst *>>
1666 HandlerSet;
1667 detail::DenseSetEmpty Empty;
David Majnemer2fa86512016-01-05 06:27:50 +00001668 for (CatchSwitchInst::handler_iterator I = CatchSwitch->handler_begin(),
1669 E = CatchSwitch->handler_end();
1670 I != E; ++I) {
1671 BasicBlock *HandlerBB = *I;
David Majnemer59eb7332016-01-05 07:42:17 +00001672 auto *CatchPad = cast<CatchPadInst>(HandlerBB->getFirstNonPHI());
1673 if (!HandlerSet.insert({CatchPad, Empty}).second) {
David Majnemer2fa86512016-01-05 06:27:50 +00001674 CatchSwitch->removeHandler(I);
1675 --I;
1676 --E;
1677 Changed = true;
1678 }
1679 }
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001680 }
1681
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001682 Changed |= ConstantFoldTerminator(BB, true);
David Majnemer9f506252016-06-25 08:34:38 +00001683 for (BasicBlock *Successor : successors(BB))
1684 if (Reachable.insert(Successor).second)
1685 Worklist.push_back(Successor);
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001686 } while (!Worklist.empty());
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001687 return Changed;
1688}
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001689
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001690void llvm::removeUnwindEdge(BasicBlock *BB) {
Joseph Tremoulet09af67a2015-09-27 01:47:46 +00001691 TerminatorInst *TI = BB->getTerminator();
1692
1693 if (auto *II = dyn_cast<InvokeInst>(TI)) {
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001694 changeToCall(II);
Joseph Tremoulet09af67a2015-09-27 01:47:46 +00001695 return;
1696 }
1697
1698 TerminatorInst *NewTI;
1699 BasicBlock *UnwindDest;
1700
1701 if (auto *CRI = dyn_cast<CleanupReturnInst>(TI)) {
1702 NewTI = CleanupReturnInst::Create(CRI->getCleanupPad(), nullptr, CRI);
1703 UnwindDest = CRI->getUnwindDest();
David Majnemer8a1c45d2015-12-12 05:38:55 +00001704 } else if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(TI)) {
1705 auto *NewCatchSwitch = CatchSwitchInst::Create(
1706 CatchSwitch->getParentPad(), nullptr, CatchSwitch->getNumHandlers(),
1707 CatchSwitch->getName(), CatchSwitch);
1708 for (BasicBlock *PadBB : CatchSwitch->handlers())
1709 NewCatchSwitch->addHandler(PadBB);
1710
1711 NewTI = NewCatchSwitch;
1712 UnwindDest = CatchSwitch->getUnwindDest();
Joseph Tremoulet09af67a2015-09-27 01:47:46 +00001713 } else {
1714 llvm_unreachable("Could not find unwind successor");
1715 }
1716
1717 NewTI->takeName(TI);
1718 NewTI->setDebugLoc(TI->getDebugLoc());
1719 UnwindDest->removePredecessor(BB);
David Majnemer8a1c45d2015-12-12 05:38:55 +00001720 TI->replaceAllUsesWith(NewTI);
Joseph Tremoulet09af67a2015-09-27 01:47:46 +00001721 TI->eraseFromParent();
1722}
1723
Davide Italiano4eb210b2017-07-07 18:54:14 +00001724/// removeUnreachableBlocks - Remove blocks that are not reachable, even
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001725/// if they are in a dead cycle. Return true if a change was made, false
Davide Italiano4eb210b2017-07-07 18:54:14 +00001726/// otherwise. If `LVI` is passed, this function preserves LazyValueInfo
1727/// after modifying the CFG.
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001728bool llvm::removeUnreachableBlocks(Function &F, LazyValueInfo *LVI) {
Matthias Braunb30f2f512016-01-30 01:24:31 +00001729 SmallPtrSet<BasicBlock*, 16> Reachable;
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001730 bool Changed = markAliveBlocks(F, Reachable);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001731
1732 // If there are unreachable blocks in the CFG...
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001733 if (Reachable.size() == F.size())
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001734 return Changed;
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001735
1736 assert(Reachable.size() < F.size());
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001737 NumRemoved += F.size()-Reachable.size();
1738
1739 // Loop over all of the basic blocks that are not reachable, dropping all of
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001740 // their internal references...
1741 for (Function::iterator BB = ++F.begin(), E = F.end(); BB != E; ++BB) {
1742 if (Reachable.count(&*BB))
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001743 continue;
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001744
1745 for (BasicBlock *Successor : successors(&*BB))
Daniel Jasper0a51ec22017-09-30 11:57:19 +00001746 if (Reachable.count(Successor))
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001747 Successor->removePredecessor(&*BB);
David Majnemerd9833ea2016-01-10 07:13:04 +00001748 if (LVI)
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001749 LVI->eraseBlock(&*BB);
Peter Collingbourne8d642de2013-08-12 22:38:43 +00001750 BB->dropAllReferences();
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001751 }
Evgeniy Stepanov2a066af2013-03-22 08:43:04 +00001752
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001753 for (Function::iterator I = ++F.begin(); I != F.end();)
1754 if (!Reachable.count(&*I))
Brian M. Rzyckicdad6c02018-01-04 21:57:32 +00001755 I = F.getBasicBlockList().erase(I);
Reid Klecknercd78ddc2018-01-04 23:23:46 +00001756 else
1757 ++I;
Evgeniy Stepanov2a066af2013-03-22 08:43:04 +00001758
Evgeniy Stepanov4fbc0d082012-12-21 11:18:49 +00001759 return true;
1760}
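
// Example (illustrative sketch; the helper name is hypothetical): a pass that
// proves an instruction can never execute can cut the program off at that
// point and then prune whatever the CFG change made unreachable.
static void killPathAndPruneCFGExample(Instruction *DeadPoint, Function &F) {
  // DeadPoint and everything after it in its block are deleted and replaced
  // with an 'unreachable' terminator; blocks reachable only through that code
  // are then erased.
  changeToUnreachable(DeadPoint, /*UseLLVMTrap=*/false);
  removeUnreachableBlocks(F, /*LVI=*/nullptr);
}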
Rafael Espindolaea46c322014-08-15 15:46:38 +00001761
Piotr Padlewskidc9b2cf2015-10-02 22:12:22 +00001762void llvm::combineMetadata(Instruction *K, const Instruction *J,
1763 ArrayRef<unsigned> KnownIDs) {
Duncan P. N. Exon Smithde36e802014-11-11 21:30:22 +00001764 SmallVector<std::pair<unsigned, MDNode *>, 4> Metadata;
Adrian Prantlcbdfdb72015-08-20 22:00:30 +00001765 K->dropUnknownNonDebugMetadata(KnownIDs);
Rafael Espindolaea46c322014-08-15 15:46:38 +00001766 K->getAllMetadataOtherThanDebugLoc(Metadata);
David Majnemer6f014d32016-07-25 02:21:19 +00001767 for (const auto &MD : Metadata) {
1768 unsigned Kind = MD.first;
Duncan P. N. Exon Smithde36e802014-11-11 21:30:22 +00001769 MDNode *JMD = J->getMetadata(Kind);
David Majnemer6f014d32016-07-25 02:21:19 +00001770 MDNode *KMD = MD.second;
Rafael Espindolaea46c322014-08-15 15:46:38 +00001771
1772 switch (Kind) {
1773 default:
1774 K->setMetadata(Kind, nullptr); // Remove unknown metadata
1775 break;
1776 case LLVMContext::MD_dbg:
1777 llvm_unreachable("getAllMetadataOtherThanDebugLoc returned a MD_dbg");
1778 case LLVMContext::MD_tbaa:
1779 K->setMetadata(Kind, MDNode::getMostGenericTBAA(JMD, KMD));
1780 break;
1781 case LLVMContext::MD_alias_scope:
Bjorn Steinbrink5ec75222015-02-08 17:07:14 +00001782 K->setMetadata(Kind, MDNode::getMostGenericAliasScope(JMD, KMD));
1783 break;
Rafael Espindolaea46c322014-08-15 15:46:38 +00001784 case LLVMContext::MD_noalias:
Hal Finkele4c0c162016-04-26 02:06:06 +00001785 case LLVMContext::MD_mem_parallel_loop_access:
Rafael Espindolaea46c322014-08-15 15:46:38 +00001786 K->setMetadata(Kind, MDNode::intersect(JMD, KMD));
1787 break;
1788 case LLVMContext::MD_range:
1789 K->setMetadata(Kind, MDNode::getMostGenericRange(JMD, KMD));
1790 break;
1791 case LLVMContext::MD_fpmath:
1792 K->setMetadata(Kind, MDNode::getMostGenericFPMath(JMD, KMD));
1793 break;
1794 case LLVMContext::MD_invariant_load:
1795 // Only set the !invariant.load if it is present in both instructions.
1796 K->setMetadata(Kind, JMD);
1797 break;
Philip Reamesd7c21362014-10-21 21:02:19 +00001798 case LLVMContext::MD_nonnull:
1799 // Only set the !nonnull if it is present in both instructions.
1800 K->setMetadata(Kind, JMD);
1801 break;
Piotr Padlewskidc9b2cf2015-10-02 22:12:22 +00001802 case LLVMContext::MD_invariant_group:
1803 // Preserve !invariant.group in K.
1804 break;
Artur Pilipenko5c5011d2015-11-02 17:53:51 +00001805 case LLVMContext::MD_align:
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001806 K->setMetadata(Kind,
Artur Pilipenko5c5011d2015-11-02 17:53:51 +00001807 MDNode::getMostGenericAlignmentOrDereferenceable(JMD, KMD));
1808 break;
1809 case LLVMContext::MD_dereferenceable:
1810 case LLVMContext::MD_dereferenceable_or_null:
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001811 K->setMetadata(Kind,
Artur Pilipenko5c5011d2015-11-02 17:53:51 +00001812 MDNode::getMostGenericAlignmentOrDereferenceable(JMD, KMD));
1813 break;
Rafael Espindolaea46c322014-08-15 15:46:38 +00001814 }
1815 }
Piotr Padlewskidc9b2cf2015-10-02 22:12:22 +00001816 // Set !invariant.group from J if J has it. If both instructions have it
1817 // then we will just pick it from J - even when they are different.
1818 // Also make sure that K is a load or store - e.g. combining a bitcast with a
1819 // load could produce a bitcast with invariant.group metadata, which is invalid.
1820 // FIXME: we should try to preserve both invariant.group md if they are
1821 // different, but right now instruction can only have one invariant.group.
1822 if (auto *JMD = J->getMetadata(LLVMContext::MD_invariant_group))
1823 if (isa<LoadInst>(K) || isa<StoreInst>(K))
1824 K->setMetadata(LLVMContext::MD_invariant_group, JMD);
Rafael Espindolaea46c322014-08-15 15:46:38 +00001825}
Philip Reames7c78ef72015-05-22 23:53:24 +00001826
Eli Friedman02419a92016-08-08 04:10:22 +00001827void llvm::combineMetadataForCSE(Instruction *K, const Instruction *J) {
1828 unsigned KnownIDs[] = {
1829 LLVMContext::MD_tbaa, LLVMContext::MD_alias_scope,
1830 LLVMContext::MD_noalias, LLVMContext::MD_range,
1831 LLVMContext::MD_invariant_load, LLVMContext::MD_nonnull,
1832 LLVMContext::MD_invariant_group, LLVMContext::MD_align,
1833 LLVMContext::MD_dereferenceable,
1834 LLVMContext::MD_dereferenceable_or_null};
1835 combineMetadata(K, J, KnownIDs);
1836}
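
// Example (illustrative sketch; the helper name is hypothetical): a CSE-style
// transform that keeps instruction K and deletes an equivalent instruction J
// merges their metadata first, so the surviving instruction only carries
// claims that held for both copies.
static void replaceDuplicateInstructionExample(Instruction *J, Instruction *K) {
  combineMetadataForCSE(K, J);
  J->replaceAllUsesWith(K);
  J->eraseFromParent();
}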
1837
Piotr Padlewskid979c1f2017-05-09 19:39:44 +00001838template <typename RootType, typename DominatesFn>
1839static unsigned replaceDominatedUsesWith(Value *From, Value *To,
1840 const RootType &Root,
1841 const DominatesFn &Dominates) {
Piotr Padlewski28ffcbe2015-09-02 19:59:59 +00001842 assert(From->getType() == To->getType());
1843
1844 unsigned Count = 0;
1845 for (Value::use_iterator UI = From->use_begin(), UE = From->use_end();
1846 UI != UE;) {
1847 Use &U = *UI++;
Piotr Padlewskid979c1f2017-05-09 19:39:44 +00001848 if (!Dominates(Root, U))
1849 continue;
1850 U.set(To);
1851 DEBUG(dbgs() << "Replace dominated use of '" << From->getName() << "' as "
1852 << *To << " in " << *U << "\n");
1853 ++Count;
Piotr Padlewski28ffcbe2015-09-02 19:59:59 +00001854 }
1855 return Count;
1856}
Sanjoy Dasc21a05a2015-10-08 23:18:30 +00001857
Anna Thomasc07d5542017-05-23 13:36:25 +00001858unsigned llvm::replaceNonLocalUsesWith(Instruction *From, Value *To) {
1859 assert(From->getType() == To->getType());
1860 auto *BB = From->getParent();
1861 unsigned Count = 0;
1862
1863 for (Value::use_iterator UI = From->use_begin(), UE = From->use_end();
1864 UI != UE;) {
1865 Use &U = *UI++;
1866 auto *I = cast<Instruction>(U.getUser());
1867 if (I->getParent() == BB)
1868 continue;
1869 U.set(To);
1870 ++Count;
1871 }
1872 return Count;
1873}
1874
Piotr Padlewskid979c1f2017-05-09 19:39:44 +00001875unsigned llvm::replaceDominatedUsesWith(Value *From, Value *To,
1876 DominatorTree &DT,
1877 const BasicBlockEdge &Root) {
1878 auto Dominates = [&DT](const BasicBlockEdge &Root, const Use &U) {
1879 return DT.dominates(Root, U);
1880 };
1881 return ::replaceDominatedUsesWith(From, To, Root, Dominates);
1882}
1883
1884unsigned llvm::replaceDominatedUsesWith(Value *From, Value *To,
1885 DominatorTree &DT,
1886 const BasicBlock *BB) {
1887 auto ProperlyDominates = [&DT](const BasicBlock *BB, const Use &U) {
1888 auto *I = cast<Instruction>(U.getUser())->getParent();
1889 return DT.properlyDominates(BB, I);
1890 };
1891 return ::replaceDominatedUsesWith(From, To, BB, ProperlyDominates);
1892}
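
// Example (illustrative sketch; the names are hypothetical): after proving
// that X equals C whenever a conditional branch takes its true successor, a
// pass such as GVN can rewrite just the uses dominated by that edge. X and C
// must have the same type.
static unsigned propagateEqualityOnTrueEdgeExample(Value *X, Value *C,
                                                   BranchInst *Br,
                                                   DominatorTree &DT) {
  BasicBlockEdge TrueEdge(Br->getParent(), Br->getSuccessor(0));
  return replaceDominatedUsesWith(X, C, DT, TrueEdge);
}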
1893
Daniel Neilson2574d7c2017-07-27 16:49:39 +00001894bool llvm::callsGCLeafFunction(ImmutableCallSite CS,
1895 const TargetLibraryInfo &TLI) {
Sanjoy Dasc21a05a2015-10-08 23:18:30 +00001896 // Check if the function is specifically marked as a gc leaf function.
Manuel Jacob3eedd112016-01-05 23:59:08 +00001897 if (CS.hasFnAttr("gc-leaf-function"))
1898 return true;
Sanjoy Dasd4c78332016-03-25 20:12:13 +00001899 if (const Function *F = CS.getCalledFunction()) {
1900 if (F->hasFnAttribute("gc-leaf-function"))
1901 return true;
1902
1903 if (auto IID = F->getIntrinsicID())
1904 // Most LLVM intrinsics do not take safepoints.
1905 return IID != Intrinsic::experimental_gc_statepoint &&
1906 IID != Intrinsic::experimental_deoptimize;
1907 }
Sanjoy Dasc21a05a2015-10-08 23:18:30 +00001908
Daniel Neilson2574d7c2017-07-27 16:49:39 +00001909 // Lib calls can be materialized by some passes, and won't be
1910 // marked as 'gc-leaf-function.' All available Libcalls are
1911 // GC-leaf.
1912 LibFunc LF;
1913 if (TLI.getLibFunc(CS, LF)) {
1914 return TLI.has(LF);
1915 }
1916
Sanjoy Dasc21a05a2015-10-08 23:18:30 +00001917 return false;
1918}
James Molloyf01488e2016-01-15 09:20:19 +00001919
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001920void llvm::copyNonnullMetadata(const LoadInst &OldLI, MDNode *N,
1921 LoadInst &NewLI) {
1922 auto *NewTy = NewLI.getType();
1923
1924 // This only directly applies if the new type is also a pointer.
1925 if (NewTy->isPointerTy()) {
1926 NewLI.setMetadata(LLVMContext::MD_nonnull, N);
1927 return;
1928 }
1929
1930 // The only other translation we can do is to integral loads with !range
1931 // metadata.
1932 if (!NewTy->isIntegerTy())
1933 return;
1934
1935 MDBuilder MDB(NewLI.getContext());
1936 const Value *Ptr = OldLI.getPointerOperand();
1937 auto *ITy = cast<IntegerType>(NewTy);
1938 auto *NullInt = ConstantExpr::getPtrToInt(
1939 ConstantPointerNull::get(cast<PointerType>(Ptr->getType())), ITy);
1940 auto *NonNullInt = ConstantExpr::getAdd(NullInt, ConstantInt::get(ITy, 1));
1941 NewLI.setMetadata(LLVMContext::MD_range,
1942 MDB.createRange(NonNullInt, NullInt));
1943}
1944
1945void llvm::copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI,
1946 MDNode *N, LoadInst &NewLI) {
1947 auto *NewTy = NewLI.getType();
1948
Rafael Espindolac06f55e2017-11-28 01:25:38 +00001949 // Give up unless the new type is a pointer; that is the only case with a
1950 // single, very valuable mapping (!range to !nonnull) we can do reliably.
1951 // FIXME: It would be nice to propagate this in more ways, but the type
1952 // conversions make it hard.
1953 if (!NewTy->isPointerTy())
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001954 return;
1955
Rafael Espindolac06f55e2017-11-28 01:25:38 +00001956 unsigned BitWidth = DL.getTypeSizeInBits(NewTy);
1957 if (!getConstantRangeFromMetadata(*N).contains(APInt(BitWidth, 0))) {
1958 MDNode *NN = MDNode::get(OldLI.getContext(), None);
1959 NewLI.setMetadata(LLVMContext::MD_nonnull, NN);
Chandler Carruth2abb65a2017-06-26 03:31:31 +00001960 }
1961}
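
// Example (illustrative sketch; the helper name is hypothetical): when a
// transform such as SROA rewrites a load at a different type, the old load's
// !nonnull and !range metadata cannot be copied verbatim; these helpers
// translate whatever can be preserved across the type change.
static void copyLoadMetadataExample(const DataLayout &DL, const LoadInst &OldLI,
                                    LoadInst &NewLI) {
  if (MDNode *N = OldLI.getMetadata(LLVMContext::MD_nonnull))
    copyNonnullMetadata(OldLI, N, NewLI);
  if (MDNode *N = OldLI.getMetadata(LLVMContext::MD_range))
    copyRangeMetadata(DL, OldLI, N, NewLI);
}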
1962
Benjamin Kramerb7d33112016-08-06 11:13:10 +00001963namespace {
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001964
James Molloyf01488e2016-01-15 09:20:19 +00001965/// A potential constituent of a bitreverse or bswap expression. See
1966/// collectBitParts for a fuller explanation.
1967struct BitPart {
1968 BitPart(Value *P, unsigned BW) : Provider(P) {
1969 Provenance.resize(BW);
1970 }
1971
1972 /// The Value that this is a bitreverse/bswap of.
1973 Value *Provider;
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001974
James Molloyf01488e2016-01-15 09:20:19 +00001975 /// The "provenance" of each bit. Provenance[A] = B means that bit A
1976 /// in Provider becomes bit B in the result of this expression.
1977 SmallVector<int8_t, 32> Provenance; // int8_t means max size is i128.
1978
1979 enum { Unset = -1 };
1980};
Eugene Zelenko6cadde72017-10-17 21:27:42 +00001981
Benjamin Kramerb7d33112016-08-06 11:13:10 +00001982} // end anonymous namespace
James Molloyf01488e2016-01-15 09:20:19 +00001983
1984/// Analyze the specified subexpression and see if it is capable of providing
1985/// pieces of a bswap or bitreverse. The subexpression provides a potential
1986/// piece of a bswap or bitreverse if it can be proven that each non-zero bit in
1987/// the output of the expression came from a corresponding bit in some other
1988/// value. This function is recursive, and the end result is a mapping of
1989/// bitnumber to bitnumber. It is the caller's responsibility to validate that
1990/// the bitnumber to bitnumber mapping is correct for a bswap or bitreverse.
1991///
1992/// For example, if the current subexpression is "(shl i32 %X, 24)" then we know
1993/// that the expression deposits the low byte of %X into the high byte of the
1994/// result and that all other bits are zero. This expression is accepted and a
1995/// BitPart is returned with Provider set to %X and Provenance[24-31] set to
1996/// [0-7].
1997///
1998/// To avoid revisiting values, the BitPart results are memoized into the
1999/// provided map. To avoid unnecessary copying of BitParts, BitParts are
2000/// constructed in-place in the \c BPS map. Because of this \c BPS needs to
2001/// store BitParts objects, not pointers. As we need the concept of a nullptr
2002/// BitParts (Value has been analyzed and the analysis failed), we an Optional
2003/// type instead to provide the same functionality.
2004///
2005/// Because we pass around references into \c BPS, we must use a container that
2006/// does not invalidate internal references (std::map instead of DenseMap).
James Molloyf01488e2016-01-15 09:20:19 +00002007static const Optional<BitPart> &
2008collectBitParts(Value *V, bool MatchBSwaps, bool MatchBitReversals,
2009 std::map<Value *, Optional<BitPart>> &BPS) {
2010 auto I = BPS.find(V);
2011 if (I != BPS.end())
2012 return I->second;
2013
2014 auto &Result = BPS[V] = None;
2015 auto BitWidth = cast<IntegerType>(V->getType())->getBitWidth();
2016
2017 if (Instruction *I = dyn_cast<Instruction>(V)) {
2018 // If this is an or instruction, it may be an inner node of the bswap.
2019 if (I->getOpcode() == Instruction::Or) {
2020 auto &A = collectBitParts(I->getOperand(0), MatchBSwaps,
2021 MatchBitReversals, BPS);
2022 auto &B = collectBitParts(I->getOperand(1), MatchBSwaps,
2023 MatchBitReversals, BPS);
2024 if (!A || !B)
2025 return Result;
2026
2027 // Try and merge the two together.
2028 if (!A->Provider || A->Provider != B->Provider)
2029 return Result;
2030
2031 Result = BitPart(A->Provider, BitWidth);
2032 for (unsigned i = 0; i < A->Provenance.size(); ++i) {
2033 if (A->Provenance[i] != BitPart::Unset &&
2034 B->Provenance[i] != BitPart::Unset &&
2035 A->Provenance[i] != B->Provenance[i])
2036 return Result = None;
2037
2038 if (A->Provenance[i] == BitPart::Unset)
2039 Result->Provenance[i] = B->Provenance[i];
2040 else
2041 Result->Provenance[i] = A->Provenance[i];
2042 }
2043
2044 return Result;
2045 }
2046
2047 // If this is a logical shift by a constant, recurse then shift the result.
2048 if (I->isLogicalShift() && isa<ConstantInt>(I->getOperand(1))) {
2049 unsigned BitShift =
2050 cast<ConstantInt>(I->getOperand(1))->getLimitedValue(~0U);
2051 // Ensure the shift amount is defined.
2052 if (BitShift > BitWidth)
2053 return Result;
2054
2055 auto &Res = collectBitParts(I->getOperand(0), MatchBSwaps,
2056 MatchBitReversals, BPS);
2057 if (!Res)
2058 return Result;
2059 Result = Res;
2060
2061 // Perform the "shift" on BitProvenance.
2062 auto &P = Result->Provenance;
2063 if (I->getOpcode() == Instruction::Shl) {
2064 P.erase(std::prev(P.end(), BitShift), P.end());
2065 P.insert(P.begin(), BitShift, BitPart::Unset);
2066 } else {
2067 P.erase(P.begin(), std::next(P.begin(), BitShift));
2068 P.insert(P.end(), BitShift, BitPart::Unset);
2069 }
2070
2071 return Result;
2072 }
2073
2074 // If this is a logical 'and' with a mask that clears bits, recurse then
2075 // unset the appropriate bits.
    if (I->getOpcode() == Instruction::And &&
        isa<ConstantInt>(I->getOperand(1))) {
      APInt Bit(I->getType()->getPrimitiveSizeInBits(), 1);
      const APInt &AndMask = cast<ConstantInt>(I->getOperand(1))->getValue();

      // Check that the mask allows a multiple of 8 bits for a bswap, for an
      // early exit.
      unsigned NumMaskedBits = AndMask.countPopulation();
      if (!MatchBitReversals && NumMaskedBits % 8 != 0)
        return Result;

      auto &Res = collectBitParts(I->getOperand(0), MatchBSwaps,
                                  MatchBitReversals, BPS);
      if (!Res)
        return Result;
      Result = Res;

      for (unsigned i = 0; i < BitWidth; ++i, Bit <<= 1)
        // If the AndMask is zero for this bit, clear the bit.
        if ((AndMask & Bit) == 0)
          Result->Provenance[i] = BitPart::Unset;
      return Result;
    }

    // If this is a zext instruction, zero extend the result.
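    // For example, (zext i16 %X to i32) keeps the 16 provenance entries
    // computed for %X and marks bits 16-31 of the result as Unset.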
    if (I->getOpcode() == Instruction::ZExt) {
      auto &Res = collectBitParts(I->getOperand(0), MatchBSwaps,
                                  MatchBitReversals, BPS);
      if (!Res)
        return Result;

      Result = BitPart(Res->Provider, BitWidth);
      auto NarrowBitWidth =
          cast<IntegerType>(cast<ZExtInst>(I)->getSrcTy())->getBitWidth();
      for (unsigned i = 0; i < NarrowBitWidth; ++i)
        Result->Provenance[i] = Res->Provenance[i];
      for (unsigned i = NarrowBitWidth; i < BitWidth; ++i)
        Result->Provenance[i] = BitPart::Unset;
      return Result;
    }
  }

  // Okay, we got to something that isn't a shift, 'or' or 'and'. This must be
  // the input value to the bswap/bitreverse.
  Result = BitPart(V, BitWidth);
  for (unsigned i = 0; i < BitWidth; ++i)
    Result->Provenance[i] = i;
  return Result;
}

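/// Check whether the provenance "result bit \p To is provided by source bit
/// \p From" is what a bswap of \p BitWidth bits would produce: the bit keeps
/// its position within its byte while the bytes themselves are mirrored.
/// For example, with BitWidth == 32, result bit 0 must come from bit 24.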
static bool bitTransformIsCorrectForBSwap(unsigned From, unsigned To,
                                          unsigned BitWidth) {
  if (From % 8 != To % 8)
    return false;
  // Convert from bit indices to byte indices and check for a byte reversal.
  From >>= 3;
  To >>= 3;
  BitWidth >>= 3;
  return From == BitWidth - To - 1;
}

static bool bitTransformIsCorrectForBitReverse(unsigned From, unsigned To,
                                               unsigned BitWidth) {
  return From == BitWidth - To - 1;
}

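/// For example, given the 16-bit byte-swap idiom
///   %hi = shl i16 %x, 8
///   %lo = lshr i16 %x, 8
///   %swapped = or i16 %hi, %lo
/// this routine (with MatchBSwaps set) inserts
///   %rev = call i16 @llvm.bswap.i16(i16 %x)
/// immediately before %swapped and records it in InsertedInsts; replacing the
/// uses of %swapped is left to the caller.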
bool llvm::recognizeBSwapOrBitReverseIdiom(
    Instruction *I, bool MatchBSwaps, bool MatchBitReversals,
    SmallVectorImpl<Instruction *> &InsertedInsts) {
  if (Operator::getOpcode(I) != Instruction::Or)
    return false;
  if (!MatchBSwaps && !MatchBitReversals)
    return false;
  IntegerType *ITy = dyn_cast<IntegerType>(I->getType());
  if (!ITy || ITy->getBitWidth() > 128)
    return false; // Can't do vectors or integers > 128 bits.
  unsigned BW = ITy->getBitWidth();

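  // If the or is only used by a trunc, only the truncated width is demanded;
  // the intrinsic can then be built at that narrower type and zero extended
  // afterwards.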
  unsigned DemandedBW = BW;
  IntegerType *DemandedTy = ITy;
  if (I->hasOneUse()) {
    if (TruncInst *Trunc = dyn_cast<TruncInst>(I->user_back())) {
      DemandedTy = cast<IntegerType>(Trunc->getType());
      DemandedBW = DemandedTy->getBitWidth();
    }
  }

  // Try to find all the pieces corresponding to the bswap.
  std::map<Value *, Optional<BitPart>> BPS;
  auto Res = collectBitParts(I, MatchBSwaps, MatchBitReversals, BPS);
  if (!Res)
    return false;
  auto &BitProvenance = Res->Provenance;

  // Now, is the bit permutation correct for a bswap or a bitreverse? We can
  // only byteswap values with an even number of bytes.
  bool OKForBSwap = DemandedBW % 16 == 0, OKForBitReverse = true;
  for (unsigned i = 0; i < DemandedBW; ++i) {
    OKForBSwap &=
        bitTransformIsCorrectForBSwap(BitProvenance[i], i, DemandedBW);
    OKForBitReverse &=
        bitTransformIsCorrectForBitReverse(BitProvenance[i], i, DemandedBW);
  }

  Intrinsic::ID Intrin;
  if (OKForBSwap && MatchBSwaps)
    Intrin = Intrinsic::bswap;
  else if (OKForBitReverse && MatchBitReversals)
    Intrin = Intrinsic::bitreverse;
  else
    return false;

  if (ITy != DemandedTy) {
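    // The demanded type is narrower than the or tree itself: build the
    // intrinsic at the demanded width (truncating the provider if necessary)
    // and zero extend the result back to the original type.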
    Function *F = Intrinsic::getDeclaration(I->getModule(), Intrin, DemandedTy);
    Value *Provider = Res->Provider;
    IntegerType *ProviderTy = cast<IntegerType>(Provider->getType());
    // We may need to truncate the provider.
    if (DemandedTy != ProviderTy) {
      auto *Trunc = CastInst::Create(Instruction::Trunc, Provider, DemandedTy,
                                     "trunc", I);
      InsertedInsts.push_back(Trunc);
      Provider = Trunc;
    }
    auto *CI = CallInst::Create(F, Provider, "rev", I);
    InsertedInsts.push_back(CI);
    auto *ExtInst = CastInst::Create(Instruction::ZExt, CI, ITy, "zext", I);
    InsertedInsts.push_back(ExtInst);
    return true;
  }

  Function *F = Intrinsic::getDeclaration(I->getModule(), Intrin, ITy);
  InsertedInsts.push_back(CallInst::Create(F, Res->Provider, "rev", I));
  return true;
}

// CodeGen has special handling for some string functions that may replace
// them with target-specific intrinsics. Since that'd skip our interceptors
// in ASan/MSan/TSan/DFSan, and thus make us miss some memory accesses,
// we mark affected calls as NoBuiltin, which will disable optimization
// in CodeGen.
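// For instance, CodeGen may expand a memcmp call into an inline comparison,
// in which case the sanitizer's memcmp interceptor would never run.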
void llvm::maybeMarkSanitizerLibraryCallNoBuiltin(
    CallInst *CI, const TargetLibraryInfo *TLI) {
  Function *F = CI->getCalledFunction();
  LibFunc Func;
  if (F && !F->hasLocalLinkage() && F->hasName() &&
      TLI->getLibFunc(F->getName(), Func) && TLI->hasOptimizedCodeGen(Func) &&
      !F->doesNotAccessMemory())
    CI->addAttribute(AttributeList::FunctionIndex, Attribute::NoBuiltin);
}

bool llvm::canReplaceOperandWithVariable(const Instruction *I, unsigned OpIdx) {
  // We can't have a PHI with a metadata type.
  if (I->getOperand(OpIdx)->getType()->isMetadataTy())
    return false;

  // Early exit.
  if (!isa<Constant>(I->getOperand(OpIdx)))
    return true;

  switch (I->getOpcode()) {
  default:
    return true;
  case Instruction::Call:
  case Instruction::Invoke:
    // Can't handle inline asm. Skip it.
    if (isa<InlineAsm>(ImmutableCallSite(I).getCalledValue()))
      return false;
    // Many arithmetic intrinsics have no issue taking a variable; however,
    // it's hard to distinguish these from specials such as
    // @llvm.frameaddress that require a constant.
    if (isa<IntrinsicInst>(I))
      return false;

    // Constant bundle operands may need to retain their constant-ness for
    // correctness.
    if (ImmutableCallSite(I).isBundleOperand(OpIdx))
      return false;
    return true;
  case Instruction::ShuffleVector:
    // Shufflevector masks are constant.
    return OpIdx != 2;
  case Instruction::Switch:
  case Instruction::ExtractValue:
    // All operands apart from the first are constant.
    return OpIdx == 0;
  case Instruction::InsertValue:
    // All operands apart from the first and the second are constant.
    return OpIdx < 2;
  case Instruction::Alloca:
    // Static allocas (constant size in the entry block) are handled by
    // prologue/epilogue insertion so they're free anyway. We definitely don't
    // want to make them non-constant.
    return !cast<AllocaInst>(I)->isStaticAlloca();
  case Instruction::GetElementPtr:
    if (OpIdx == 0)
      return true;
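    // An index into a struct selects a field and must stay constant; only
    // array and vector indices may be replaced with a variable.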
    gep_type_iterator It = gep_type_begin(I);
    for (auto E = std::next(It, OpIdx); It != E; ++It)
      if (It.isStruct())
        return false;
    return true;
  }
}