//===- GlobalOpt.cpp - Optimize Global Variables --------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass transforms simple global variables that never have their address
// taken.  If obviously true, it marks read/write globals as constant, deletes
// variables only stored to, etc.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/GlobalOpt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Twine.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Pass.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/CtorUtils.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include "llvm/Transforms/Utils/GlobalStatus.h"
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "globalopt"

STATISTIC(NumMarked    , "Number of globals marked constant");
STATISTIC(NumUnnamed   , "Number of globals marked unnamed_addr");
STATISTIC(NumSRA       , "Number of aggregate globals broken into scalars");
STATISTIC(NumHeapSRA   , "Number of heap objects SRA'd");
STATISTIC(NumSubstitute,"Number of globals with initializers stored into them");
STATISTIC(NumDeleted   , "Number of globals deleted");
STATISTIC(NumGlobUses  , "Number of global uses devirtualized");
STATISTIC(NumLocalized , "Number of globals localized");
STATISTIC(NumShrunkToBool  , "Number of global vars shrunk to booleans");
STATISTIC(NumFastCallFns   , "Number of functions converted to fastcc");
STATISTIC(NumCtorsEvaluated, "Number of static ctors evaluated");
STATISTIC(NumNestRemoved   , "Number of nest attributes removed");
STATISTIC(NumAliasesResolved, "Number of global aliases resolved");
STATISTIC(NumAliasesRemoved, "Number of global aliases eliminated");
STATISTIC(NumCXXDtorsRemoved, "Number of global C++ destructors removed");
STATISTIC(NumInternalFunc, "Number of internal functions");
STATISTIC(NumColdCC, "Number of functions marked coldcc");

static cl::opt<bool>
    EnableColdCCStressTest("enable-coldcc-stress-test",
                           cl::desc("Enable stress test of coldcc by adding "
                                    "calling conv to all internal functions."),
                           cl::init(false), cl::Hidden);

static cl::opt<int> ColdCCRelFreq(
    "coldcc-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc(
        "Maximum block frequency, expressed as a percentage of caller's "
        "entry frequency, for a call site to be considered cold for enabling "
        "coldcc"));

/// Is this global variable possibly used by a leak checker as a root?  If so,
/// we might not really want to eliminate the stores to it.
static bool isLeakCheckerRoot(GlobalVariable *GV) {
  // A global variable is a root if it is a pointer, or could plausibly contain
  // a pointer.  There are two challenges; one is that we could have a struct
  // that has an inner member which is a pointer.  We recurse through the type
  // to detect these (up to a point).  The other is that we may actually be a
  // union of a pointer and another type, and so our LLVM type is an integer
  // which gets converted into a pointer, or our type is an [i8 x #] with a
  // pointer potentially contained here.

  if (GV->hasPrivateLinkage())
    return false;

  SmallVector<Type *, 4> Types;
  Types.push_back(GV->getValueType());

  unsigned Limit = 20;
  do {
    Type *Ty = Types.pop_back_val();
    switch (Ty->getTypeID()) {
    default: break;
    case Type::PointerTyID: return true;
    case Type::ArrayTyID:
    case Type::VectorTyID: {
      SequentialType *STy = cast<SequentialType>(Ty);
      Types.push_back(STy->getElementType());
      break;
    }
    case Type::StructTyID: {
      StructType *STy = cast<StructType>(Ty);
      if (STy->isOpaque()) return true;
      for (StructType::element_iterator I = STy->element_begin(),
           E = STy->element_end(); I != E; ++I) {
        Type *InnerTy = *I;
        if (isa<PointerType>(InnerTy)) return true;
        if (isa<CompositeType>(InnerTy))
          Types.push_back(InnerTy);
      }
      break;
    }
    }
    if (--Limit == 0) return true;
  } while (!Types.empty());
  return false;
}

/// Given a value that is stored to a global but never read, determine whether
/// it's safe to remove the store and the chain of computation that feeds the
/// store.
static bool IsSafeComputationToRemove(Value *V, const TargetLibraryInfo *TLI) {
  do {
    if (isa<Constant>(V))
      return true;
    if (!V->hasOneUse())
      return false;
    if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) ||
        isa<GlobalValue>(V))
      return false;
    if (isAllocationFn(V, TLI))
      return true;

    Instruction *I = cast<Instruction>(V);
    if (I->mayHaveSideEffects())
      return false;
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I)) {
      if (!GEP->hasAllConstantIndices())
        return false;
    } else if (I->getNumOperands() != 1) {
      return false;
    }

    V = I->getOperand(0);
  } while (true);
}

/// This GV is a pointer root.  Loop over all users of the global and clean up
/// any that obviously don't store a dynamically allocated value into the
/// global.
static bool CleanupPointerRootUsers(GlobalVariable *GV,
                                    const TargetLibraryInfo *TLI) {
  // A brief explanation of leak checkers.  The goal is to find bugs where
  // pointers are forgotten, causing an accumulating growth in memory
  // usage over time.  The common strategy for leak checkers is to whitelist the
  // memory pointed to by globals at exit.  This is popular because it also
  // solves another problem where the main thread of a C++ program may shut down
  // before other threads that are still expecting to use those globals.  To
  // handle that case, we expect the program may create a singleton and never
  // destroy it.

  bool Changed = false;

  // If Dead[n].first is the only use of a malloc result, we can delete its
  // chain of computation and the store to the global in Dead[n].second.
  SmallVector<std::pair<Instruction *, Instruction *>, 32> Dead;

  // Constants can't be pointers to dynamically allocated memory.
  for (Value::user_iterator UI = GV->user_begin(), E = GV->user_end();
       UI != E;) {
    User *U = *UI++;
    if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      Value *V = SI->getValueOperand();
      if (isa<Constant>(V)) {
        Changed = true;
        SI->eraseFromParent();
      } else if (Instruction *I = dyn_cast<Instruction>(V)) {
        if (I->hasOneUse())
          Dead.push_back(std::make_pair(I, SI));
      }
    } else if (MemSetInst *MSI = dyn_cast<MemSetInst>(U)) {
      if (isa<Constant>(MSI->getValue())) {
        Changed = true;
        MSI->eraseFromParent();
      } else if (Instruction *I = dyn_cast<Instruction>(MSI->getValue())) {
        if (I->hasOneUse())
          Dead.push_back(std::make_pair(I, MSI));
      }
    } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(U)) {
      GlobalVariable *MemSrc = dyn_cast<GlobalVariable>(MTI->getSource());
      if (MemSrc && MemSrc->isConstant()) {
        Changed = true;
        MTI->eraseFromParent();
      } else if (Instruction *I = dyn_cast<Instruction>(MemSrc)) {
        if (I->hasOneUse())
          Dead.push_back(std::make_pair(I, MTI));
      }
    } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(U)) {
      if (CE->use_empty()) {
        CE->destroyConstant();
        Changed = true;
      }
    } else if (Constant *C = dyn_cast<Constant>(U)) {
      if (isSafeToDestroyConstant(C)) {
        C->destroyConstant();
        // This could have invalidated UI, start over from scratch.
        Dead.clear();
        CleanupPointerRootUsers(GV, TLI);
        return true;
      }
    }
  }

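  // For each recorded (computation, store) pair, erase the store and then walk
  // back through the single-use chain of instructions that produced the stored
  // value, erasing them as well (including the allocation call itself).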
  for (int i = 0, e = Dead.size(); i != e; ++i) {
    if (IsSafeComputationToRemove(Dead[i].first, TLI)) {
      Dead[i].second->eraseFromParent();
      Instruction *I = Dead[i].first;
      do {
        if (isAllocationFn(I, TLI))
          break;
        Instruction *J = dyn_cast<Instruction>(I->getOperand(0));
        if (!J)
          break;
        I->eraseFromParent();
        I = J;
      } while (true);
      I->eraseFromParent();
    }
  }

  return Changed;
}

/// We just marked GV constant.  Loop over all users of the global, cleaning up
/// the obvious ones.  This is largely just a quick scan over the use list to
/// clean up the easy and obvious cruft.  This returns true if it made a change.
static bool CleanupConstantGlobalUsers(Value *V, Constant *Init,
                                       const DataLayout &DL,
                                       TargetLibraryInfo *TLI) {
  bool Changed = false;
  // Note that we need to use a weak value handle for the worklist items. When
  // we delete a constant array, we may also be holding pointer to one of its
  // elements (or an element of one of its elements if we're dealing with an
  // array of arrays) in the worklist.
  SmallVector<WeakTrackingVH, 8> WorkList(V->user_begin(), V->user_end());
  while (!WorkList.empty()) {
    Value *UV = WorkList.pop_back_val();
    if (!UV)
      continue;

    User *U = cast<User>(UV);

    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (Init) {
        // Replace the load with the initializer.
        LI->replaceAllUsesWith(Init);
        LI->eraseFromParent();
        Changed = true;
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      // Store must be unreachable or storing Init into the global.
      SI->eraseFromParent();
      Changed = true;
    } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(U)) {
      if (CE->getOpcode() == Instruction::GetElementPtr) {
        Constant *SubInit = nullptr;
        if (Init)
          SubInit = ConstantFoldLoadThroughGEPConstantExpr(Init, CE);
        Changed |= CleanupConstantGlobalUsers(CE, SubInit, DL, TLI);
      } else if ((CE->getOpcode() == Instruction::BitCast &&
                  CE->getType()->isPointerTy()) ||
                 CE->getOpcode() == Instruction::AddrSpaceCast) {
        // Pointer cast, delete any stores and memsets to the global.
        Changed |= CleanupConstantGlobalUsers(CE, nullptr, DL, TLI);
      }

      if (CE->use_empty()) {
        CE->destroyConstant();
        Changed = true;
      }
    } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
      // Do not transform "gepinst (gep constexpr (GV))" here, because forming
      // "gepconstexpr (gep constexpr (GV))" will cause the two gep's to fold
      // and will invalidate our notion of what Init is.
      Constant *SubInit = nullptr;
      if (!isa<ConstantExpr>(GEP->getOperand(0))) {
        ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(
            ConstantFoldInstruction(GEP, DL, TLI));
        if (Init && CE && CE->getOpcode() == Instruction::GetElementPtr)
          SubInit = ConstantFoldLoadThroughGEPConstantExpr(Init, CE);

        // If the initializer is an all-null value and we have an inbounds GEP,
        // we already know what the result of any load from that GEP is.
        // TODO: Handle splats.
        if (Init && isa<ConstantAggregateZero>(Init) && GEP->isInBounds())
          SubInit = Constant::getNullValue(GEP->getResultElementType());
      }
      Changed |= CleanupConstantGlobalUsers(GEP, SubInit, DL, TLI);

      if (GEP->use_empty()) {
        GEP->eraseFromParent();
        Changed = true;
      }
    } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U)) { // memset/cpy/mv
      if (MI->getRawDest() == V) {
        MI->eraseFromParent();
        Changed = true;
      }

    } else if (Constant *C = dyn_cast<Constant>(U)) {
      // If we have a chain of dead constantexprs or other things dangling from
      // us, and if they are all dead, nuke them without remorse.
      if (isSafeToDestroyConstant(C)) {
        C->destroyConstant();
        CleanupConstantGlobalUsers(V, Init, DL, TLI);
        return true;
      }
    }
  }
  return Changed;
}

/// Return true if the specified instruction is a safe user of a derived
/// expression from a global that we want to SROA.
static bool isSafeSROAElementUse(Value *V) {
  // We might have a dead and dangling constant hanging off of here.
  if (Constant *C = dyn_cast<Constant>(V))
    return isSafeToDestroyConstant(C);

  Instruction *I = dyn_cast<Instruction>(V);
  if (!I) return false;

  // Loads are ok.
  if (isa<LoadInst>(I)) return true;

  // Stores *to* the pointer are ok.
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getOperand(0) != V;

  // Otherwise, it must be a GEP.
  GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I);
  if (!GEPI) return false;

  if (GEPI->getNumOperands() < 3 || !isa<Constant>(GEPI->getOperand(1)) ||
      !cast<Constant>(GEPI->getOperand(1))->isNullValue())
    return false;

  for (User *U : GEPI->users())
    if (!isSafeSROAElementUse(U))
      return false;
  return true;
}

/// U is a direct user of the specified global value.  Look at it and its uses
/// and decide whether it is safe to SROA this global.
static bool IsUserOfGlobalSafeForSRA(User *U, GlobalValue *GV) {
  // The user of the global must be a GEP Inst or a ConstantExpr GEP.
  if (!isa<GetElementPtrInst>(U) &&
      (!isa<ConstantExpr>(U) ||
       cast<ConstantExpr>(U)->getOpcode() != Instruction::GetElementPtr))
    return false;

  // Check to see if this ConstantExpr GEP is SRA'able.  In particular, we
  // don't like < 3 operand CE's, and we don't like non-constant integer
  // indices.  This enforces that all uses are 'gep GV, 0, C, ...' for some
  // value of C.
  if (U->getNumOperands() < 3 || !isa<Constant>(U->getOperand(1)) ||
      !cast<Constant>(U->getOperand(1))->isNullValue() ||
      !isa<ConstantInt>(U->getOperand(2)))
    return false;

  gep_type_iterator GEPI = gep_type_begin(U), E = gep_type_end(U);
  ++GEPI;  // Skip over the pointer index.

  // If this is a use of an array allocation, do a bit more checking for sanity.
  if (GEPI.isSequential()) {
    ConstantInt *Idx = cast<ConstantInt>(U->getOperand(2));

    // Check to make sure that index falls within the array.  If not,
    // something funny is going on, so we won't do the optimization.
    //
    if (GEPI.isBoundedSequential() &&
        Idx->getZExtValue() >= GEPI.getSequentialNumElements())
      return false;

    // We cannot scalar repl this level of the array unless any array
    // sub-indices are in-range constants.  In particular, consider:
    // A[0][i].  We cannot know that the user isn't doing invalid things like
    // allowing i to index an out-of-range subscript that accesses A[1].
    //
    // Scalar replacing *just* the outer index of the array is probably not
    // going to be a win anyway, so just give up.
    for (++GEPI;  // Skip array index.
         GEPI != E;
         ++GEPI) {
      if (GEPI.isStruct())
        continue;

      ConstantInt *IdxVal = dyn_cast<ConstantInt>(GEPI.getOperand());
      if (!IdxVal ||
          (GEPI.isBoundedSequential() &&
           IdxVal->getZExtValue() >= GEPI.getSequentialNumElements()))
        return false;
    }
  }

  return llvm::all_of(U->users(),
                      [](User *UU) { return isSafeSROAElementUse(UU); });
}

/// Look at all uses of the global and decide whether it is safe for us to
/// perform this transformation.
static bool GlobalUsersSafeToSRA(GlobalValue *GV) {
  for (User *U : GV->users())
    if (!IsUserOfGlobalSafeForSRA(U, GV))
      return false;

  return true;
}

/// Copy over the debug info for a variable to its SRA replacements.
static void transferSRADebugInfo(GlobalVariable *GV, GlobalVariable *NGV,
                                 uint64_t FragmentOffsetInBits,
                                 uint64_t FragmentSizeInBits,
                                 unsigned NumElements) {
  SmallVector<DIGlobalVariableExpression *, 1> GVs;
  GV->getDebugInfo(GVs);
  for (auto *GVE : GVs) {
    DIVariable *Var = GVE->getVariable();
    DIExpression *Expr = GVE->getExpression();
    if (NumElements > 1) {
      if (auto E = DIExpression::createFragmentExpression(
              Expr, FragmentOffsetInBits, FragmentSizeInBits))
        Expr = *E;
      else
        return;
    }
    auto *NGVE = DIGlobalVariableExpression::get(GVE->getContext(), Var, Expr);
    NGV->addDebugInfo(NGVE);
  }
}

/// Perform scalar replacement of aggregates on the specified global variable.
/// This opens the door for other optimizations by exposing the behavior of the
/// program in a more fine-grained way.  We have determined that this
/// transformation is safe already.  We return the first global variable we
/// insert so that the caller can reprocess it.
static GlobalVariable *SRAGlobal(GlobalVariable *GV, const DataLayout &DL) {
  // Make sure this global only has simple uses that we can SRA.
  if (!GlobalUsersSafeToSRA(GV))
    return nullptr;

  assert(GV->hasLocalLinkage());
  Constant *Init = GV->getInitializer();
  Type *Ty = Init->getType();

  std::vector<GlobalVariable *> NewGlobals;
  Module::GlobalListType &Globals = GV->getParent()->getGlobalList();

  // Get the alignment of the global, either explicit or target-specific.
  unsigned StartAlignment = GV->getAlignment();
  if (StartAlignment == 0)
    StartAlignment = DL.getABITypeAlignment(GV->getType());

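  // If the global is a struct, split it into one new internal global per
  // field; if it is an array or vector, split it into one global per element.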
  if (StructType *STy = dyn_cast<StructType>(Ty)) {
    unsigned NumElements = STy->getNumElements();
    NewGlobals.reserve(NumElements);
    const StructLayout &Layout = *DL.getStructLayout(STy);
    for (unsigned i = 0, e = NumElements; i != e; ++i) {
      Constant *In = Init->getAggregateElement(i);
      assert(In && "Couldn't get element of initializer?");
      GlobalVariable *NGV = new GlobalVariable(STy->getElementType(i), false,
                                               GlobalVariable::InternalLinkage,
                                               In, GV->getName()+"."+Twine(i),
                                               GV->getThreadLocalMode(),
                                               GV->getType()->getAddressSpace());
      NGV->setExternallyInitialized(GV->isExternallyInitialized());
      NGV->copyAttributesFrom(GV);
      Globals.push_back(NGV);
      NewGlobals.push_back(NGV);

      // Calculate the known alignment of the field.  If the original aggregate
      // had 256 byte alignment for example, something might depend on that:
      // propagate info to each field.
      uint64_t FieldOffset = Layout.getElementOffset(i);
      unsigned NewAlign = (unsigned)MinAlign(StartAlignment, FieldOffset);
      if (NewAlign > DL.getABITypeAlignment(STy->getElementType(i)))
        NGV->setAlignment(NewAlign);

      // Copy over the debug info for the variable.
      uint64_t Size = DL.getTypeAllocSizeInBits(NGV->getValueType());
      uint64_t FragmentOffsetInBits = Layout.getElementOffsetInBits(i);
      transferSRADebugInfo(GV, NGV, FragmentOffsetInBits, Size, NumElements);
    }
  } else if (SequentialType *STy = dyn_cast<SequentialType>(Ty)) {
    unsigned NumElements = STy->getNumElements();
    if (NumElements > 16 && GV->hasNUsesOrMore(16))
      return nullptr; // It's not worth it.
    NewGlobals.reserve(NumElements);
    auto ElTy = STy->getElementType();
    uint64_t EltSize = DL.getTypeAllocSize(ElTy);
    unsigned EltAlign = DL.getABITypeAlignment(ElTy);
    uint64_t FragmentSizeInBits = DL.getTypeAllocSizeInBits(ElTy);
    for (unsigned i = 0, e = NumElements; i != e; ++i) {
      Constant *In = Init->getAggregateElement(i);
      assert(In && "Couldn't get element of initializer?");

      GlobalVariable *NGV = new GlobalVariable(STy->getElementType(), false,
                                               GlobalVariable::InternalLinkage,
                                               In, GV->getName()+"."+Twine(i),
                                               GV->getThreadLocalMode(),
                                               GV->getType()->getAddressSpace());
      NGV->setExternallyInitialized(GV->isExternallyInitialized());
      NGV->copyAttributesFrom(GV);
      Globals.push_back(NGV);
      NewGlobals.push_back(NGV);

      // Calculate the known alignment of the field.  If the original aggregate
      // had 256 byte alignment for example, something might depend on that:
      // propagate info to each field.
      unsigned NewAlign = (unsigned)MinAlign(StartAlignment, EltSize*i);
      if (NewAlign > EltAlign)
        NGV->setAlignment(NewAlign);
      transferSRADebugInfo(GV, NGV, FragmentSizeInBits * i, FragmentSizeInBits,
                           NumElements);
    }
  }

  if (NewGlobals.empty())
    return nullptr;

  LLVM_DEBUG(dbgs() << "PERFORMING GLOBAL SRA ON: " << *GV << "\n");

  Constant *NullInt =Constant::getNullValue(Type::getInt32Ty(GV->getContext()));

  // Loop over all of the uses of the global, replacing the constantexpr geps,
  // with smaller constantexpr geps or direct references.
  while (!GV->use_empty()) {
    User *GEP = GV->user_back();
    assert(((isa<ConstantExpr>(GEP) &&
             cast<ConstantExpr>(GEP)->getOpcode()==Instruction::GetElementPtr)||
            isa<GetElementPtrInst>(GEP)) && "NonGEP CE's are not SRAable!");

    // Ignore operand 1 (the first index), which has to be zero or else the
    // program is quite broken (undefined).  Get operand 2, which is the
    // structure or array index.
    unsigned Val = cast<ConstantInt>(GEP->getOperand(2))->getZExtValue();
    if (Val >= NewGlobals.size()) Val = 0; // Out of bound array access.

    Value *NewPtr = NewGlobals[Val];
    Type *NewTy = NewGlobals[Val]->getValueType();

    // Form a shorter GEP if needed.
    if (GEP->getNumOperands() > 3) {
      if (ConstantExpr *CE = dyn_cast<ConstantExpr>(GEP)) {
        SmallVector<Constant*, 8> Idxs;
        Idxs.push_back(NullInt);
        for (unsigned i = 3, e = CE->getNumOperands(); i != e; ++i)
          Idxs.push_back(CE->getOperand(i));
        NewPtr =
            ConstantExpr::getGetElementPtr(NewTy, cast<Constant>(NewPtr), Idxs);
      } else {
        GetElementPtrInst *GEPI = cast<GetElementPtrInst>(GEP);
        SmallVector<Value*, 8> Idxs;
        Idxs.push_back(NullInt);
        for (unsigned i = 3, e = GEPI->getNumOperands(); i != e; ++i)
          Idxs.push_back(GEPI->getOperand(i));
        NewPtr = GetElementPtrInst::Create(
            NewTy, NewPtr, Idxs, GEPI->getName() + "." + Twine(Val), GEPI);
      }
    }
    GEP->replaceAllUsesWith(NewPtr);

    if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(GEP))
      GEPI->eraseFromParent();
    else
      cast<ConstantExpr>(GEP)->destroyConstant();
  }

  // Delete the old global, now that it is dead.
  Globals.erase(GV);
  ++NumSRA;

  // Loop over the new globals array deleting any globals that are obviously
  // dead.  This can arise due to scalarization of a structure or an array that
  // has elements that are dead.
  unsigned FirstGlobal = 0;
  for (unsigned i = 0, e = NewGlobals.size(); i != e; ++i)
    if (NewGlobals[i]->use_empty()) {
      Globals.erase(NewGlobals[i]);
      if (FirstGlobal == i) ++FirstGlobal;
    }

  return FirstGlobal != NewGlobals.size() ? NewGlobals[FirstGlobal] : nullptr;
}

/// Return true if all users of the specified value will trap if the value is
/// dynamically null.  PHIs keeps track of any phi nodes we've seen to avoid
/// reprocessing them.
static bool AllUsesOfValueWillTrapIfNull(const Value *V,
                                         SmallPtrSetImpl<const PHINode*> &PHIs) {
  for (const User *U : V->users()) {
    if (const Instruction *I = dyn_cast<Instruction>(U)) {
      // If null pointer is considered valid, then all uses are non-trapping.
      // Non address-space 0 globals have already been pruned by the caller.
      if (NullPointerIsDefined(I->getFunction()))
        return false;
    }
    if (isa<LoadInst>(U)) {
      // Will trap.
    } else if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SI->getOperand(0) == V) {
        //cerr << "NONTRAPPING USE: " << *U;
        return false;  // Storing the value.
      }
    } else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
      if (CI->getCalledValue() != V) {
        //cerr << "NONTRAPPING USE: " << *U;
        return false;  // Not calling the ptr
      }
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(U)) {
      if (II->getCalledValue() != V) {
        //cerr << "NONTRAPPING USE: " << *U;
        return false;  // Not calling the ptr
      }
    } else if (const BitCastInst *CI = dyn_cast<BitCastInst>(U)) {
      if (!AllUsesOfValueWillTrapIfNull(CI, PHIs)) return false;
    } else if (const GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(U)) {
      if (!AllUsesOfValueWillTrapIfNull(GEPI, PHIs)) return false;
    } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
      // If we've already seen this phi node, ignore it, it has already been
      // checked.
      if (PHIs.insert(PN).second && !AllUsesOfValueWillTrapIfNull(PN, PHIs))
        return false;
    } else if (isa<ICmpInst>(U) &&
               isa<ConstantPointerNull>(U->getOperand(1))) {
      // Ignore icmp X, null
    } else {
      //cerr << "NONTRAPPING USE: " << *U;
      return false;
    }
  }
  return true;
}

/// Return true if all uses of any loads from GV will trap if the loaded value
/// is null.  Note that this also permits comparisons of the loaded value
/// against null, as a special case.
static bool AllUsesOfLoadedValueWillTrapIfNull(const GlobalVariable *GV) {
  for (const User *U : GV->users())
    if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
      SmallPtrSet<const PHINode*, 8> PHIs;
      if (!AllUsesOfValueWillTrapIfNull(LI, PHIs))
        return false;
    } else if (isa<StoreInst>(U)) {
      // Ignore stores to the global.
    } else {
      // We don't know or understand this user, bail out.
      //cerr << "UNKNOWN USER OF GLOBAL!: " << *U;
      return false;
    }
  return true;
}

static bool OptimizeAwayTrappingUsesOfValue(Value *V, Constant *NewV) {
  bool Changed = false;
  for (auto UI = V->user_begin(), E = V->user_end(); UI != E; ) {
    Instruction *I = cast<Instruction>(*UI++);
    // Uses are non-trapping if null pointer is considered valid.
    // Non address-space 0 globals are already pruned by the caller.
    if (NullPointerIsDefined(I->getFunction()))
      return false;
    if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      LI->setOperand(0, NewV);
      Changed = true;
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      if (SI->getOperand(1) == V) {
        SI->setOperand(1, NewV);
        Changed = true;
      }
    } else if (isa<CallInst>(I) || isa<InvokeInst>(I)) {
      CallSite CS(I);
      if (CS.getCalledValue() == V) {
        // Calling through the pointer!  Turn into a direct call, but be careful
        // that the pointer is not also being passed as an argument.
        CS.setCalledFunction(NewV);
        Changed = true;
        bool PassedAsArg = false;
        for (unsigned i = 0, e = CS.arg_size(); i != e; ++i)
          if (CS.getArgument(i) == V) {
            PassedAsArg = true;
            CS.setArgument(i, NewV);
          }

        if (PassedAsArg) {
          // Being passed as an argument also.  Be careful to not invalidate UI!
          UI = V->user_begin();
        }
      }
    } else if (CastInst *CI = dyn_cast<CastInst>(I)) {
      Changed |= OptimizeAwayTrappingUsesOfValue(CI,
                                ConstantExpr::getCast(CI->getOpcode(),
                                                      NewV, CI->getType()));
      if (CI->use_empty()) {
        Changed = true;
        CI->eraseFromParent();
      }
    } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
      // Should handle GEP here.
      SmallVector<Constant*, 8> Idxs;
      Idxs.reserve(GEPI->getNumOperands()-1);
      for (User::op_iterator i = GEPI->op_begin() + 1, e = GEPI->op_end();
           i != e; ++i)
        if (Constant *C = dyn_cast<Constant>(*i))
          Idxs.push_back(C);
        else
          break;
      if (Idxs.size() == GEPI->getNumOperands()-1)
        Changed |= OptimizeAwayTrappingUsesOfValue(
            GEPI, ConstantExpr::getGetElementPtr(nullptr, NewV, Idxs));
      if (GEPI->use_empty()) {
        Changed = true;
        GEPI->eraseFromParent();
      }
    }
  }

  return Changed;
}

/// The specified global has only one non-null value stored into it.  If there
/// are uses of the loaded value that would trap if the loaded value is
/// dynamically null, then we know that they cannot be reachable with a null
/// loaded value, so we can optimize away the load.
Nick Lewyckycf6aae62012-02-12 01:13:18 +0000772static bool OptimizeAwayTrappingUsesOfLoads(GlobalVariable *GV, Constant *LV,
Mehdi Amini46a43552015-03-04 18:43:29 +0000773 const DataLayout &DL,
Nick Lewyckycf6aae62012-02-12 01:13:18 +0000774 TargetLibraryInfo *TLI) {
Chris Lattnere42eb312004-10-10 23:14:11 +0000775 bool Changed = false;
776
Chris Lattner2538eb62009-01-14 00:12:58 +0000777 // Keep track of whether we are able to remove all the uses of the global
778 // other than the store that defines it.
779 bool AllNonStoreUsesGone = true;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000780
Chris Lattnere42eb312004-10-10 23:14:11 +0000781 // Replace all uses of loads with uses of uses of the stored value.
Chandler Carruthcdf47882014-03-09 03:16:01 +0000782 for (Value::user_iterator GUI = GV->user_begin(), E = GV->user_end(); GUI != E;){
Chris Lattner2538eb62009-01-14 00:12:58 +0000783 User *GlobalUser = *GUI++;
784 if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) {
Chris Lattner46b5c642009-11-06 04:27:31 +0000785 Changed |= OptimizeAwayTrappingUsesOfValue(LI, LV);
Chris Lattner2538eb62009-01-14 00:12:58 +0000786 // If we were able to delete all uses of the loads
787 if (LI->use_empty()) {
788 LI->eraseFromParent();
789 Changed = true;
790 } else {
791 AllNonStoreUsesGone = false;
792 }
793 } else if (isa<StoreInst>(GlobalUser)) {
794 // Ignore the store that stores "LV" to the global.
795 assert(GlobalUser->getOperand(1) == GV &&
796 "Must be storing *to* the global");
Chris Lattnere42eb312004-10-10 23:14:11 +0000797 } else {
Chris Lattner2538eb62009-01-14 00:12:58 +0000798 AllNonStoreUsesGone = false;
799
800 // If we get here we could have other crazy uses that are transitively
801 // loaded.
802 assert((isa<PHINode>(GlobalUser) || isa<SelectInst>(GlobalUser) ||
Benjamin Kramered843602012-09-28 10:01:27 +0000803 isa<ConstantExpr>(GlobalUser) || isa<CmpInst>(GlobalUser) ||
804 isa<BitCastInst>(GlobalUser) ||
805 isa<GetElementPtrInst>(GlobalUser)) &&
Chris Lattner1a1acc22011-05-22 07:15:13 +0000806 "Only expect load and stores!");
Chris Lattnere42eb312004-10-10 23:14:11 +0000807 }
Chris Lattner2538eb62009-01-14 00:12:58 +0000808 }
Chris Lattnere42eb312004-10-10 23:14:11 +0000809
810 if (Changed) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000811 LLVM_DEBUG(dbgs() << "OPTIMIZED LOADS FROM STORED ONCE POINTER: " << *GV
812 << "\n");
Chris Lattnere42eb312004-10-10 23:14:11 +0000813 ++NumGlobUses;
814 }
815
Chris Lattnere42eb312004-10-10 23:14:11 +0000816 // If we nuked all of the loads, then none of the stores are needed either,
817 // nor is the global.
Chris Lattner2538eb62009-01-14 00:12:58 +0000818 if (AllNonStoreUsesGone) {
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +0000819 if (isLeakCheckerRoot(GV)) {
Benjamin Kramer8bcc9712012-08-29 15:32:21 +0000820 Changed |= CleanupPointerRootUsers(GV, TLI);
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +0000821 } else {
822 Changed = true;
Craig Topperf40110f2014-04-25 05:29:35 +0000823 CleanupConstantGlobalUsers(GV, nullptr, DL, TLI);
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +0000824 }
Chris Lattnere42eb312004-10-10 23:14:11 +0000825 if (GV->use_empty()) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000826 LLVM_DEBUG(dbgs() << " *** GLOBAL NOW DEAD!\n");
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +0000827 Changed = true;
Chris Lattner8e71c6a2004-10-16 18:09:00 +0000828 GV->eraseFromParent();
Chris Lattnere42eb312004-10-10 23:14:11 +0000829 ++NumDeleted;
830 }
Chris Lattnere42eb312004-10-10 23:14:11 +0000831 }
832 return Changed;
833}
834
James Molloyea31ad32015-11-13 11:05:07 +0000835/// Walk the use list of V, constant folding all of the instructions that are
836/// foldable.
Mehdi Amini46a43552015-03-04 18:43:29 +0000837static void ConstantPropUsersOf(Value *V, const DataLayout &DL,
Rafael Espindolaaeff8a92014-02-24 23:12:18 +0000838 TargetLibraryInfo *TLI) {
Chandler Carruthcdf47882014-03-09 03:16:01 +0000839 for (Value::user_iterator UI = V->user_begin(), E = V->user_end(); UI != E; )
Chris Lattner004e2502004-10-11 05:54:41 +0000840 if (Instruction *I = dyn_cast<Instruction>(*UI++))
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000841 if (Constant *NewC = ConstantFoldInstruction(I, DL, TLI)) {
Chris Lattner004e2502004-10-11 05:54:41 +0000842 I->replaceAllUsesWith(NewC);
843
Chris Lattnerd6a44922005-02-01 01:23:31 +0000844 // Advance UI to the next non-I use to avoid invalidating it!
845 // Instructions could multiply use V.
846 while (UI != E && *UI == I)
Chris Lattner004e2502004-10-11 05:54:41 +0000847 ++UI;
David Majnemer522a9112016-07-22 04:54:44 +0000848 if (isInstructionTriviallyDead(I, TLI))
849 I->eraseFromParent();
Chris Lattner004e2502004-10-11 05:54:41 +0000850 }
851}
852
James Molloyea31ad32015-11-13 11:05:07 +0000853/// This function takes the specified global variable, and transforms the
854/// program as if it always contained the result of the specified malloc.
855/// Because it is always the result of the specified malloc, there is no reason
856/// to actually DO the malloc. Instead, turn the malloc into a global, and any
857/// loads of GV as uses of the new global.
Mehdi Amini46a43552015-03-04 18:43:29 +0000858static GlobalVariable *
859OptimizeGlobalAddressOfMalloc(GlobalVariable *GV, CallInst *CI, Type *AllocTy,
860 ConstantInt *NElements, const DataLayout &DL,
861 TargetLibraryInfo *TLI) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000862 LLVM_DEBUG(errs() << "PROMOTING GLOBAL: " << *GV << " CALL = " << *CI
863 << '\n');
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000864
Chris Lattner229907c2011-07-18 04:54:35 +0000865 Type *GlobalType;
Chris Lattner7939f792010-02-25 22:33:52 +0000866 if (NElements->getZExtValue() == 1)
867 GlobalType = AllocTy;
868 else
869 // If we have an array allocation, the global variable is of an array.
870 GlobalType = ArrayType::get(AllocTy, NElements->getZExtValue());
Victor Hernandez5d034492009-09-18 22:35:49 +0000871
872 // Create the new global variable. The contents of the malloc'd memory is
873 // undefined, so initialize with an undef value.
Rafael Espindolae4ed0e52015-12-22 19:16:50 +0000874 GlobalVariable *NewGV = new GlobalVariable(
875 *GV->getParent(), GlobalType, false, GlobalValue::InternalLinkage,
876 UndefValue::get(GlobalType), GV->getName() + ".body", nullptr,
877 GV->getThreadLocalMode());
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000878
Chris Lattner7939f792010-02-25 22:33:52 +0000879 // If there are bitcast users of the malloc (which is typical, usually we have
880 // a malloc + bitcast) then replace them with uses of the new global. Update
881 // other users to use the global as well.
Craig Topperf40110f2014-04-25 05:29:35 +0000882 BitCastInst *TheBC = nullptr;
Chris Lattner7939f792010-02-25 22:33:52 +0000883 while (!CI->use_empty()) {
Chandler Carruthcdf47882014-03-09 03:16:01 +0000884 Instruction *User = cast<Instruction>(CI->user_back());
Chris Lattner7939f792010-02-25 22:33:52 +0000885 if (BitCastInst *BCI = dyn_cast<BitCastInst>(User)) {
886 if (BCI->getType() == NewGV->getType()) {
887 BCI->replaceAllUsesWith(NewGV);
888 BCI->eraseFromParent();
889 } else {
890 BCI->setOperand(0, NewGV);
891 }
892 } else {
Craig Topperf40110f2014-04-25 05:29:35 +0000893 if (!TheBC)
Chris Lattner7939f792010-02-25 22:33:52 +0000894 TheBC = new BitCastInst(NewGV, CI->getType(), "newgv", CI);
895 User->replaceUsesOfWith(CI, TheBC);
896 }
897 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000898
Victor Hernandez5d034492009-09-18 22:35:49 +0000899 Constant *RepValue = NewGV;
Manuel Jacob5f6eaac2016-01-16 20:30:46 +0000900 if (NewGV->getType() != GV->getValueType())
901 RepValue = ConstantExpr::getBitCast(RepValue, GV->getValueType());
Victor Hernandez5d034492009-09-18 22:35:49 +0000902
903 // If there is a comparison against null, we will insert a global bool to
904 // keep track of whether the global was initialized yet or not.
905 GlobalVariable *InitBool =
Chris Lattner46b5c642009-11-06 04:27:31 +0000906 new GlobalVariable(Type::getInt1Ty(GV->getContext()), false,
Victor Hernandez5d034492009-09-18 22:35:49 +0000907 GlobalValue::InternalLinkage,
Chris Lattner46b5c642009-11-06 04:27:31 +0000908 ConstantInt::getFalse(GV->getContext()),
Hans Wennborgcbe34b42012-06-23 11:37:03 +0000909 GV->getName()+".init", GV->getThreadLocalMode());
Victor Hernandez5d034492009-09-18 22:35:49 +0000910 bool InitBoolUsed = false;
911
912 // Loop over all uses of GV, processing them in turn.
Chris Lattner7939f792010-02-25 22:33:52 +0000913 while (!GV->use_empty()) {
Chandler Carruthcdf47882014-03-09 03:16:01 +0000914 if (StoreInst *SI = dyn_cast<StoreInst>(GV->user_back())) {
Victor Hernandez5d034492009-09-18 22:35:49 +0000915 // The global is initialized when the store to it occurs.
Nick Lewycky52da72b2012-02-05 19:56:38 +0000916 new StoreInst(ConstantInt::getTrue(GV->getContext()), InitBool, false, 0,
Konstantin Zhuravlyovbb80d3e2017-07-11 22:23:00 +0000917 SI->getOrdering(), SI->getSyncScopeID(), SI);
Victor Hernandez5d034492009-09-18 22:35:49 +0000918 SI->eraseFromParent();
Chris Lattner7939f792010-02-25 22:33:52 +0000919 continue;
Victor Hernandez5d034492009-09-18 22:35:49 +0000920 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000921
Chandler Carruthcdf47882014-03-09 03:16:01 +0000922 LoadInst *LI = cast<LoadInst>(GV->user_back());
Chris Lattner7939f792010-02-25 22:33:52 +0000923 while (!LI->use_empty()) {
Chandler Carruthcdf47882014-03-09 03:16:01 +0000924 Use &LoadUse = *LI->use_begin();
925 ICmpInst *ICI = dyn_cast<ICmpInst>(LoadUse.getUser());
926 if (!ICI) {
Chris Lattner7939f792010-02-25 22:33:52 +0000927 LoadUse = RepValue;
928 continue;
929 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000930
Chris Lattner7939f792010-02-25 22:33:52 +0000931 // Replace the cmp X, 0 with a use of the bool value.
Nick Lewycky52da72b2012-02-05 19:56:38 +0000932 // Sink the load to where the compare was, if atomic rules allow us to.
933 Value *LV = new LoadInst(InitBool, InitBool->getName()+".val", false, 0,
Konstantin Zhuravlyovbb80d3e2017-07-11 22:23:00 +0000934 LI->getOrdering(), LI->getSyncScopeID(),
Nick Lewycky52da72b2012-02-05 19:56:38 +0000935 LI->isUnordered() ? (Instruction*)ICI : LI);
Chris Lattner7939f792010-02-25 22:33:52 +0000936 InitBoolUsed = true;
937 switch (ICI->getPredicate()) {
938 default: llvm_unreachable("Unknown ICmp Predicate!");
939 case ICmpInst::ICMP_ULT:
940 case ICmpInst::ICMP_SLT: // X < null -> always false
941 LV = ConstantInt::getFalse(GV->getContext());
942 break;
943 case ICmpInst::ICMP_ULE:
944 case ICmpInst::ICMP_SLE:
945 case ICmpInst::ICMP_EQ:
946 LV = BinaryOperator::CreateNot(LV, "notinit", ICI);
947 break;
948 case ICmpInst::ICMP_NE:
949 case ICmpInst::ICMP_UGE:
950 case ICmpInst::ICMP_SGE:
951 case ICmpInst::ICMP_UGT:
952 case ICmpInst::ICMP_SGT:
953 break; // no change.
954 }
955 ICI->replaceAllUsesWith(LV);
956 ICI->eraseFromParent();
957 }
958 LI->eraseFromParent();
959 }
Victor Hernandez5d034492009-09-18 22:35:49 +0000960
961 // If the initialization boolean was used, insert it, otherwise delete it.
962 if (!InitBoolUsed) {
963 while (!InitBool->use_empty()) // Delete initializations
Chandler Carruthcdf47882014-03-09 03:16:01 +0000964 cast<StoreInst>(InitBool->user_back())->eraseFromParent();
Victor Hernandez5d034492009-09-18 22:35:49 +0000965 delete InitBool;
966 } else
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +0000967 GV->getParent()->getGlobalList().insert(GV->getIterator(), InitBool);
Victor Hernandez5d034492009-09-18 22:35:49 +0000968
Chris Lattner7939f792010-02-25 22:33:52 +0000969 // Now the GV is dead, nuke it and the malloc.
Victor Hernandez5d034492009-09-18 22:35:49 +0000970 GV->eraseFromParent();
Victor Hernandez5d034492009-09-18 22:35:49 +0000971 CI->eraseFromParent();
972
973 // To further other optimizations, loop over all users of NewGV and try to
974 // constant prop them. This will promote GEP instructions with constant
975 // indices into GEP constant-exprs, which will allow global-opt to hack on it.
Rafael Espindola37dc9e12014-02-21 00:06:31 +0000976 ConstantPropUsersOf(NewGV, DL, TLI);
Victor Hernandez5d034492009-09-18 22:35:49 +0000977 if (RepValue != NewGV)
Rafael Espindola37dc9e12014-02-21 00:06:31 +0000978 ConstantPropUsersOf(RepValue, DL, TLI);
Victor Hernandez5d034492009-09-18 22:35:49 +0000979
980 return NewGV;
981}
982
James Molloyea31ad32015-11-13 11:05:07 +0000983/// Scan the use-list of V checking to make sure that there are no complex uses
984/// of V. We permit simple things like dereferencing or storing through the
985/// pointer, but not storing the pointer itself anywhere except into the global.
Gabor Greifa21bc0f2010-04-06 18:58:22 +0000986static bool ValueIsOnlyUsedLocallyOrStoredToOneGlobal(const Instruction *V,
987 const GlobalVariable *GV,
Craig Topper71b7b682014-08-21 05:55:13 +0000988 SmallPtrSetImpl<const PHINode*> &PHIs) {
Chandler Carruthcdf47882014-03-09 03:16:01 +0000989 for (const User *U : V->users()) {
990 const Instruction *Inst = cast<Instruction>(U);
Gabor Greif08355d62010-04-06 19:14:05 +0000991
Chris Lattnerf0eb5682008-12-15 21:08:54 +0000992 if (isa<LoadInst>(Inst) || isa<CmpInst>(Inst)) {
993 continue; // Fine, ignore.
994 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +0000995
Gabor Greifa21bc0f2010-04-06 18:58:22 +0000996 if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
Chris Lattnerc0677c02004-12-02 07:11:07 +0000997 if (SI->getOperand(0) == V && SI->getOperand(1) != GV)
998 return false; // Storing the pointer itself... bad.
Chris Lattnerf0eb5682008-12-15 21:08:54 +0000999 continue; // Otherwise, storing through it, or storing into GV... fine.
1000 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001001
Chris Lattnerb9801ff2010-04-10 18:19:22 +00001002 // Must index into the array and into the struct.
1003 if (isa<GetElementPtrInst>(Inst) && Inst->getNumOperands() >= 3) {
Chris Lattnerf0eb5682008-12-15 21:08:54 +00001004 if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(Inst, GV, PHIs))
Chris Lattnerc0677c02004-12-02 07:11:07 +00001005 return false;
Chris Lattnerf0eb5682008-12-15 21:08:54 +00001006 continue;
1007 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001008
Gabor Greifa21bc0f2010-04-06 18:58:22 +00001009 if (const PHINode *PN = dyn_cast<PHINode>(Inst)) {
Chris Lattner6eed0e72007-09-13 16:37:20 +00001010 // PHIs are ok if all uses are ok. Don't infinitely recurse through PHI
1011 // cycles.
David Blaikie70573dc2014-11-19 07:49:26 +00001012 if (PHIs.insert(PN).second)
Chris Lattner5d13fb532007-09-14 03:41:21 +00001013 if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(PN, GV, PHIs))
1014 return false;
Chris Lattnerf0eb5682008-12-15 21:08:54 +00001015 continue;
Chris Lattnerc0677c02004-12-02 07:11:07 +00001016 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001017
Gabor Greifa21bc0f2010-04-06 18:58:22 +00001018 if (const BitCastInst *BCI = dyn_cast<BitCastInst>(Inst)) {
Chris Lattnerf0eb5682008-12-15 21:08:54 +00001019 if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(BCI, GV, PHIs))
1020 return false;
1021 continue;
1022 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001023
Chris Lattnerf0eb5682008-12-15 21:08:54 +00001024 return false;
1025 }
Chris Lattnerc0677c02004-12-02 07:11:07 +00001026 return true;
Chris Lattnerc0677c02004-12-02 07:11:07 +00001027}
1028
James Molloyea31ad32015-11-13 11:05:07 +00001029/// The Alloc pointer is stored into GV somewhere. Transform all uses of the
1030/// allocation into loads from the global and uses of the resultant pointer.
1031/// Further, delete the store into GV. This assumes that these values pass the
Chris Lattner24d3d422006-09-30 23:32:09 +00001032/// 'ValueIsOnlyUsedLocallyOrStoredToOneGlobal' predicate.
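/// For illustration: given "%p = malloc(...); store %p, @G; ... use(%p)", the
/// store into @G is deleted and every remaining use of %p is rewritten to use
/// a fresh "load @G" inserted next to that use (or in the predecessor block
/// when the use is a PHI).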
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001033static void ReplaceUsesOfMallocWithGlobal(Instruction *Alloc,
Chris Lattner24d3d422006-09-30 23:32:09 +00001034 GlobalVariable *GV) {
1035 while (!Alloc->use_empty()) {
Chandler Carruthcdf47882014-03-09 03:16:01 +00001036 Instruction *U = cast<Instruction>(*Alloc->user_begin());
Chris Lattnerba98f892007-09-13 18:00:31 +00001037 Instruction *InsertPt = U;
Chris Lattner24d3d422006-09-30 23:32:09 +00001038 if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
1039 // If this is the store of the allocation into the global, remove it.
1040 if (SI->getOperand(1) == GV) {
1041 SI->eraseFromParent();
1042 continue;
1043 }
Chris Lattnerba98f892007-09-13 18:00:31 +00001044 } else if (PHINode *PN = dyn_cast<PHINode>(U)) {
1045 // Insert the load in the corresponding predecessor, not right before the
1046 // PHI.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001047 InsertPt = PN->getIncomingBlock(*Alloc->use_begin())->getTerminator();
Chris Lattner49e3bdc2008-12-15 21:44:34 +00001048 } else if (isa<BitCastInst>(U)) {
1049 // Must be bitcast between the malloc and store to initialize the global.
1050 ReplaceUsesOfMallocWithGlobal(U, GV);
1051 U->eraseFromParent();
1052 continue;
1053 } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(U)) {
1054 // If this is a "GEP bitcast" and the user is a store to the global, then
1055 // just process it as a bitcast.
1056 if (GEPI->hasAllZeroIndices() && GEPI->hasOneUse())
Chandler Carruthcdf47882014-03-09 03:16:01 +00001057 if (StoreInst *SI = dyn_cast<StoreInst>(GEPI->user_back()))
Chris Lattner49e3bdc2008-12-15 21:44:34 +00001058 if (SI->getOperand(1) == GV) {
1059 // Must be bitcast GEP between the malloc and store to initialize
1060 // the global.
1061 ReplaceUsesOfMallocWithGlobal(GEPI, GV);
1062 GEPI->eraseFromParent();
1063 continue;
1064 }
Chris Lattner24d3d422006-09-30 23:32:09 +00001065 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001066
Chris Lattner24d3d422006-09-30 23:32:09 +00001067 // Insert a load from the global, and use it instead of the malloc.
Chris Lattnerba98f892007-09-13 18:00:31 +00001068 Value *NL = new LoadInst(GV, GV->getName()+".val", InsertPt);
Chris Lattner24d3d422006-09-30 23:32:09 +00001069 U->replaceUsesOfWith(Alloc, NL);
1070 }
1071}
1072
James Molloyea31ad32015-11-13 11:05:07 +00001073/// Verify that all uses of V (a load, or a phi of a load) are simple enough to
1074/// perform heap SRA on. This permits GEP's that index through the array and
1075/// struct field, icmps of null, and PHIs.
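/// For illustration, each of these users of the loaded pointer %ld is fine:
///   %f = getelementptr %struct.T, %struct.T* %ld, i64 %i, i32 1
///   %c = icmp eq %struct.T* %ld, null
///   %p = phi %struct.T* [ %ld, %bb ], ...
/// Anything else (storing the pointer, passing it to a call, etc.) rejects it.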
Gabor Greif5d5db532010-04-01 08:21:08 +00001076static bool LoadUsesSimpleEnoughForHeapSRA(const Value *V,
Craig Topper71b7b682014-08-21 05:55:13 +00001077 SmallPtrSetImpl<const PHINode*> &LoadUsingPHIs,
1078 SmallPtrSetImpl<const PHINode*> &LoadUsingPHIsPerLoad) {
Chris Lattner56b55382008-12-16 21:24:51 +00001079 // We permit two users of the load: setcc comparing against the null
1080 // pointer, and a getelementptr of a specific form.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001081 for (const User *U : V->users()) {
1082 const Instruction *UI = cast<Instruction>(U);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001083
Chris Lattner56b55382008-12-16 21:24:51 +00001084 // Comparison against null is ok.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001085 if (const ICmpInst *ICI = dyn_cast<ICmpInst>(UI)) {
Chris Lattner56b55382008-12-16 21:24:51 +00001086 if (!isa<ConstantPointerNull>(ICI->getOperand(1)))
1087 return false;
1088 continue;
1089 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001090
Chris Lattner56b55382008-12-16 21:24:51 +00001091 // getelementptr is also ok, but only a simple form.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001092 if (const GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(UI)) {
Chris Lattner56b55382008-12-16 21:24:51 +00001093 // Must index into the array and into the struct.
1094 if (GEPI->getNumOperands() < 3)
1095 return false;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001096
Chris Lattner56b55382008-12-16 21:24:51 +00001097 // Otherwise the GEP is ok.
1098 continue;
1099 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001100
Chandler Carruthcdf47882014-03-09 03:16:01 +00001101 if (const PHINode *PN = dyn_cast<PHINode>(UI)) {
David Blaikie70573dc2014-11-19 07:49:26 +00001102 if (!LoadUsingPHIsPerLoad.insert(PN).second)
Evan Cheng83689442009-06-02 00:56:07 +00001103 // This means some phi nodes are dependent on each other.
1104 // Avoid infinite looping!
1105 return false;
David Blaikie70573dc2014-11-19 07:49:26 +00001106 if (!LoadUsingPHIs.insert(PN).second)
Evan Cheng83689442009-06-02 00:56:07 +00001107 // If we have already analyzed this PHI, then it is safe.
Chris Lattner56b55382008-12-16 21:24:51 +00001108 continue;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001109
Chris Lattner222ef4c2008-12-17 05:28:49 +00001110 // Make sure all uses of the PHI are simple enough to transform.
Evan Cheng83689442009-06-02 00:56:07 +00001111 if (!LoadUsesSimpleEnoughForHeapSRA(PN,
1112 LoadUsingPHIs, LoadUsingPHIsPerLoad))
Chris Lattner56b55382008-12-16 21:24:51 +00001113 return false;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001114
Chris Lattner56b55382008-12-16 21:24:51 +00001115 continue;
Chris Lattner24d3d422006-09-30 23:32:09 +00001116 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001117
Chris Lattner56b55382008-12-16 21:24:51 +00001118 // Otherwise we don't know what this is, not ok.
1119 return false;
1120 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001121
Chris Lattner56b55382008-12-16 21:24:51 +00001122 return true;
1123}
1124
James Molloyea31ad32015-11-13 11:05:07 +00001125/// If all users of values loaded from GV are simple enough to perform HeapSRA,
1126/// return true.
Gabor Greif5d5db532010-04-01 08:21:08 +00001127static bool AllGlobalLoadUsesSimpleEnoughForHeapSRA(const GlobalVariable *GV,
Victor Hernandez5d034492009-09-18 22:35:49 +00001128 Instruction *StoredVal) {
Gabor Greif5d5db532010-04-01 08:21:08 +00001129 SmallPtrSet<const PHINode*, 32> LoadUsingPHIs;
1130 SmallPtrSet<const PHINode*, 32> LoadUsingPHIsPerLoad;
Chandler Carruthcdf47882014-03-09 03:16:01 +00001131 for (const User *U : GV->users())
1132 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
Evan Cheng83689442009-06-02 00:56:07 +00001133 if (!LoadUsesSimpleEnoughForHeapSRA(LI, LoadUsingPHIs,
1134 LoadUsingPHIsPerLoad))
Chris Lattner56b55382008-12-16 21:24:51 +00001135 return false;
Evan Cheng83689442009-06-02 00:56:07 +00001136 LoadUsingPHIsPerLoad.clear();
1137 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001138
Chris Lattner222ef4c2008-12-17 05:28:49 +00001139 // If we reach here, we know that all uses of the loads and transitive uses
1140 // (through PHI nodes) are simple enough to transform. However, we don't know
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001141 // that all inputs to the PHI nodes are in the same equivalence sets.
Chris Lattner222ef4c2008-12-17 05:28:49 +00001142 // Check to verify that all operands of the PHIs are either PHIs that can be
1143 // transformed, loads from GV, or the stored value itself.
Craig Topper46276792014-08-24 23:23:06 +00001144 for (const PHINode *PN : LoadUsingPHIs) {
Chris Lattner222ef4c2008-12-17 05:28:49 +00001145 for (unsigned op = 0, e = PN->getNumIncomingValues(); op != e; ++op) {
1146 Value *InVal = PN->getIncomingValue(op);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001147
Chris Lattner222ef4c2008-12-17 05:28:49 +00001148 // PHI of the stored value itself is ok.
Victor Hernandez5d034492009-09-18 22:35:49 +00001149 if (InVal == StoredVal) continue;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001150
Gabor Greif5d5db532010-04-01 08:21:08 +00001151 if (const PHINode *InPN = dyn_cast<PHINode>(InVal)) {
Chris Lattner222ef4c2008-12-17 05:28:49 +00001152 // One of the PHIs in our set is (optimistically) ok.
1153 if (LoadUsingPHIs.count(InPN))
1154 continue;
1155 return false;
1156 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001157
Chris Lattner222ef4c2008-12-17 05:28:49 +00001158 // Load from GV is ok.
Gabor Greif5d5db532010-04-01 08:21:08 +00001159 if (const LoadInst *LI = dyn_cast<LoadInst>(InVal))
Chris Lattner222ef4c2008-12-17 05:28:49 +00001160 if (LI->getOperand(0) == GV)
1161 continue;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001162
Chris Lattner222ef4c2008-12-17 05:28:49 +00001163 // UNDEF? NULL?
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001164
Chris Lattner222ef4c2008-12-17 05:28:49 +00001165 // Anything else is rejected.
1166 return false;
1167 }
1168 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001169
Chris Lattner24d3d422006-09-30 23:32:09 +00001170 return true;
1171}
1172
Chris Lattner222ef4c2008-12-17 05:28:49 +00001173static Value *GetHeapSROAValue(Value *V, unsigned FieldNo,
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001174 DenseMap<Value *, std::vector<Value *>> &InsertedScalarizedValues,
1175 std::vector<std::pair<PHINode *, unsigned>> &PHIsToRewrite) {
1176 std::vector<Value *> &FieldVals = InsertedScalarizedValues[V];
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001177
Chris Lattner222ef4c2008-12-17 05:28:49 +00001178 if (FieldNo >= FieldVals.size())
1179 FieldVals.resize(FieldNo+1);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001180
Chris Lattner222ef4c2008-12-17 05:28:49 +00001181 // If we already have this value, just reuse the previously scalarized
1182 // version.
1183 if (Value *FieldVal = FieldVals[FieldNo])
1184 return FieldVal;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001185
Chris Lattner222ef4c2008-12-17 05:28:49 +00001186 // Depending on what instruction this is, we have several cases.
1187 Value *Result;
1188 if (LoadInst *LI = dyn_cast<LoadInst>(V)) {
1189 // This is a scalarized version of the load from the global. Just create
1190 // a new Load of the scalarized global.
1191 Result = new LoadInst(GetHeapSROAValue(LI->getOperand(0), FieldNo,
1192 InsertedScalarizedValues,
Chris Lattner46b5c642009-11-06 04:27:31 +00001193 PHIsToRewrite),
Daniel Dunbar132f7832009-07-30 17:37:43 +00001194 LI->getName()+".f"+Twine(FieldNo), LI);
David Blaikie741c8f82015-03-14 01:53:18 +00001195 } else {
1196 PHINode *PN = cast<PHINode>(V);
Chris Lattner222ef4c2008-12-17 05:28:49 +00001197 // PN's type is pointer to struct. Make a new PHI of pointer to struct
1198 // field.
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001199
Matt Arsenaultfcd74012014-04-23 20:36:10 +00001200 PointerType *PTy = cast<PointerType>(PN->getType());
1201 StructType *ST = cast<StructType>(PTy->getElementType());
1202
1203 unsigned AS = PTy->getAddressSpace();
Jay Foade0938d82011-03-30 11:19:20 +00001204 PHINode *NewPN =
Matt Arsenaultfcd74012014-04-23 20:36:10 +00001205 PHINode::Create(PointerType::get(ST->getElementType(FieldNo), AS),
Jay Foad52131342011-03-30 11:28:46 +00001206 PN->getNumIncomingValues(),
Daniel Dunbar132f7832009-07-30 17:37:43 +00001207 PN->getName()+".f"+Twine(FieldNo), PN);
Jay Foade0938d82011-03-30 11:19:20 +00001208 Result = NewPN;
Chris Lattner222ef4c2008-12-17 05:28:49 +00001209 PHIsToRewrite.push_back(std::make_pair(PN, FieldNo));
Chris Lattner222ef4c2008-12-17 05:28:49 +00001210 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001211
Chris Lattner222ef4c2008-12-17 05:28:49 +00001212 return FieldVals[FieldNo] = Result;
Chris Lattnerba98f892007-09-13 18:00:31 +00001213}
1214
James Molloyea31ad32015-11-13 11:05:07 +00001215/// Given a load instruction and a value derived from the load, rewrite the
1216/// derived value to use the HeapSRoA'd load.
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001217static void RewriteHeapSROALoadUser(Instruction *LoadUser,
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001218 DenseMap<Value *, std::vector<Value *>> &InsertedScalarizedValues,
1219 std::vector<std::pair<PHINode *, unsigned>> &PHIsToRewrite) {
Chris Lattnerf315d4f2007-09-13 17:29:05 +00001220 // If this is a comparison against null, handle it.
1221 if (ICmpInst *SCI = dyn_cast<ICmpInst>(LoadUser)) {
1222 assert(isa<ConstantPointerNull>(SCI->getOperand(1)));
1223 // If we have a setcc of the loaded pointer, we can use a setcc of any
1224 // field.
Chris Lattner222ef4c2008-12-17 05:28:49 +00001225 Value *NPtr = GetHeapSROAValue(SCI->getOperand(0), 0,
Chris Lattner46b5c642009-11-06 04:27:31 +00001226 InsertedScalarizedValues, PHIsToRewrite);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001227
Owen Anderson1e5f00e2009-07-09 23:48:35 +00001228 Value *New = new ICmpInst(SCI, SCI->getPredicate(), NPtr,
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001229 Constant::getNullValue(NPtr->getType()),
Owen Anderson1e5f00e2009-07-09 23:48:35 +00001230 SCI->getName());
Chris Lattnerf315d4f2007-09-13 17:29:05 +00001231 SCI->replaceAllUsesWith(New);
1232 SCI->eraseFromParent();
1233 return;
1234 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001235
Chris Lattner222ef4c2008-12-17 05:28:49 +00001236 // Handle 'getelementptr Ptr, Idx, i32 FieldNo ...'
Chris Lattnerba98f892007-09-13 18:00:31 +00001237 if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(LoadUser)) {
1238 assert(GEPI->getNumOperands() >= 3 && isa<ConstantInt>(GEPI->getOperand(2))
1239 && "Unexpected GEPI!");
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001240
Chris Lattnerba98f892007-09-13 18:00:31 +00001241 // Load the pointer for this field.
1242 unsigned FieldNo = cast<ConstantInt>(GEPI->getOperand(2))->getZExtValue();
Chris Lattner222ef4c2008-12-17 05:28:49 +00001243 Value *NewPtr = GetHeapSROAValue(GEPI->getOperand(0), FieldNo,
Chris Lattner46b5c642009-11-06 04:27:31 +00001244 InsertedScalarizedValues, PHIsToRewrite);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001245
Chris Lattnerba98f892007-09-13 18:00:31 +00001246 // Create the new GEP idx vector.
1247 SmallVector<Value*, 8> GEPIdx;
1248 GEPIdx.push_back(GEPI->getOperand(1));
1249 GEPIdx.append(GEPI->op_begin()+3, GEPI->op_end());
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001250
David Blaikie22319eb2015-03-14 19:24:04 +00001251 Value *NGEPI = GetElementPtrInst::Create(GEPI->getResultElementType(), NewPtr, GEPIdx,
Gabor Greife9ecc682008-04-06 20:25:17 +00001252 GEPI->getName(), GEPI);
Chris Lattnerba98f892007-09-13 18:00:31 +00001253 GEPI->replaceAllUsesWith(NGEPI);
1254 GEPI->eraseFromParent();
1255 return;
1256 }
Chris Lattner011f91b2007-09-13 21:31:36 +00001257
Chris Lattner222ef4c2008-12-17 05:28:49 +00001258 // Recursively transform the users of PHI nodes. This will lazily create the
1259 // PHIs that are needed for individual elements. Keep track of what PHIs we
1260 // see in InsertedScalarizedValues so that we don't get infinite loops (very
1261 // antisocial). If the PHI is already in InsertedScalarizedValues, it has
1262 // already been seen first by another load, so its uses have already been
1263 // processed.
1264 PHINode *PN = cast<PHINode>(LoadUser);
Chris Lattner5cf753c2011-07-21 06:21:31 +00001265 if (!InsertedScalarizedValues.insert(std::make_pair(PN,
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001266 std::vector<Value *>())).second)
Chris Lattner5cf753c2011-07-21 06:21:31 +00001267 return;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001268
Chris Lattner222ef4c2008-12-17 05:28:49 +00001269 // If this is the first time we've seen this PHI, recursively process all
1270 // users.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001271 for (auto UI = PN->user_begin(), E = PN->user_end(); UI != E;) {
Chris Lattner0cdf5232008-12-17 05:42:08 +00001272 Instruction *User = cast<Instruction>(*UI++);
Chris Lattner46b5c642009-11-06 04:27:31 +00001273 RewriteHeapSROALoadUser(User, InsertedScalarizedValues, PHIsToRewrite);
Chris Lattner0cdf5232008-12-17 05:42:08 +00001274 }
Chris Lattnerf315d4f2007-09-13 17:29:05 +00001275}
1276
James Molloyea31ad32015-11-13 11:05:07 +00001277/// We are performing Heap SRoA on a global. Ptr is a value loaded from the
1278/// global. Eliminate all uses of Ptr, making them use FieldGlobals instead.
1279/// All uses of loaded values satisfy AllGlobalLoadUsesSimpleEnoughForHeapSRA.
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001280static void RewriteUsesOfLoadForHeapSRoA(LoadInst *Load,
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001281 DenseMap<Value *, std::vector<Value *>> &InsertedScalarizedValues,
1282 std::vector<std::pair<PHINode *, unsigned> > &PHIsToRewrite) {
Chandler Carruthcdf47882014-03-09 03:16:01 +00001283 for (auto UI = Load->user_begin(), E = Load->user_end(); UI != E;) {
Chris Lattner0cdf5232008-12-17 05:42:08 +00001284 Instruction *User = cast<Instruction>(*UI++);
Chris Lattner46b5c642009-11-06 04:27:31 +00001285 RewriteHeapSROALoadUser(User, InsertedScalarizedValues, PHIsToRewrite);
Chris Lattner0cdf5232008-12-17 05:42:08 +00001286 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001287
Chris Lattner222ef4c2008-12-17 05:28:49 +00001288 if (Load->use_empty()) {
1289 Load->eraseFromParent();
1290 InsertedScalarizedValues.erase(Load);
1291 }
Chris Lattner24d3d422006-09-30 23:32:09 +00001292}
1293
James Molloyea31ad32015-11-13 11:05:07 +00001294/// CI is an allocation of an array of structures. Break it up into multiple
1295/// allocations of arrays of the fields.
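/// For illustration (names are only suggestive): if @G holds the result of
/// "malloc({i32, double} x N)", it is replaced by @G.f0 and @G.f1 holding the
/// results of "malloc(i32 x N)" and "malloc(double x N)", and all loads and
/// GEPs of the old pointer are rewritten to go through the per-field globals.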
Victor Hernandezf3db9152009-11-07 00:16:28 +00001296static GlobalVariable *PerformHeapAllocSRoA(GlobalVariable *GV, CallInst *CI,
Mehdi Amini46a43552015-03-04 18:43:29 +00001297 Value *NElems, const DataLayout &DL,
Benjamin Kramer8bcc9712012-08-29 15:32:21 +00001298 const TargetLibraryInfo *TLI) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001299 LLVM_DEBUG(dbgs() << "SROA HEAP ALLOC: " << *GV << " MALLOC = " << *CI
1300 << '\n');
Benjamin Kramer8bcc9712012-08-29 15:32:21 +00001301 Type *MAT = getMallocAllocatedType(CI, TLI);
Chris Lattner229907c2011-07-18 04:54:35 +00001302 StructType *STy = cast<StructType>(MAT);
Victor Hernandez5d034492009-09-18 22:35:49 +00001303
1304 // There is guaranteed to be at least one use of the malloc (storing
1305 // it into GV). If there are other uses, change them to be uses of
1306 // the global to simplify later code. This also deletes the store
1307 // into GV.
Victor Hernandezf3db9152009-11-07 00:16:28 +00001308 ReplaceUsesOfMallocWithGlobal(CI, GV);
1309
Victor Hernandez5d034492009-09-18 22:35:49 +00001310 // Okay, at this point, there are no users of the malloc. Insert N
1311 // new mallocs at the same place as CI, and N globals.
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001312 std::vector<Value *> FieldGlobals;
1313 std::vector<Value *> FieldMallocs;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001314
David Majnemerfadc6db2016-04-29 08:07:22 +00001315 SmallVector<OperandBundleDef, 1> OpBundles;
1316 CI->getOperandBundlesAsDefs(OpBundles);
1317
Matt Arsenaultfcd74012014-04-23 20:36:10 +00001318 unsigned AS = GV->getType()->getPointerAddressSpace();
Victor Hernandez5d034492009-09-18 22:35:49 +00001319 for (unsigned FieldNo = 0, e = STy->getNumElements(); FieldNo != e;++FieldNo){
Chris Lattner229907c2011-07-18 04:54:35 +00001320 Type *FieldTy = STy->getElementType(FieldNo);
Matt Arsenaultfcd74012014-04-23 20:36:10 +00001321 PointerType *PFieldTy = PointerType::get(FieldTy, AS);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001322
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001323 GlobalVariable *NGV = new GlobalVariable(
1324 *GV->getParent(), PFieldTy, false, GlobalValue::InternalLinkage,
1325 Constant::getNullValue(PFieldTy), GV->getName() + ".f" + Twine(FieldNo),
1326 nullptr, GV->getThreadLocalMode());
Sergei Larin94be2de2016-01-22 21:18:20 +00001327 NGV->copyAttributesFrom(GV);
Victor Hernandez5d034492009-09-18 22:35:49 +00001328 FieldGlobals.push_back(NGV);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001329
Mehdi Amini46a43552015-03-04 18:43:29 +00001330 unsigned TypeSize = DL.getTypeAllocSize(FieldTy);
Chris Lattner229907c2011-07-18 04:54:35 +00001331 if (StructType *ST = dyn_cast<StructType>(FieldTy))
Mehdi Amini46a43552015-03-04 18:43:29 +00001332 TypeSize = DL.getStructLayout(ST)->getSizeInBytes();
1333 Type *IntPtrTy = DL.getIntPtrType(CI->getType());
Victor Hernandezf3db9152009-11-07 00:16:28 +00001334 Value *NMI = CallInst::CreateMalloc(CI, IntPtrTy, FieldTy,
1335 ConstantInt::get(IntPtrTy, TypeSize),
David Majnemerfadc6db2016-04-29 08:07:22 +00001336 NElems, OpBundles, nullptr,
Victor Hernandezf3db9152009-11-07 00:16:28 +00001337 CI->getName() + ".f" + Twine(FieldNo));
Chris Lattner0521c092010-02-26 18:23:13 +00001338 FieldMallocs.push_back(NMI);
Victor Hernandezf3db9152009-11-07 00:16:28 +00001339 new StoreInst(NMI, NGV, CI);
Victor Hernandez5d034492009-09-18 22:35:49 +00001340 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001341
Victor Hernandez5d034492009-09-18 22:35:49 +00001342 // The tricky aspect of this transformation is handling the case when malloc
1343 // fails. In the original code, malloc failing would set the result pointer
1344 // of malloc to null. In this case, some mallocs could succeed and others
1345 // could fail. As such, we emit code that looks like this:
1346 // F0 = malloc(field0)
1347 // F1 = malloc(field1)
1348 // F2 = malloc(field2)
1349 // if (F0 == 0 || F1 == 0 || F2 == 0) {
1350 // if (F0) { free(F0); F0 = 0; }
1351 // if (F1) { free(F1); F1 = 0; }
1352 // if (F2) { free(F2); F2 = 0; }
1353 // }
Victor Hernandezfcc77b12009-11-10 08:32:25 +00001354 // The malloc can also fail if its argument is too large.
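  // The "isneg" compare below folds that case in: a requested size with the
  // sign bit set is treated as a failed allocation and seeds the running "or"
  // of the per-field null checks.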
Gabor Greif218f5542010-06-24 14:42:01 +00001355 Constant *ConstantZero = ConstantInt::get(CI->getArgOperand(0)->getType(), 0);
1356 Value *RunningOr = new ICmpInst(CI, ICmpInst::ICMP_SLT, CI->getArgOperand(0),
Victor Hernandezfcc77b12009-11-10 08:32:25 +00001357 ConstantZero, "isneg");
Victor Hernandez5d034492009-09-18 22:35:49 +00001358 for (unsigned i = 0, e = FieldMallocs.size(); i != e; ++i) {
Victor Hernandezf3db9152009-11-07 00:16:28 +00001359 Value *Cond = new ICmpInst(CI, ICmpInst::ICMP_EQ, FieldMallocs[i],
1360 Constant::getNullValue(FieldMallocs[i]->getType()),
1361 "isnull");
Victor Hernandezfcc77b12009-11-10 08:32:25 +00001362 RunningOr = BinaryOperator::CreateOr(RunningOr, Cond, "tmp", CI);
Victor Hernandez5d034492009-09-18 22:35:49 +00001363 }
1364
1365 // Split the basic block at the old malloc.
Victor Hernandezf3db9152009-11-07 00:16:28 +00001366 BasicBlock *OrigBB = CI->getParent();
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00001367 BasicBlock *ContBB =
1368 OrigBB->splitBasicBlock(CI->getIterator(), "malloc_cont");
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001369
Victor Hernandez5d034492009-09-18 22:35:49 +00001370 // Create the block to check the first condition. Put all these blocks at the
1371 // end of the function as they are unlikely to be executed.
Chris Lattner46b5c642009-11-06 04:27:31 +00001372 BasicBlock *NullPtrBlock = BasicBlock::Create(OrigBB->getContext(),
1373 "malloc_ret_null",
Victor Hernandez5d034492009-09-18 22:35:49 +00001374 OrigBB->getParent());
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001375
Victor Hernandez5d034492009-09-18 22:35:49 +00001376 // Remove the uncond branch from OrigBB to ContBB, turning it into a cond
1377 // branch on RunningOr.
1378 OrigBB->getTerminator()->eraseFromParent();
1379 BranchInst::Create(NullPtrBlock, ContBB, RunningOr, OrigBB);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001380
Victor Hernandez5d034492009-09-18 22:35:49 +00001381 // Within the NullPtrBlock, we need to emit a comparison and branch for each
1382 // pointer, because some may be null while others are not.
1383 for (unsigned i = 0, e = FieldGlobals.size(); i != e; ++i) {
1384 Value *GVVal = new LoadInst(FieldGlobals[i], "tmp", NullPtrBlock);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001385 Value *Cmp = new ICmpInst(*NullPtrBlock, ICmpInst::ICMP_NE, GVVal,
Benjamin Kramer547b6c52011-09-27 20:39:19 +00001386 Constant::getNullValue(GVVal->getType()));
Chris Lattner46b5c642009-11-06 04:27:31 +00001387 BasicBlock *FreeBlock = BasicBlock::Create(Cmp->getContext(), "free_it",
Victor Hernandez5d034492009-09-18 22:35:49 +00001388 OrigBB->getParent());
Chris Lattner46b5c642009-11-06 04:27:31 +00001389 BasicBlock *NextBlock = BasicBlock::Create(Cmp->getContext(), "next",
Victor Hernandez5d034492009-09-18 22:35:49 +00001390 OrigBB->getParent());
Victor Hernandeze2971492009-10-24 04:23:03 +00001391 Instruction *BI = BranchInst::Create(FreeBlock, NextBlock,
1392 Cmp, NullPtrBlock);
Victor Hernandez5d034492009-09-18 22:35:49 +00001393
1394 // Fill in FreeBlock.
David Majnemerfadc6db2016-04-29 08:07:22 +00001395 CallInst::CreateFree(GVVal, OpBundles, BI);
Victor Hernandez5d034492009-09-18 22:35:49 +00001396 new StoreInst(Constant::getNullValue(GVVal->getType()), FieldGlobals[i],
1397 FreeBlock);
1398 BranchInst::Create(NextBlock, FreeBlock);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001399
Victor Hernandez5d034492009-09-18 22:35:49 +00001400 NullPtrBlock = NextBlock;
1401 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001402
Victor Hernandez5d034492009-09-18 22:35:49 +00001403 BranchInst::Create(ContBB, NullPtrBlock);
Victor Hernandezf3db9152009-11-07 00:16:28 +00001404
1405 // CI is no longer needed, remove it.
Victor Hernandez5d034492009-09-18 22:35:49 +00001406 CI->eraseFromParent();
1407
James Molloyea31ad32015-11-13 11:05:07 +00001408 /// As we process loads, if we can't immediately update all uses of the load,
1409 /// keep track of what scalarized loads are inserted for a given load.
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001410 DenseMap<Value *, std::vector<Value *>> InsertedScalarizedValues;
Victor Hernandez5d034492009-09-18 22:35:49 +00001411 InsertedScalarizedValues[GV] = FieldGlobals;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001412
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001413 std::vector<std::pair<PHINode *, unsigned>> PHIsToRewrite;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001414
Victor Hernandez5d034492009-09-18 22:35:49 +00001415 // Okay, the malloc site is completely handled. All of the uses of GV are now
1416 // loads, and all uses of those loads are simple. Rewrite them to use loads
1417 // of the per-field globals instead.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001418 for (auto UI = GV->user_begin(), E = GV->user_end(); UI != E;) {
Victor Hernandez5d034492009-09-18 22:35:49 +00001419 Instruction *User = cast<Instruction>(*UI++);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001420
Victor Hernandez5d034492009-09-18 22:35:49 +00001421 if (LoadInst *LI = dyn_cast<LoadInst>(User)) {
Chris Lattner46b5c642009-11-06 04:27:31 +00001422 RewriteUsesOfLoadForHeapSRoA(LI, InsertedScalarizedValues, PHIsToRewrite);
Victor Hernandez5d034492009-09-18 22:35:49 +00001423 continue;
1424 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001425
Victor Hernandez5d034492009-09-18 22:35:49 +00001426 // Must be a store of null.
1427 StoreInst *SI = cast<StoreInst>(User);
1428 assert(isa<ConstantPointerNull>(SI->getOperand(0)) &&
1429 "Unexpected heap-sra user!");
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001430
Victor Hernandez5d034492009-09-18 22:35:49 +00001431 // Insert a store of null into each global.
1432 for (unsigned i = 0, e = FieldGlobals.size(); i != e; ++i) {
Manuel Jacob5f6eaac2016-01-16 20:30:46 +00001433 Type *ValTy = cast<GlobalValue>(FieldGlobals[i])->getValueType();
1434 Constant *Null = Constant::getNullValue(ValTy);
Victor Hernandez5d034492009-09-18 22:35:49 +00001435 new StoreInst(Null, FieldGlobals[i], SI);
1436 }
1437 // Erase the original store.
1438 SI->eraseFromParent();
1439 }
1440
1441 // While we have PHIs that are interesting to rewrite, do it.
1442 while (!PHIsToRewrite.empty()) {
1443 PHINode *PN = PHIsToRewrite.back().first;
1444 unsigned FieldNo = PHIsToRewrite.back().second;
1445 PHIsToRewrite.pop_back();
1446 PHINode *FieldPN = cast<PHINode>(InsertedScalarizedValues[PN][FieldNo]);
1447 assert(FieldPN->getNumIncomingValues() == 0 &&"Already processed this phi");
1448
1449 // Add all the incoming values. This can materialize more phis.
1450 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
1451 Value *InVal = PN->getIncomingValue(i);
1452 InVal = GetHeapSROAValue(InVal, FieldNo, InsertedScalarizedValues,
Chris Lattner46b5c642009-11-06 04:27:31 +00001453 PHIsToRewrite);
Victor Hernandez5d034492009-09-18 22:35:49 +00001454 FieldPN->addIncoming(InVal, PN->getIncomingBlock(i));
1455 }
1456 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001457
Victor Hernandez5d034492009-09-18 22:35:49 +00001458 // Drop all inter-phi links and any loads that made it this far.
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001459 for (DenseMap<Value *, std::vector<Value *>>::iterator
Victor Hernandez5d034492009-09-18 22:35:49 +00001460 I = InsertedScalarizedValues.begin(), E = InsertedScalarizedValues.end();
1461 I != E; ++I) {
1462 if (PHINode *PN = dyn_cast<PHINode>(I->first))
1463 PN->dropAllReferences();
1464 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1465 LI->dropAllReferences();
1466 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001467
Victor Hernandez5d034492009-09-18 22:35:49 +00001468 // Delete all the phis and loads now that inter-references are dead.
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00001469 for (DenseMap<Value *, std::vector<Value *>>::iterator
Victor Hernandez5d034492009-09-18 22:35:49 +00001470 I = InsertedScalarizedValues.begin(), E = InsertedScalarizedValues.end();
1471 I != E; ++I) {
1472 if (PHINode *PN = dyn_cast<PHINode>(I->first))
1473 PN->eraseFromParent();
1474 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1475 LI->eraseFromParent();
1476 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001477
Victor Hernandez5d034492009-09-18 22:35:49 +00001478 // The old global is now dead, remove it.
1479 GV->eraseFromParent();
1480
1481 ++NumHeapSRA;
1482 return cast<GlobalVariable>(FieldGlobals[0]);
1483}
1484
James Molloyea31ad32015-11-13 11:05:07 +00001485/// This function is called when we see a pointer global variable with a single
1486/// value stored into it that is a malloc or a cast of malloc.
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001487static bool tryToOptimizeStoreOfMallocToGlobal(GlobalVariable *GV, CallInst *CI,
Chris Lattner229907c2011-07-18 04:54:35 +00001488 Type *AllocTy,
Nick Lewycky52da72b2012-02-05 19:56:38 +00001489 AtomicOrdering Ordering,
Mehdi Amini46a43552015-03-04 18:43:29 +00001490 const DataLayout &DL,
Nick Lewyckycf6aae62012-02-12 01:13:18 +00001491 TargetLibraryInfo *TLI) {
Victor Hernandez5d034492009-09-18 22:35:49 +00001492 // If this is a malloc of an abstract type, don't touch it.
1493 if (!AllocTy->isSized())
1494 return false;
1495
1496 // We can't optimize this global unless all uses of it are *known* to be
1497 // of the malloc value, not of the null initializer value (consider a use
1498 // that compares the global's value against zero to see if the malloc has
1499 // been reached). To do this, we check to see if all uses of the global
1500 // would trap if the global were null: this proves that they must all
1501 // happen after the malloc.
1502 if (!AllUsesOfLoadedValueWillTrapIfNull(GV))
1503 return false;
1504
1505 // We can't optimize this if the malloc itself is used in a complex way,
1506 // for example, being stored into multiple globals. This allows the
Nick Lewyckybbd11562012-02-05 19:48:37 +00001507 // malloc to be stored into the specified global, loaded, icmp'd, and
Victor Hernandez5d034492009-09-18 22:35:49 +00001508 // GEP'd. These are all uses that we know how to rewrite to go through
1509 // the global instead.
Evan Cheng21b588b2010-04-14 20:52:55 +00001510 SmallPtrSet<const PHINode*, 8> PHIs;
1511 if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(CI, GV, PHIs))
1512 return false;
Victor Hernandez5d034492009-09-18 22:35:49 +00001513
1514 // If we have a global that is only initialized with a fixed size malloc,
1515 // transform the program to use global memory instead of malloc'd memory.
1516 // This eliminates dynamic allocation, avoids an indirection accessing the
1517 // data, and exposes the resultant global to further GlobalOpt.
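  // For illustration: "@G = internal global i32* null" that is initialized
  // exactly once from "malloc(100 * sizeof(i32))" can be rewritten to use a
  // new internal global array of 100 i32s (plus an init flag if one is
  // needed), removing the dynamic allocation entirely.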
Victor Hernandez264da322009-10-16 23:12:25 +00001518 // We cannot optimize the malloc if we cannot determine malloc array size.
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001519 Value *NElems = getMallocArraySize(CI, DL, TLI, true);
Evan Cheng21b588b2010-04-14 20:52:55 +00001520 if (!NElems)
1521 return false;
Victor Hernandez5d034492009-09-18 22:35:49 +00001522
Evan Cheng21b588b2010-04-14 20:52:55 +00001523 if (ConstantInt *NElements = dyn_cast<ConstantInt>(NElems))
1524 // Restrict this transformation to only working on small allocations
1525 // (2048 bytes currently), as we don't want to introduce a 16M global or
1526 // something.
Mehdi Amini46a43552015-03-04 18:43:29 +00001527 if (NElements->getZExtValue() * DL.getTypeAllocSize(AllocTy) < 2048) {
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001528 OptimizeGlobalAddressOfMalloc(GV, CI, AllocTy, NElements, DL, TLI);
Evan Cheng21b588b2010-04-14 20:52:55 +00001529 return true;
Victor Hernandez5d034492009-09-18 22:35:49 +00001530 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001531
Evan Cheng21b588b2010-04-14 20:52:55 +00001532 // If the allocation is an array of structures, consider transforming this
1533 // into multiple malloc'd arrays, one for each field. This is basically
1534 // SRoA for malloc'd memory.
1535
JF Bastien800f87a2016-04-06 21:19:33 +00001536 if (Ordering != AtomicOrdering::NotAtomic)
Nick Lewycky52da72b2012-02-05 19:56:38 +00001537 return false;
1538
Evan Cheng21b588b2010-04-14 20:52:55 +00001539 // If this is an allocation of a fixed size array of structs, analyze as a
1540 // variable size array. malloc [100 x struct],1 -> malloc struct, 100
Gabor Greif218f5542010-06-24 14:42:01 +00001541 if (NElems == ConstantInt::get(CI->getArgOperand(0)->getType(), 1))
Chris Lattner229907c2011-07-18 04:54:35 +00001542 if (ArrayType *AT = dyn_cast<ArrayType>(AllocTy))
Evan Cheng21b588b2010-04-14 20:52:55 +00001543 AllocTy = AT->getElementType();
Gabor Greif218f5542010-06-24 14:42:01 +00001544
Chris Lattner229907c2011-07-18 04:54:35 +00001545 StructType *AllocSTy = dyn_cast<StructType>(AllocTy);
Evan Cheng21b588b2010-04-14 20:52:55 +00001546 if (!AllocSTy)
1547 return false;
1548
1549 // If the structure has an unreasonable number of fields, leave it
1550 // alone.
1551 if (AllocSTy->getNumElements() <= 16 && AllocSTy->getNumElements() != 0 &&
1552 AllGlobalLoadUsesSimpleEnoughForHeapSRA(GV, CI)) {
1553
1554 // If this is a fixed size array, transform the Malloc to be an alloc of
1555 // structs. malloc [100 x struct],1 -> malloc struct, 100
Benjamin Kramer8bcc9712012-08-29 15:32:21 +00001556 if (ArrayType *AT = dyn_cast<ArrayType>(getMallocAllocatedType(CI, TLI))) {
Mehdi Amini46a43552015-03-04 18:43:29 +00001557 Type *IntPtrTy = DL.getIntPtrType(CI->getType());
1558 unsigned TypeSize = DL.getStructLayout(AllocSTy)->getSizeInBytes();
Evan Cheng21b588b2010-04-14 20:52:55 +00001559 Value *AllocSize = ConstantInt::get(IntPtrTy, TypeSize);
1560 Value *NumElements = ConstantInt::get(IntPtrTy, AT->getNumElements());
David Majnemerfadc6db2016-04-29 08:07:22 +00001561 SmallVector<OperandBundleDef, 1> OpBundles;
1562 CI->getOperandBundlesAsDefs(OpBundles);
1563 Instruction *Malloc =
1564 CallInst::CreateMalloc(CI, IntPtrTy, AllocSTy, AllocSize, NumElements,
1565 OpBundles, nullptr, CI->getName());
Evan Cheng21b588b2010-04-14 20:52:55 +00001566 Instruction *Cast = new BitCastInst(Malloc, CI->getType(), "tmp", CI);
1567 CI->replaceAllUsesWith(Cast);
1568 CI->eraseFromParent();
Nuno Lopes9792d682012-06-22 00:25:01 +00001569 if (BitCastInst *BCI = dyn_cast<BitCastInst>(Malloc))
1570 CI = cast<CallInst>(BCI->getOperand(0));
1571 else
Nuno Lopes0b60ebb2012-06-22 00:29:58 +00001572 CI = cast<CallInst>(Malloc);
Evan Cheng21b588b2010-04-14 20:52:55 +00001573 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001574
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001575 PerformHeapAllocSRoA(GV, CI, getMallocArraySize(CI, DL, TLI, true), DL,
1576 TLI);
Evan Cheng21b588b2010-04-14 20:52:55 +00001577 return true;
Victor Hernandez5d034492009-09-18 22:35:49 +00001578 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001579
Victor Hernandez5d034492009-09-18 22:35:49 +00001580 return false;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001581}
Victor Hernandez5d034492009-09-18 22:35:49 +00001582
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001583// Try to optimize globals based on the knowledge that only one value (besides
1584// its initializer) is ever stored to the global.
1585static bool optimizeOnceStoredGlobal(GlobalVariable *GV, Value *StoredOnceVal,
Nick Lewycky52da72b2012-02-05 19:56:38 +00001586 AtomicOrdering Ordering,
Mehdi Amini46a43552015-03-04 18:43:29 +00001587 const DataLayout &DL,
Rafael Espindolaaeff8a92014-02-24 23:12:18 +00001588 TargetLibraryInfo *TLI) {
Chris Lattner1c731fa2008-12-15 21:20:32 +00001589 // Ignore no-op GEPs and bitcasts.
1590 StoredOnceVal = StoredOnceVal->stripPointerCasts();
Chris Lattner09a52722004-10-09 21:48:45 +00001591
Chris Lattnere42eb312004-10-10 23:14:11 +00001592 // If we are dealing with a pointer global that is initialized to null and
1593 // only has one (non-null) value stored into it, then we can optimize any
1594 // users of the loaded value (often calls and loads) that would trap if the
1595 // value was null.
Duncan Sands19d0b472010-02-16 11:11:14 +00001596 if (GV->getInitializer()->getType()->isPointerTy() &&
Manoj Gupta77eeac32018-07-09 22:27:23 +00001597 GV->getInitializer()->isNullValue() &&
1598 !NullPointerIsDefined(
1599 nullptr /* F */,
1600 GV->getInitializer()->getType()->getPointerAddressSpace())) {
Chris Lattnere42eb312004-10-10 23:14:11 +00001601 if (Constant *SOVC = dyn_cast<Constant>(StoredOnceVal)) {
1602 if (GV->getInitializer()->getType() != SOVC->getType())
Chris Lattner1a1acc22011-05-22 07:15:13 +00001603 SOVC = ConstantExpr::getBitCast(SOVC, GV->getInitializer()->getType());
Misha Brukmanb1c93172005-04-21 23:48:37 +00001604
Chris Lattnere42eb312004-10-10 23:14:11 +00001605 // Optimize away any trapping uses of the loaded value.
Rafael Espindola37dc9e12014-02-21 00:06:31 +00001606 if (OptimizeAwayTrappingUsesOfLoads(GV, SOVC, DL, TLI))
Chris Lattner604ed7a2004-10-10 17:07:12 +00001607 return true;
Benjamin Kramer8bcc9712012-08-29 15:32:21 +00001608 } else if (CallInst *CI = extractMallocCall(StoredOnceVal, TLI)) {
1609 Type *MallocType = getMallocAllocatedType(CI, TLI);
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00001610 if (MallocType && tryToOptimizeStoreOfMallocToGlobal(GV, CI, MallocType,
1611 Ordering, DL, TLI))
Victor Hernandezf3db9152009-11-07 00:16:28 +00001612 return true;
Chris Lattnere42eb312004-10-10 23:14:11 +00001613 }
Chris Lattner09a52722004-10-09 21:48:45 +00001614 }
Chris Lattner004e2502004-10-11 05:54:41 +00001615
Chris Lattner09a52722004-10-09 21:48:45 +00001616 return false;
1617}
Chris Lattner1c4bddc2004-10-08 20:59:28 +00001618
James Molloyea31ad32015-11-13 11:05:07 +00001619/// At this point, we have learned that the only two values ever stored into GV
1620/// are its initializer and OtherVal. See if we can shrink the global into a
1621/// boolean and select between the two values whenever it is used. This exposes
1622/// the values to other scalar optimizations.
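/// For illustration: an i32 global whose only values are its initializer 0 and
/// the stored constant 42 becomes an i1 global; each load turns into
/// "select i1 %b, i32 42, i32 0" (or a plain zext when the two values are 0
/// and 1), and each store becomes a store of true or false.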
Lang Hames459b5dc2014-03-23 04:22:31 +00001623static bool TryToShrinkGlobalToBoolean(GlobalVariable *GV, Constant *OtherVal) {
Manuel Jacob5f6eaac2016-01-16 20:30:46 +00001624 Type *GVElType = GV->getValueType();
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00001625
Lang Hames459b5dc2014-03-23 04:22:31 +00001626 // If GVElType is already i1, it is already shrunk. If the type of the GV is
1627 // an FP value, pointer or vector, don't do this optimization because a select
1628 // between them is very expensive and unlikely to lead to later
1629 // simplification. In these cases, we typically end up with "cond ? v1 : v2"
1630 // where v1 and v2 both require constant pool loads, a big loss.
Chris Lattner46b5c642009-11-06 04:27:31 +00001631 if (GVElType == Type::getInt1Ty(GV->getContext()) ||
Duncan Sands9dff9be2010-02-15 16:12:20 +00001632 GVElType->isFloatingPointTy() ||
Duncan Sands19d0b472010-02-16 11:11:14 +00001633 GVElType->isPointerTy() || GVElType->isVectorTy())
Chris Lattner20bbac32008-01-14 01:17:44 +00001634 return false;
Gabor Greifa75ed762010-07-12 14:13:15 +00001635
Chris Lattner20bbac32008-01-14 01:17:44 +00001636 // Walk the use list of the global seeing if all the uses are load or store.
1637 // If there is anything else, bail out.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001638 for (User *U : GV->users())
Gabor Greifa75ed762010-07-12 14:13:15 +00001639 if (!isa<LoadInst>(U) && !isa<StoreInst>(U))
Chris Lattner20bbac32008-01-14 01:17:44 +00001640 return false;
Gabor Greifa75ed762010-07-12 14:13:15 +00001641
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001642 LLVM_DEBUG(dbgs() << " *** SHRINKING TO BOOL: " << *GV << "\n");
Lang Hames459b5dc2014-03-23 04:22:31 +00001643
1644 // Create the new global, initializing it to false.
1645 GlobalVariable *NewGV = new GlobalVariable(Type::getInt1Ty(GV->getContext()),
1646 false,
1647 GlobalValue::InternalLinkage,
1648 ConstantInt::getFalse(GV->getContext()),
1649 GV->getName()+".b",
1650 GV->getThreadLocalMode(),
1651 GV->getType()->getAddressSpace());
Sergei Larin94be2de2016-01-22 21:18:20 +00001652 NewGV->copyAttributesFrom(GV);
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00001653 GV->getParent()->getGlobalList().insert(GV->getIterator(), NewGV);
Lang Hames459b5dc2014-03-23 04:22:31 +00001654
Chris Lattner40e4cec2004-12-12 05:53:50 +00001655 Constant *InitVal = GV->getInitializer();
Chris Lattner46b5c642009-11-06 04:27:31 +00001656 assert(InitVal->getType() != Type::getInt1Ty(GV->getContext()) &&
Lang Hames459b5dc2014-03-23 04:22:31 +00001657 "No reason to shrink to bool!");
Chris Lattner40e4cec2004-12-12 05:53:50 +00001658
Strahinja Petrovic29202f62017-09-21 10:04:02 +00001659 SmallVector<DIGlobalVariableExpression *, 1> GVs;
1660 GV->getDebugInfo(GVs);
1661
Lang Hames459b5dc2014-03-23 04:22:31 +00001662 // If initialized to zero and storing one into the global, we can use a cast
1663 // instead of a select to synthesize the desired value.
1664 bool IsOneZero = false;
Strahinja Petrovic29202f62017-09-21 10:04:02 +00001665 bool EmitOneOrZero = true;
1666 if (ConstantInt *CI = dyn_cast<ConstantInt>(OtherVal)){
Lang Hames459b5dc2014-03-23 04:22:31 +00001667 IsOneZero = InitVal->isNullValue() && CI->isOne();
Chris Lattner40e4cec2004-12-12 05:53:50 +00001668
Strahinja Petrovic29202f62017-09-21 10:04:02 +00001669 if (ConstantInt *CIInit = dyn_cast<ConstantInt>(GV->getInitializer())){
1670 uint64_t ValInit = CIInit->getZExtValue();
1671 uint64_t ValOther = CI->getZExtValue();
1672 uint64_t ValMinus = ValOther - ValInit;
1673
1674 for(auto *GVe : GVs){
1675 DIGlobalVariable *DGV = GVe->getVariable();
1676 DIExpression *E = GVe->getExpression();
1677
1678 // It is expected that the address of the optimized global variable is on
1679 // top of the stack. After optimization, the value of that variable will
1680 // be either 0 for the initial value or 1 for the other value. The following
1681 // expression should return a constant integer value depending on the
1682 // value at the global object's address:
1683 // val * (ValOther - ValInit) + ValInit:
1684 // DW_OP_deref DW_OP_constu <ValMinus>
1685 // DW_OP_mul DW_OP_constu <ValInit> DW_OP_plus DW_OP_stack_value
Adrian Prantl210a29d2018-04-27 21:41:36 +00001686 SmallVector<uint64_t, 12> Ops = {
1687 dwarf::DW_OP_deref, dwarf::DW_OP_constu, ValMinus,
1688 dwarf::DW_OP_mul, dwarf::DW_OP_constu, ValInit,
1689 dwarf::DW_OP_plus};
1690 E = DIExpression::prependOpcodes(E, Ops, DIExpression::WithStackValue);
Strahinja Petrovic29202f62017-09-21 10:04:02 +00001691 DIGlobalVariableExpression *DGVE =
1692 DIGlobalVariableExpression::get(NewGV->getContext(), DGV, E);
1693 NewGV->addDebugInfo(DGVE);
1694 }
1695 EmitOneOrZero = false;
1696 }
1697 }
1698
1699 if (EmitOneOrZero) {
1700 // FIXME: This will only emit the variable's address for the debugger;
1701 // the value written there will only ever be 0 or 1.
1702 for(auto *GV : GVs)
1703 NewGV->addDebugInfo(GV);
1704 }
1705
Lang Hames459b5dc2014-03-23 04:22:31 +00001706 while (!GV->use_empty()) {
1707 Instruction *UI = cast<Instruction>(GV->user_back());
1708 if (StoreInst *SI = dyn_cast<StoreInst>(UI)) {
1709 // Change the store into a boolean store.
1710 bool StoringOther = SI->getOperand(0) == OtherVal;
1711 // Only do this if we weren't storing a loaded value.
1712 Value *StoreVal;
1713 if (StoringOther || SI->getOperand(0) == InitVal) {
1714 StoreVal = ConstantInt::get(Type::getInt1Ty(GV->getContext()),
1715 StoringOther);
Bill Wendling7297b862013-02-13 23:00:51 +00001716 } else {
Lang Hames459b5dc2014-03-23 04:22:31 +00001717 // Otherwise, we are storing a previously loaded copy. To do this,
1718 // change the copy from copying the original value to just copying the
1719 // bool.
1720 Instruction *StoredVal = cast<Instruction>(SI->getOperand(0));
1721
1722 // If we've already replaced the input, StoredVal will be a cast or
1723 // select instruction. If not, it will be a load of the original
1724 // global.
1725 if (LoadInst *LI = dyn_cast<LoadInst>(StoredVal)) {
1726 assert(LI->getOperand(0) == GV && "Not a copy!");
1727 // Insert a new load, to preserve the saved value.
1728 StoreVal = new LoadInst(NewGV, LI->getName()+".b", false, 0,
Konstantin Zhuravlyovbb80d3e2017-07-11 22:23:00 +00001729 LI->getOrdering(), LI->getSyncScopeID(), LI);
Lang Hames459b5dc2014-03-23 04:22:31 +00001730 } else {
1731 assert((isa<CastInst>(StoredVal) || isa<SelectInst>(StoredVal)) &&
1732 "This is not a form that we understand!");
1733 StoreVal = StoredVal->getOperand(0);
1734 assert(isa<LoadInst>(StoreVal) && "Not a load of NewGV!");
1735 }
Chris Lattner745196a2004-12-12 19:34:41 +00001736 }
Lang Hames459b5dc2014-03-23 04:22:31 +00001737 new StoreInst(StoreVal, NewGV, false, 0,
Konstantin Zhuravlyovbb80d3e2017-07-11 22:23:00 +00001738 SI->getOrdering(), SI->getSyncScopeID(), SI);
Lang Hames459b5dc2014-03-23 04:22:31 +00001739 } else {
1740 // Change the load into a load of bool then a select.
1741 LoadInst *LI = cast<LoadInst>(UI);
1742 LoadInst *NLI = new LoadInst(NewGV, LI->getName()+".b", false, 0,
Konstantin Zhuravlyovbb80d3e2017-07-11 22:23:00 +00001743 LI->getOrdering(), LI->getSyncScopeID(), LI);
Lang Hames459b5dc2014-03-23 04:22:31 +00001744 Value *NSI;
1745 if (IsOneZero)
1746 NSI = new ZExtInst(NLI, LI->getType(), "", LI);
1747 else
1748 NSI = SelectInst::Create(NLI, OtherVal, InitVal, "", LI);
1749 NSI->takeName(LI);
1750 LI->replaceAllUsesWith(NSI);
Devang Patelfc507a12009-03-06 01:39:36 +00001751 }
Lang Hames459b5dc2014-03-23 04:22:31 +00001752 UI->eraseFromParent();
Chris Lattner40e4cec2004-12-12 05:53:50 +00001753 }
1754
Lang Hames459b5dc2014-03-23 04:22:31 +00001755 // Retain the name of the old global variable. People who are debugging their
1756 // programs may expect these variables to be named the same.
1757 NewGV->takeName(GV);
1758 GV->eraseFromParent();
Chris Lattner20bbac32008-01-14 01:17:44 +00001759 return true;
Chris Lattner40e4cec2004-12-12 05:53:50 +00001760}
1761
Florian Hahna1cc8482018-06-12 11:16:56 +00001762static bool deleteIfDead(
1763 GlobalValue &GV, SmallPtrSetImpl<const Comdat *> &NotDiscardableComdats) {
Rafael Espindola2cc46b32015-12-22 19:38:07 +00001764 GV.removeDeadConstantUsers();
1765
Mehdi Aminid8803092016-09-15 20:26:27 +00001766 if (!GV.isDiscardableIfUnused() && !GV.isDeclaration())
Rafael Espindola2cc46b32015-12-22 19:38:07 +00001767 return false;
1768
1769 if (const Comdat *C = GV.getComdat())
1770 if (!GV.hasLocalLinkage() && NotDiscardableComdats.count(C))
1771 return false;
1772
1773 bool Dead;
1774 if (auto *F = dyn_cast<Function>(&GV))
Mehdi Aminid8803092016-09-15 20:26:27 +00001775 Dead = (F->isDeclaration() && F->use_empty()) || F->isDefTriviallyDead();
Rafael Espindola2cc46b32015-12-22 19:38:07 +00001776 else
1777 Dead = GV.use_empty();
1778 if (!Dead)
1779 return false;
1780
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001781 LLVM_DEBUG(dbgs() << "GLOBAL DEAD: " << GV << "\n");
Rafael Espindola2cc46b32015-12-22 19:38:07 +00001782 GV.eraseFromParent();
1783 ++NumDeleted;
1784 return true;
1785}
Chris Lattner40e4cec2004-12-12 05:53:50 +00001786
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00001787static bool isPointerValueDeadOnEntryToFunction(
1788 const Function *F, GlobalValue *GV,
1789 function_ref<DominatorTree &(Function &)> LookupDomTree) {
James Molloy9c7d4d82015-11-15 14:21:37 +00001790 // Find all uses of GV. We expect them all to be in F, and if we can't
1791 // identify any of the uses we bail out.
1792 //
1793 // On each of these uses, identify if the memory that GV points to is
1794 // used/required/live at the start of the function. If it is not, for example
1795 // if the first thing the function does is store to the GV, the GV can
1796 // possibly be demoted.
1797 //
1798 // We don't do an exhaustive search for memory operations - simply look
1799 // through bitcasts as they're quite common and benign.
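  // For example, if every load of GV in F is dominated by some store to GV,
  // the contents of GV at function entry are irrelevant and GV can later be
  // demoted to an alloca inside F.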
1800 const DataLayout &DL = GV->getParent()->getDataLayout();
1801 SmallVector<LoadInst *, 4> Loads;
1802 SmallVector<StoreInst *, 4> Stores;
1803 for (auto *U : GV->users()) {
1804 if (Operator::getOpcode(U) == Instruction::BitCast) {
1805 for (auto *UU : U->users()) {
1806 if (auto *LI = dyn_cast<LoadInst>(UU))
1807 Loads.push_back(LI);
1808 else if (auto *SI = dyn_cast<StoreInst>(UU))
1809 Stores.push_back(SI);
1810 else
1811 return false;
1812 }
1813 continue;
1814 }
1815
1816 Instruction *I = dyn_cast<Instruction>(U);
1817 if (!I)
1818 return false;
1819 assert(I->getParent()->getParent() == F);
1820
1821 if (auto *LI = dyn_cast<LoadInst>(I))
1822 Loads.push_back(LI);
1823 else if (auto *SI = dyn_cast<StoreInst>(I))
1824 Stores.push_back(SI);
1825 else
1826 return false;
1827 }
1828
1829 // We have identified all uses of GV into loads and stores. Now check if all
1830 // of them are known not to depend on the value of the global at the function
1831 // entry point. We do this by ensuring that every load is dominated by at
1832 // least one store.
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00001833 auto &DT = LookupDomTree(*const_cast<Function *>(F));
James Molloy9c7d4d82015-11-15 14:21:37 +00001834
James Molloyd4d23572015-11-16 10:16:22 +00001835 // The below check is quadratic. Check we're not going to do too many tests.
1836 // FIXME: Even though this will always have worst-case quadratic time, we
1837 // could put effort into minimizing the average time by putting stores that
1838 // have been shown to dominate at least one load at the beginning of the
1839 // Stores array, making subsequent dominance checks more likely to succeed
1840 // early.
1841 //
1842 // The threshold here is fairly large because global->local demotion is a
1843 // very powerful optimization should it fire.
1844 const unsigned Threshold = 100;
1845 if (Loads.size() * Stores.size() > Threshold)
1846 return false;
1847
James Molloy9c7d4d82015-11-15 14:21:37 +00001848 for (auto *L : Loads) {
1849 auto *LTy = L->getType();
David Majnemer0a16c222016-08-11 21:15:00 +00001850 if (none_of(Stores, [&](const StoreInst *S) {
James Molloy9c7d4d82015-11-15 14:21:37 +00001851 auto *STy = S->getValueOperand()->getType();
1852 // The load is only dominated by the store if DomTree says so
1853 // and the number of bits loaded in L is less than or equal to
1854 // the number of bits stored in S.
1855 return DT.dominates(S, L) &&
1856 DL.getTypeStoreSize(LTy) <= DL.getTypeStoreSize(STy);
1857 }))
1858 return false;
1859 }
1860 // All loads have known dependences inside F, so the global can be localized.
1861 return true;
1862}
1863
James Molloy1d695a02015-11-19 18:04:33 +00001864/// C may have non-instruction users. Can all of those users be turned into
1865/// instructions?
1866static bool allNonInstructionUsersCanBeMadeInstructions(Constant *C) {
1867 // We don't do this exhaustively. The most common pattern that we really need
1868 // to care about is a constant GEP or constant bitcast - so just looking
1869 // through one single ConstantExpr.
1870 //
1871 // The set of constants that this function returns true for must be able to be
1872 // handled by makeAllConstantUsesInstructions.
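  // For example, a user such as "load (getelementptr (@G, 0, 1))", where the
  // GEP is a ConstantExpr, is acceptable: the ConstantExpr can later be
  // expanded into an explicit GEP instruction right before the load.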
1873 for (auto *U : C->users()) {
1874 if (isa<Instruction>(U))
1875 continue;
1876 if (!isa<ConstantExpr>(U))
1877 // Non instruction, non-constantexpr user; cannot convert this.
1878 return false;
1879 for (auto *UU : U->users())
1880 if (!isa<Instruction>(UU))
1881 // A ConstantExpr used by another constant. We don't try to recurse any
1882 // further but just bail out at this point.
1883 return false;
1884 }
1885
1886 return true;
1887}
1888
1889/// C may have non-instruction users, and
1890/// allNonInstructionUsersCanBeMadeInstructions has returned true. Convert the
1891/// non-instruction users to instructions.
1892static void makeAllConstantUsesInstructions(Constant *C) {
1893 SmallVector<ConstantExpr*,4> Users;
1894 for (auto *U : C->users()) {
1895 if (isa<ConstantExpr>(U))
1896 Users.push_back(cast<ConstantExpr>(U));
1897 else
1898 // We should never get here; allNonInstructionUsersCanBeMadeInstructions
1899 // should not have returned true for C.
1900 assert(
1901 isa<Instruction>(U) &&
1902 "Can't transform non-constantexpr non-instruction to instruction!");
1903 }
1904
1905 SmallVector<Value*,4> UUsers;
1906 for (auto *U : Users) {
1907 UUsers.clear();
1908 for (auto *UU : U->users())
1909 UUsers.push_back(UU);
1910 for (auto *UU : UUsers) {
1911 Instruction *UI = cast<Instruction>(UU);
1912 Instruction *NewU = U->getAsInstruction();
1913 NewU->insertBefore(UI);
1914 UI->replaceUsesOfWith(U, NewU);
1915 }
Eli Friedman10ab9232017-04-27 18:39:08 +00001916 // We've replaced all the uses, so destroy the constant. (destroyConstant
1917 // will update value handles and metadata.)
1918 U->destroyConstant();
James Molloy1d695a02015-11-19 18:04:33 +00001919 }
1920}
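// For example (illustrative only): a use of @g through a ConstantExpr such as
//
//   %v = load i32, i32* getelementptr inbounds ([4 x i32], [4 x i32]* @g,
//                                               i64 0, i64 1)
//
// is rewritten by materializing the ConstantExpr as a real GetElementPtrInst
// immediately before the load:
//
//   %gep = getelementptr inbounds [4 x i32], [4 x i32]* @g, i64 0, i64 1
//   %v = load i32, i32* %gep
//
// After this, @g only has instruction users, which is what the
// global-to-alloca localization below requires.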
1921
James Molloyea31ad32015-11-13 11:05:07 +00001922/// Analyze the specified global variable and optimize
Rafael Espindolafc355bc2011-01-19 16:32:21 +00001923/// it if possible. If we make a change, return true.
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00001924static bool processInternalGlobal(
1925 GlobalVariable *GV, const GlobalStatus &GS, TargetLibraryInfo *TLI,
1926 function_ref<DominatorTree &(Function &)> LookupDomTree) {
Mehdi Amini46a43552015-03-04 18:43:29 +00001927 auto &DL = GV->getParent()->getDataLayout();
James Molloy9c7d4d82015-11-15 14:21:37 +00001928 // If this is a first class global and has only one accessing function and
1929 // this function is non-recursive, we replace the global with a local alloca
1930 // in this function.
Alexey Samsonova1944e62013-10-07 19:03:24 +00001931 //
Alp Tokerf907b892013-12-05 05:44:44 +00001932 // NOTE: It doesn't make sense to promote non-single-value types since we
Alexey Samsonova1944e62013-10-07 19:03:24 +00001933 // are just replacing static memory to stack memory.
1934 //
1935 // If the global is in different address space, don't bring it to stack.
1936 if (!GS.HasMultipleAccessingFunctions &&
James Molloy1d695a02015-11-19 18:04:33 +00001937 GS.AccessingFunction &&
Manuel Jacob5f6eaac2016-01-16 20:30:46 +00001938 GV->getValueType()->isSingleValueType() &&
James Molloy9c7d4d82015-11-15 14:21:37 +00001939 GV->getType()->getAddressSpace() == 0 &&
1940 !GV->isExternallyInitialized() &&
James Molloy1d695a02015-11-19 18:04:33 +00001941 allNonInstructionUsersCanBeMadeInstructions(GV) &&
James Molloy9c7d4d82015-11-15 14:21:37 +00001942 GS.AccessingFunction->doesNotRecurse() &&
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00001943 isPointerValueDeadOnEntryToFunction(GS.AccessingFunction, GV,
1944 LookupDomTree)) {
Matt Arsenault3c1fc762017-04-10 22:27:50 +00001945 const DataLayout &DL = GV->getParent()->getDataLayout();
1946
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001947 LLVM_DEBUG(dbgs() << "LOCALIZING GLOBAL: " << *GV << "\n");
Alexey Samsonova1944e62013-10-07 19:03:24 +00001948 Instruction &FirstI = const_cast<Instruction&>(*GS.AccessingFunction
1949 ->getEntryBlock().begin());
Manuel Jacob5f6eaac2016-01-16 20:30:46 +00001950 Type *ElemTy = GV->getValueType();
Alexey Samsonova1944e62013-10-07 19:03:24 +00001951 // FIXME: Pass Global's alignment when globals have alignment
Matt Arsenault3c1fc762017-04-10 22:27:50 +00001952 AllocaInst *Alloca = new AllocaInst(ElemTy, DL.getAllocaAddrSpace(), nullptr,
Craig Topperf40110f2014-04-25 05:29:35 +00001953 GV->getName(), &FirstI);
Alexey Samsonova1944e62013-10-07 19:03:24 +00001954 if (!isa<UndefValue>(GV->getInitializer()))
1955 new StoreInst(GV->getInitializer(), Alloca, &FirstI);
1956
James Molloy1d695a02015-11-19 18:04:33 +00001957 makeAllConstantUsesInstructions(GV);
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00001958
Alexey Samsonova1944e62013-10-07 19:03:24 +00001959 GV->replaceAllUsesWith(Alloca);
1960 GV->eraseFromParent();
1961 ++NumLocalized;
1962 return true;
1963 }
1964
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00001965 // If the global is never loaded (but may be stored to), it is dead.
1966 // Delete it now.
Rafael Espindola045a78f2013-10-17 18:18:52 +00001967 if (!GS.IsLoaded) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001968 LLVM_DEBUG(dbgs() << "GLOBAL NEVER LOADED: " << *GV << "\n");
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00001969
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +00001970 bool Changed;
1971 if (isLeakCheckerRoot(GV)) {
1972 // Delete any constant stores to the global.
Benjamin Kramer8bcc9712012-08-29 15:32:21 +00001973 Changed = CleanupPointerRootUsers(GV, TLI);
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +00001974 } else {
1975 // Delete any stores we can find to the global. We may not be able to
1976 // make it completely dead though.
Rafael Espindola37dc9e12014-02-21 00:06:31 +00001977 Changed = CleanupConstantGlobalUsers(GV, GV->getInitializer(), DL, TLI);
Nick Lewyckyfaa9c3b02012-07-24 07:21:08 +00001978 }
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00001979
1980 // If the global is dead now, delete it.
1981 if (GV->use_empty()) {
1982 GV->eraseFromParent();
1983 ++NumDeleted;
1984 Changed = true;
1985 }
1986 return Changed;
1987
James Molloyeb040cc2016-04-25 10:48:29 +00001988 }
1989 if (GS.StoredType <= GlobalStatus::InitializerStored) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001990 LLVM_DEBUG(dbgs() << "MARKING CONSTANT: " << *GV << "\n");
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00001991 GV->setConstant(true);
1992
1993 // Clean up any obviously simplifiable users now.
Rafael Espindola37dc9e12014-02-21 00:06:31 +00001994 CleanupConstantGlobalUsers(GV, GV->getInitializer(), DL, TLI);
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00001995
1996 // If the global is dead now, just nuke it.
1997 if (GV->use_empty()) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001998 LLVM_DEBUG(dbgs() << " *** Marking constant allowed us to simplify "
1999 << "all users and delete global!\n");
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002000 GV->eraseFromParent();
2001 ++NumDeleted;
James Molloyeb040cc2016-04-25 10:48:29 +00002002 return true;
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002003 }
2004
James Molloyeb040cc2016-04-25 10:48:29 +00002005 // Fall through to the next check; see if we can optimize further.
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002006 ++NumMarked;
James Molloyeb040cc2016-04-25 10:48:29 +00002007 }
2008 if (!GV->getInitializer()->getType()->isSingleValueType()) {
Mehdi Amini46a43552015-03-04 18:43:29 +00002009 const DataLayout &DL = GV->getParent()->getDataLayout();
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00002010 if (SRAGlobal(GV, DL))
Mehdi Amini46a43552015-03-04 18:43:29 +00002011 return true;
James Molloyeb040cc2016-04-25 10:48:29 +00002012 }
2013 if (GS.StoredType == GlobalStatus::StoredOnce && GS.StoredOnceValue) {
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002014 // If the initial value for the global was an undef value, and if only
2015 // one other value was stored into it, we can just change the
2016 // initializer to be the stored value, then delete all stores to the
2017 // global. This allows us to mark it constant.
2018 if (Constant *SOVConstant = dyn_cast<Constant>(GS.StoredOnceValue))
2019 if (isa<UndefValue>(GV->getInitializer())) {
2020 // Change the initial value here.
2021 GV->setInitializer(SOVConstant);
2022
2023 // Clean up any obviously simplifiable users now.
Rafael Espindola37dc9e12014-02-21 00:06:31 +00002024 CleanupConstantGlobalUsers(GV, GV->getInitializer(), DL, TLI);
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002025
2026 if (GV->use_empty()) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00002027 LLVM_DEBUG(dbgs() << " *** Substituting initializer allowed us to "
2028 << "simplify all users and delete global!\n");
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002029 GV->eraseFromParent();
2030 ++NumDeleted;
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002031 }
2032 ++NumSubstitute;
2033 return true;
2034 }
2035
2036 // Try to optimize globals based on the knowledge that only one value
2037 // (besides its initializer) is ever stored to the global.
Rafael Espindolae4ed0e52015-12-22 19:16:50 +00002038 if (optimizeOnceStoredGlobal(GV, GS.StoredOnceValue, GS.Ordering, DL, TLI))
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002039 return true;
2040
Lang Hames459b5dc2014-03-23 04:22:31 +00002041 // Otherwise, if the global was not a boolean, we can shrink it to be a
2042 // boolean.
Eli Friedman33d37002013-09-09 22:00:13 +00002043 if (Constant *SOVConstant = dyn_cast<Constant>(GS.StoredOnceValue)) {
JF Bastien800f87a2016-04-06 21:19:33 +00002044 if (GS.Ordering == AtomicOrdering::NotAtomic) {
Lang Hames459b5dc2014-03-23 04:22:31 +00002045 if (TryToShrinkGlobalToBoolean(GV, SOVConstant)) {
Eli Friedman33d37002013-09-09 22:00:13 +00002046 ++NumShrunkToBool;
2047 return true;
2048 }
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002049 }
Eli Friedman33d37002013-09-09 22:00:13 +00002050 }
Rafael Espindolaecd5b9a2011-01-18 04:36:06 +00002051 }
2052
Chris Lattner1c4bddc2004-10-08 20:59:28 +00002053 return false;
2054}
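// A small example of the stored-once case handled above (illustrative only):
//
//   @flag = internal global i32 undef
//   ...
//   store i32 42, i32* @flag    ; the only store in the module
//
// The initializer of @flag can be replaced with i32 42; the store is then
// redundant, loads fold to 42, and the global can usually be deleted or at
// least marked constant on a later iteration.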
2055
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002056/// Analyze the specified global variable and optimize it if possible. If we
2057/// make a change, return true.
2058static bool
2059processGlobal(GlobalValue &GV, TargetLibraryInfo *TLI,
2060 function_ref<DominatorTree &(Function &)> LookupDomTree) {
Peter Collingbourne96efdd62016-06-14 21:01:22 +00002061 if (GV.getName().startswith("llvm."))
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002062 return false;
2063
2064 GlobalStatus GS;
2065
2066 if (GlobalStatus::analyzeGlobal(&GV, GS))
2067 return false;
2068
2069 bool Changed = false;
Peter Collingbourne96efdd62016-06-14 21:01:22 +00002070 if (!GS.IsCompared && !GV.hasGlobalUnnamedAddr()) {
2071 auto NewUnnamedAddr = GV.hasLocalLinkage() ? GlobalValue::UnnamedAddr::Global
2072 : GlobalValue::UnnamedAddr::Local;
2073 if (NewUnnamedAddr != GV.getUnnamedAddr()) {
2074 GV.setUnnamedAddr(NewUnnamedAddr);
2075 NumUnnamed++;
2076 Changed = true;
2077 }
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002078 }
2079
Peter Collingbourne96efdd62016-06-14 21:01:22 +00002080 // Do more involved optimizations if the global is internal.
2081 if (!GV.hasLocalLinkage())
2082 return Changed;
2083
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002084 auto *GVar = dyn_cast<GlobalVariable>(&GV);
2085 if (!GVar)
2086 return Changed;
2087
2088 if (GVar->isConstant() || !GVar->hasInitializer())
2089 return Changed;
2090
2091 return processInternalGlobal(GVar, GS, TLI, LookupDomTree) || Changed;
2092}
2093
James Molloyea31ad32015-11-13 11:05:07 +00002094/// Walk all of the direct calls of the specified function, changing them to
2095/// FastCC.
Chris Lattnera4c80222005-05-08 22:18:06 +00002096static void ChangeCalleesToFastCall(Function *F) {
Chandler Carruthcdf47882014-03-09 03:16:01 +00002097 for (User *U : F->users()) {
2098 if (isa<BlockAddress>(U))
Jay Foadca0c4992012-05-12 08:30:16 +00002099 continue;
Chandler Carruthcdf47882014-03-09 03:16:01 +00002100 CallSite CS(cast<Instruction>(U));
2101 CS.setCallingConv(CallingConv::Fast);
Chris Lattnera4c80222005-05-08 22:18:06 +00002102 }
2103}
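// For example (sketch): once an internal, only-directly-called function and
// all of its call sites are switched,
//
//   call void @callee(i32 %x)
//
// becomes
//
//   call fastcc void @callee(i32 %x)
//
// with @callee itself declared with the fastcc calling convention.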
Chris Lattner1c4bddc2004-10-08 20:59:28 +00002104
Reid Kleckner0a5ed3d2017-04-19 23:26:44 +00002105static AttributeList StripNest(LLVMContext &C, AttributeList Attrs) {
2106 // There can be at most one attribute set with a nest attribute.
2107 unsigned NestIndex;
2108 if (Attrs.hasAttrSomewhere(Attribute::Nest, &NestIndex))
2109 return Attrs.removeAttribute(C, NestIndex, Attribute::Nest);
Duncan Sands573b3f82008-02-16 20:56:04 +00002110 return Attrs;
2111}
2112
2113static void RemoveNestAttribute(Function *F) {
Bill Wendling85a64c22012-10-14 06:39:53 +00002114 F->setAttributes(StripNest(F->getContext(), F->getAttributes()));
Chandler Carruthcdf47882014-03-09 03:16:01 +00002115 for (User *U : F->users()) {
2116 if (isa<BlockAddress>(U))
Jay Foadca0c4992012-05-12 08:30:16 +00002117 continue;
Chandler Carruthcdf47882014-03-09 03:16:01 +00002118 CallSite CS(cast<Instruction>(U));
2119 CS.setAttributes(StripNest(F->getContext(), CS.getAttributes()));
Duncan Sands573b3f82008-02-16 20:56:04 +00002120 }
2121}
2122
Reid Kleckner22869372014-02-26 19:57:30 +00002123/// Return true if this is a calling convention that we'd like to change. The
2124/// idea here is that we don't want to mess with the convention if the user
2125/// explicitly requested something with performance implications like coldcc,
2126/// GHC, or anyregcc.
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002127static bool hasChangeableCC(Function *F) {
Reid Kleckner22869372014-02-26 19:57:30 +00002128 CallingConv::ID CC = F->getCallingConv();
Jonas Devlieghere9ca06452018-02-28 22:28:44 +00002129
Reid Kleckner22869372014-02-26 19:57:30 +00002130 // FIXME: Is it worth transforming x86_stdcallcc and x86_fastcallcc?
Jonas Devlieghere9ca06452018-02-28 22:28:44 +00002131 if (CC != CallingConv::C && CC != CallingConv::X86_ThisCall)
2132 return false;
2133
2134 // FIXME: Change CC for the whole chain of musttail calls when possible.
2135 //
2136 // Can't change CC of the function that either has musttail calls, or is a
2137 // musttail callee itself
2138 for (User *U : F->users()) {
2139 if (isa<BlockAddress>(U))
2140 continue;
2141 CallInst* CI = dyn_cast<CallInst>(U);
2142 if (!CI)
2143 continue;
2144
2145 if (CI->isMustTailCall())
2146 return false;
2147 }
2148
2149 for (BasicBlock &BB : *F)
2150 if (BB.getTerminatingMustTailCall())
2151 return false;
2152
2153 return true;
Reid Kleckner22869372014-02-26 19:57:30 +00002154}
2155
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002156/// Return true if the block containing the call site has a BlockFrequency of
2157/// less than ColdCCRelFreq% of the entry block.
2158static bool isColdCallSite(CallSite CS, BlockFrequencyInfo &CallerBFI) {
2159 const BranchProbability ColdProb(ColdCCRelFreq, 100);
2160 auto CallSiteBB = CS.getInstruction()->getParent();
2161 auto CallSiteFreq = CallerBFI.getBlockFreq(CallSiteBB);
2162 auto CallerEntryFreq =
2163 CallerBFI.getBlockFreq(&(CS.getCaller()->getEntryBlock()));
2164 return CallSiteFreq < CallerEntryFreq * ColdProb;
2165}
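// For example, if ColdCCRelFreq is 2 (i.e. 2%), a call site whose block
// frequency is 10 in a caller whose entry block frequency is 1000 satisfies
// 10 < 1000 * 2/100 = 20 and is therefore treated as cold.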
2166
2167// This function checks if the input function F is cold at all call sites. It
2168 // also looks at each call site's containing function, returning false if the
2169 // caller function contains other non-cold calls. The input vector AllCallsCold
2170// contains a list of functions that only have call sites in cold blocks.
2171static bool
2172isValidCandidateForColdCC(Function &F,
2173 function_ref<BlockFrequencyInfo &(Function &)> GetBFI,
2174 const std::vector<Function *> &AllCallsCold) {
2175
2176 if (F.user_empty())
2177 return false;
2178
2179 for (User *U : F.users()) {
2180 if (isa<BlockAddress>(U))
2181 continue;
2182
2183 CallSite CS(cast<Instruction>(U));
2184 Function *CallerFunc = CS.getInstruction()->getParent()->getParent();
2185 BlockFrequencyInfo &CallerBFI = GetBFI(*CallerFunc);
2186 if (!isColdCallSite(CS, CallerBFI))
2187 return false;
2188 auto It = std::find(AllCallsCold.begin(), AllCallsCold.end(), CallerFunc);
2189 if (It == AllCallsCold.end())
2190 return false;
2191 }
2192 return true;
2193}
2194
2195static void changeCallSitesToColdCC(Function *F) {
2196 for (User *U : F->users()) {
2197 if (isa<BlockAddress>(U))
2198 continue;
2199 CallSite CS(cast<Instruction>(U));
2200 CS.setCallingConv(CallingConv::Cold);
2201 }
2202}
2203
2204// This function iterates over all the call instructions in the input Function
2205// and checks that all call sites are in cold blocks and are allowed to use the
2206// coldcc calling convention.
2207static bool
2208hasOnlyColdCalls(Function &F,
2209 function_ref<BlockFrequencyInfo &(Function &)> GetBFI) {
2210 for (BasicBlock &BB : F) {
2211 for (Instruction &I : BB) {
2212 if (CallInst *CI = dyn_cast<CallInst>(&I)) {
2213 CallSite CS(cast<Instruction>(CI));
2214 // Skip over inline asm instructions since they aren't function calls.
2215 if (CI->isInlineAsm())
2216 continue;
2217 Function *CalledFn = CI->getCalledFunction();
2218 if (!CalledFn)
2219 return false;
2220 if (!CalledFn->hasLocalLinkage())
2221 return false;
2222 // Skip over intrinsics since they won't remain as function calls.
2223 if (CalledFn->getIntrinsicID() != Intrinsic::not_intrinsic)
2224 continue;
2225 // Check if it's valid to use coldcc calling convention.
2226 if (!hasChangeableCC(CalledFn) || CalledFn->isVarArg() ||
2227 CalledFn->hasAddressTaken())
2228 return false;
2229 BlockFrequencyInfo &CallerBFI = GetBFI(F);
2230 if (!isColdCallSite(CS, CallerBFI))
2231 return false;
2232 }
2233 }
2234 }
2235 return true;
2236}
2237
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002238static bool
2239OptimizeFunctions(Module &M, TargetLibraryInfo *TLI,
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002240 function_ref<TargetTransformInfo &(Function &)> GetTTI,
2241 function_ref<BlockFrequencyInfo &(Function &)> GetBFI,
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002242 function_ref<DominatorTree &(Function &)> LookupDomTree,
Florian Hahna1cc8482018-06-12 11:16:56 +00002243 SmallPtrSetImpl<const Comdat *> &NotDiscardableComdats) {
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002244
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002245 bool Changed = false;
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002246
2247 std::vector<Function *> AllCallsCold;
2248 for (Module::iterator FI = M.begin(), E = M.end(); FI != E;) {
2249 Function *F = &*FI++;
2250 if (hasOnlyColdCalls(*F, GetBFI))
2251 AllCallsCold.push_back(F);
2252 }
2253
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002254 // Optimize functions.
2255 for (Module::iterator FI = M.begin(), E = M.end(); FI != E; ) {
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00002256 Function *F = &*FI++;
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002257
Luke Cheeseman6c1e6bb2018-02-22 14:42:08 +00002258 // Don't perform global opt pass on naked functions; we don't want fast
2259 // calling conventions for naked functions.
2260 if (F->hasFnAttribute(Attribute::Naked))
2261 continue;
2262
Duncan Sandsed722832009-03-06 10:21:56 +00002263 // Functions without names cannot be referenced outside this module.
David Majnemer5c921152014-07-01 15:26:50 +00002264 if (!F->hasName() && !F->isDeclaration() && !F->hasLocalLinkage())
Duncan Sandsed722832009-03-06 10:21:56 +00002265 F->setLinkage(GlobalValue::InternalLinkage);
David Majnemer1b3b70e2014-10-08 07:23:31 +00002266
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002267 if (deleteIfDead(*F, NotDiscardableComdats)) {
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002268 Changed = true;
Rafael Espindola9f0bebc2015-12-22 19:26:18 +00002269 continue;
2270 }
Rafael Espindola10d9a032015-12-22 20:43:30 +00002271
Davide Italianoc3dc055782017-07-13 15:40:59 +00002272 // LLVM's definition of dominance allows instructions that are cyclic
2273 // in unreachable blocks, e.g.:
2274 // %pat = select i1 %condition, @global, i16* %pat
2275 // because any instruction dominates an instruction in a block that's
2276 // not reachable from entry.
2277 // So, remove unreachable blocks from the function, because a) there's
2278 // no point in analyzing them and b) GlobalOpt should otherwise grow
2279 // some more complicated logic to break these cycles.
2280 // Removing unreachable blocks might invalidate the dominator tree, so we
2281 // recalculate it.
2282 if (!F->isDeclaration()) {
2283 if (removeUnreachableBlocks(*F)) {
2284 auto &DT = LookupDomTree(*F);
2285 DT.recalculate(*F);
2286 Changed = true;
2287 }
2288 }
2289
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002290 Changed |= processGlobal(*F, TLI, LookupDomTree);
Rafael Espindola10d9a032015-12-22 20:43:30 +00002291
Rafael Espindola9f0bebc2015-12-22 19:26:18 +00002292 if (!F->hasLocalLinkage())
2293 continue;
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002294
2295 if (hasChangeableCC(F) && !F->isVarArg() && !F->hasAddressTaken()) {
2296 NumInternalFunc++;
2297 TargetTransformInfo &TTI = GetTTI(*F);
2298 // Change the calling convention to coldcc if either stress testing is
2299 // enabled or the target would like to use coldcc on functions which are
2300 // cold at all call sites and the callers contain no other non-coldcc
2301 // calls.
2302 if (EnableColdCCStressTest ||
2303 (isValidCandidateForColdCC(*F, GetBFI, AllCallsCold) &&
2304 TTI.useColdCCForColdCall(*F))) {
2305 F->setCallingConv(CallingConv::Cold);
2306 changeCallSitesToColdCC(F);
2307 Changed = true;
2308 NumColdCC++;
2309 }
2310 }
2311
2312 if (hasChangeableCC(F) && !F->isVarArg() &&
Rafael Espindola9f0bebc2015-12-22 19:26:18 +00002313 !F->hasAddressTaken()) {
2314 // If this function has a calling convention worth changing, is not a
2315 // varargs function, and is only called directly, promote it to use the
2316 // Fast calling convention.
2317 F->setCallingConv(CallingConv::Fast);
2318 ChangeCalleesToFastCall(F);
2319 ++NumFastCallFns;
2320 Changed = true;
2321 }
Duncan Sands573b3f82008-02-16 20:56:04 +00002322
Rafael Espindola9f0bebc2015-12-22 19:26:18 +00002323 if (F->getAttributes().hasAttrSomewhere(Attribute::Nest) &&
2324 !F->hasAddressTaken()) {
2325 // The function is not used by a trampoline intrinsic, so it is safe
2326 // to remove the 'nest' attribute.
2327 RemoveNestAttribute(F);
2328 ++NumNestRemoved;
2329 Changed = true;
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002330 }
2331 }
2332 return Changed;
2333}
2334
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002335static bool
2336OptimizeGlobalVars(Module &M, TargetLibraryInfo *TLI,
2337 function_ref<DominatorTree &(Function &)> LookupDomTree,
Florian Hahna1cc8482018-06-12 11:16:56 +00002338 SmallPtrSetImpl<const Comdat *> &NotDiscardableComdats) {
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002339 bool Changed = false;
David Majnemerdad0a642014-06-27 18:19:56 +00002340
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002341 for (Module::global_iterator GVI = M.global_begin(), E = M.global_end();
2342 GVI != E; ) {
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00002343 GlobalVariable *GV = &*GVI++;
Duncan Sandsed722832009-03-06 10:21:56 +00002344 // Global variables without names cannot be referenced outside this module.
David Majnemer5c921152014-07-01 15:26:50 +00002345 if (!GV->hasName() && !GV->isDeclaration() && !GV->hasLocalLinkage())
Duncan Sandsed722832009-03-06 10:21:56 +00002346 GV->setLinkage(GlobalValue::InternalLinkage);
Dan Gohman580b80d2009-11-23 16:22:21 +00002347 // Simplify the initializer.
2348 if (GV->hasInitializer())
David Majnemerd536f232016-07-29 03:27:26 +00002349 if (auto *C = dyn_cast<Constant>(GV->getInitializer())) {
Mehdi Amini46a43552015-03-04 18:43:29 +00002350 auto &DL = M.getDataLayout();
David Majnemerd536f232016-07-29 03:27:26 +00002351 Constant *New = ConstantFoldConstant(C, DL, TLI);
2352 if (New && New != C)
Dan Gohman580b80d2009-11-23 16:22:21 +00002353 GV->setInitializer(New);
2354 }
Rafael Espindolafc355bc2011-01-19 16:32:21 +00002355
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002356 if (deleteIfDead(*GV, NotDiscardableComdats)) {
Rafael Espindola10d9a032015-12-22 20:43:30 +00002357 Changed = true;
2358 continue;
2359 }
2360
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002361 Changed |= processGlobal(*GV, TLI, LookupDomTree);
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002362 }
2363 return Changed;
2364}
2365
James Molloyea31ad32015-11-13 11:05:07 +00002366/// Evaluate a piece of a constantexpr store into a global initializer. This
2367/// returns 'Init' modified to reflect 'Val' stored into it. At this point, the
2368/// GEP operands of Addr [0, OpNo) have been stepped into.
Anthony Pesche92ae2d2015-07-22 22:26:54 +00002369static Constant *EvaluateStoreInto(Constant *Init, Constant *Val,
2370 ConstantExpr *Addr, unsigned OpNo) {
2371 // Base case of the recursion.
2372 if (OpNo == Addr->getNumOperands()) {
2373 assert(Val->getType() == Init->getType() && "Type mismatch!");
2374 return Val;
2375 }
2376
2377 SmallVector<Constant*, 32> Elts;
2378 if (StructType *STy = dyn_cast<StructType>(Init->getType())) {
2379 // Break up the constant into its elements.
2380 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i)
2381 Elts.push_back(Init->getAggregateElement(i));
2382
2383 // Replace the element that we are supposed to.
2384 ConstantInt *CU = cast<ConstantInt>(Addr->getOperand(OpNo));
2385 unsigned Idx = CU->getZExtValue();
2386 assert(Idx < STy->getNumElements() && "Struct index out of range!");
2387 Elts[Idx] = EvaluateStoreInto(Elts[Idx], Val, Addr, OpNo+1);
2388
2389 // Return the modified struct.
2390 return ConstantStruct::get(STy, Elts);
2391 }
2392
2393 ConstantInt *CI = cast<ConstantInt>(Addr->getOperand(OpNo));
2394 SequentialType *InitTy = cast<SequentialType>(Init->getType());
Peter Collingbournebc070522016-12-02 03:20:58 +00002395 uint64_t NumElts = InitTy->getNumElements();
Anthony Pesche92ae2d2015-07-22 22:26:54 +00002396
2397 // Break up the array into elements.
2398 for (uint64_t i = 0, e = NumElts; i != e; ++i)
2399 Elts.push_back(Init->getAggregateElement(i));
2400
2401 assert(CI->getZExtValue() < NumElts);
2402 Elts[CI->getZExtValue()] =
2403 EvaluateStoreInto(Elts[CI->getZExtValue()], Val, Addr, OpNo+1);
2404
2405 if (Init->getType()->isArrayTy())
2406 return ConstantArray::get(cast<ArrayType>(InitTy), Elts);
2407 return ConstantVector::get(Elts);
2408}
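// For example (sketch): committing i32 7 through the address
//
//   getelementptr inbounds ({ i32, [2 x i32] }, { i32, [2 x i32] }* @g,
//                           i64 0, i32 1, i64 0)
//
// has CommitValueTo (below) start the recursion at OpNo == 2 (operand 0 is @g
// and operand 1 is the leading zero index), step into element 1 of the struct,
// then element 0 of the inner array, and rebuild the surrounding
// ConstantStruct/ConstantArray around the new value on the way back up.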
2409
James Molloyea31ad32015-11-13 11:05:07 +00002410/// We have decided that Addr (which satisfies the predicate
Anthony Pesche92ae2d2015-07-22 22:26:54 +00002411/// isSimpleEnoughPointerToCommit) should get Val as its value. Make it happen.
2412static void CommitValueTo(Constant *Val, Constant *Addr) {
2413 if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
2414 assert(GV->hasInitializer());
2415 GV->setInitializer(Val);
2416 return;
2417 }
2418
2419 ConstantExpr *CE = cast<ConstantExpr>(Addr);
2420 GlobalVariable *GV = cast<GlobalVariable>(CE->getOperand(0));
2421 GV->setInitializer(EvaluateStoreInto(GV->getInitializer(), Val, CE, 2));
2422}
2423
Amara Emerson93b0ff22018-01-31 23:56:07 +00002424/// Given a map of address -> value, where addresses are expected to be some form
2425/// of either a global or a constant GEP, set the initializer for the address to
2426/// be the value. This performs mostly the same function as CommitValueTo()
2427/// and EvaluateStoreInto() but is optimized to be more efficient for the common
2428/// case where the set of addresses are GEPs sharing the same underlying global,
2429/// processing the GEPs in batches rather than individually.
2430///
2431/// To give an example, consider the following C++ code adapted from the clang
2432/// regression tests:
2433/// struct S {
2434/// int n = 10;
2435/// int m = 2 * n;
2436/// S(int a) : n(a) {}
2437/// };
2438///
2439/// template<typename T>
2440/// struct U {
2441/// T *r = &q;
2442/// T q = 42;
2443/// U *p = this;
2444/// };
2445///
2446/// U<S> e;
2447///
2448/// The global static constructor for 'e' will need to initialize 'r' and 'p' of
2449/// the outer struct, while also initializing the inner 'q' struct's 'n' and 'm'
2450/// members. This batch algorithm simply uses the general CommitValueTo() method
2451/// to handle the complex nested S struct initialization of 'q', before
2452/// processing the outermost members in a single batch. Using CommitValueTo() to
2453/// handle members in the outer struct is inefficient when the struct/array is
2454/// very large, as we end up creating and destroying constant arrays for each
2455/// initialization.
2456/// For the above case, we expect the following IR to be generated:
2457///
2458/// %struct.U = type { %struct.S*, %struct.S, %struct.U* }
2459/// %struct.S = type { i32, i32 }
2460/// @e = global %struct.U { %struct.S* gep inbounds (%struct.U, %struct.U* @e,
2461/// i64 0, i32 1),
2462/// %struct.S { i32 42, i32 84 }, %struct.U* @e }
2463/// The %struct.S { i32 42, i32 84 } inner initializer is treated as a complex
2464/// constant expression, while the other two elements of @e are "simple".
2465static void BatchCommitValueTo(const DenseMap<Constant*, Constant*> &Mem) {
2466 SmallVector<std::pair<GlobalVariable*, Constant*>, 32> GVs;
2467 SmallVector<std::pair<ConstantExpr*, Constant*>, 32> ComplexCEs;
2468 SmallVector<std::pair<ConstantExpr*, Constant*>, 32> SimpleCEs;
2469 SimpleCEs.reserve(Mem.size());
2470
2471 for (const auto &I : Mem) {
2472 if (auto *GV = dyn_cast<GlobalVariable>(I.first)) {
2473 GVs.push_back(std::make_pair(GV, I.second));
2474 } else {
2475 ConstantExpr *GEP = cast<ConstantExpr>(I.first);
2476 // We don't handle the deeply recursive case using the batch method.
2477 if (GEP->getNumOperands() > 3)
2478 ComplexCEs.push_back(std::make_pair(GEP, I.second));
2479 else
2480 SimpleCEs.push_back(std::make_pair(GEP, I.second));
2481 }
2482 }
2483
2484 // The algorithm below doesn't handle cases like nested structs, so use the
2485 // slower fully general method if we have to.
2486 for (auto ComplexCE : ComplexCEs)
2487 CommitValueTo(ComplexCE.second, ComplexCE.first);
2488
2489 for (auto GVPair : GVs) {
2490 assert(GVPair.first->hasInitializer());
2491 GVPair.first->setInitializer(GVPair.second);
2492 }
2493
2494 if (SimpleCEs.empty())
2495 return;
2496
2497 // We cache a single global's initializer elements in the case where the
2498 // subsequent address/val pair uses the same one. This avoids throwing away and
2499 // rebuilding the constant struct/vector/array just because one element is
2500 // modified at a time.
2501 SmallVector<Constant *, 32> Elts;
2502 Elts.reserve(SimpleCEs.size());
2503 GlobalVariable *CurrentGV = nullptr;
2504
2505 auto commitAndSetupCache = [&](GlobalVariable *GV, bool Update) {
2506 Constant *Init = GV->getInitializer();
2507 Type *Ty = Init->getType();
2508 if (Update) {
2509 if (CurrentGV) {
2510 assert(CurrentGV && "Expected a GV to commit to!");
2511 Type *CurrentInitTy = CurrentGV->getInitializer()->getType();
2512 // We have a valid cache that needs to be committed.
2513 if (StructType *STy = dyn_cast<StructType>(CurrentInitTy))
2514 CurrentGV->setInitializer(ConstantStruct::get(STy, Elts));
2515 else if (ArrayType *ArrTy = dyn_cast<ArrayType>(CurrentInitTy))
2516 CurrentGV->setInitializer(ConstantArray::get(ArrTy, Elts));
2517 else
2518 CurrentGV->setInitializer(ConstantVector::get(Elts));
2519 }
2520 if (CurrentGV == GV)
2521 return;
2522 // Need to clear and set up cache for new initializer.
2523 CurrentGV = GV;
2524 Elts.clear();
2525 unsigned NumElts;
2526 if (auto *STy = dyn_cast<StructType>(Ty))
2527 NumElts = STy->getNumElements();
2528 else
2529 NumElts = cast<SequentialType>(Ty)->getNumElements();
2530 for (unsigned i = 0, e = NumElts; i != e; ++i)
2531 Elts.push_back(Init->getAggregateElement(i));
2532 }
2533 };
2534
2535 for (auto CEPair : SimpleCEs) {
2536 ConstantExpr *GEP = CEPair.first;
2537 Constant *Val = CEPair.second;
2538
2539 GlobalVariable *GV = cast<GlobalVariable>(GEP->getOperand(0));
2540 commitAndSetupCache(GV, GV != CurrentGV);
2541 ConstantInt *CI = cast<ConstantInt>(GEP->getOperand(2));
2542 Elts[CI->getZExtValue()] = Val;
2543 }
2544 // The last initializer in the list still needs to be committed; all the
2545 // others were committed as soon as processing moved on to a new global.
2546 commitAndSetupCache(CurrentGV, true);
2547}
2548
James Molloyea31ad32015-11-13 11:05:07 +00002549/// Evaluate static constructors in the function, if we can. Return true if we
2550/// can, false otherwise.
Mehdi Amini46a43552015-03-04 18:43:29 +00002551static bool EvaluateStaticConstructor(Function *F, const DataLayout &DL,
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002552 TargetLibraryInfo *TLI) {
Chris Lattnerda1889b2005-09-27 04:27:01 +00002553 // Call the function.
Rafael Espindola37dc9e12014-02-21 00:06:31 +00002554 Evaluator Eval(DL, TLI);
Chris Lattner65a3a092005-09-27 04:45:34 +00002555 Constant *RetValDummy;
Nick Lewycky73be5e32012-02-19 23:26:27 +00002556 bool EvalSuccess = Eval.EvaluateFunction(F, RetValDummy,
2557 SmallVector<Constant*, 0>());
Jakub Staszak9525a772012-12-06 21:57:16 +00002558
Chris Lattnerda1889b2005-09-27 04:27:01 +00002559 if (EvalSuccess) {
Nico Weber4b2acde2014-05-02 18:35:25 +00002560 ++NumCtorsEvaluated;
2561
Chris Lattner6bf2cd52005-09-26 17:07:09 +00002562 // We succeeded at evaluation: commit the result.
Nicola Zaghend34e60c2018-05-14 12:53:11 +00002563 LLVM_DEBUG(dbgs() << "FULLY EVALUATED GLOBAL CTOR FUNCTION '"
2564 << F->getName() << "' to "
2565 << Eval.getMutatedMemory().size() << " stores.\n");
Amara Emerson93b0ff22018-01-31 23:56:07 +00002566 BatchCommitValueTo(Eval.getMutatedMemory());
Craig Topper46276792014-08-24 23:23:06 +00002567 for (GlobalVariable *GV : Eval.getInvariants())
2568 GV->setConstant(true);
Chris Lattner6bf2cd52005-09-26 17:07:09 +00002569 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002570
Chris Lattnerda1889b2005-09-27 04:27:01 +00002571 return EvalSuccess;
Chris Lattner99e23fa2005-09-26 04:44:35 +00002572}
2573
Benjamin Krameradf1ea82014-03-07 21:52:38 +00002574static int compareNames(Constant *const *A, Constant *const *B) {
Benjamin Kramer96f4b122016-03-15 14:18:26 +00002575 Value *AStripped = (*A)->stripPointerCastsNoFollowAliases();
2576 Value *BStripped = (*B)->stripPointerCastsNoFollowAliases();
2577 return AStripped->getName().compare(BStripped->getName());
Benjamin Krameradf1ea82014-03-07 21:52:38 +00002578}
2579
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002580static void setUsedInitializer(GlobalVariable &V,
Florian Hahna1cc8482018-06-12 11:16:56 +00002581 const SmallPtrSetImpl<GlobalValue *> &Init) {
Rafael Espindolac2bb73f2013-07-20 23:33:15 +00002582 if (Init.empty()) {
2583 V.eraseFromParent();
2584 return;
2585 }
2586
Matt Arsenaultda1deab2014-01-02 19:53:49 +00002587 // Type of pointer to the array of pointers.
2588 PointerType *Int8PtrTy = Type::getInt8PtrTy(V.getContext(), 0);
Rafael Espindola00752162013-05-09 17:22:59 +00002589
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002590 SmallVector<Constant *, 8> UsedArray;
Craig Topper71b7b682014-08-21 05:55:13 +00002591 for (GlobalValue *GV : Init) {
Matt Arsenaultda1deab2014-01-02 19:53:49 +00002592 Constant *Cast
Craig Topper71b7b682014-08-21 05:55:13 +00002593 = ConstantExpr::getPointerBitCastOrAddrSpaceCast(GV, Int8PtrTy);
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002594 UsedArray.push_back(Cast);
Rafael Espindola00752162013-05-09 17:22:59 +00002595 }
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002596 // Sort to get deterministic order.
Benjamin Krameradf1ea82014-03-07 21:52:38 +00002597 array_pod_sort(UsedArray.begin(), UsedArray.end(), compareNames);
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002598 ArrayType *ATy = ArrayType::get(Int8PtrTy, UsedArray.size());
Rafael Espindola00752162013-05-09 17:22:59 +00002599
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002600 Module *M = V.getParent();
2601 V.removeFromParent();
2602 GlobalVariable *NV =
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002603 new GlobalVariable(*M, ATy, false, GlobalValue::AppendingLinkage,
2604 ConstantArray::get(ATy, UsedArray), "");
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002605 NV->takeName(&V);
2606 NV->setSection("llvm.metadata");
2607 delete &V;
Rafael Espindola00752162013-05-09 17:22:59 +00002608}
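// For example (illustrative), if Init contains only the function @f, the
// rebuilt variable looks like:
//
//   @llvm.used = appending global [1 x i8*]
//                  [i8* bitcast (void ()* @f to i8*)], section "llvm.metadata"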
2609
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002610namespace {
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002611
James Molloyea31ad32015-11-13 11:05:07 +00002612/// An easy-to-access representation of llvm.used and llvm.compiler.used.
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002613class LLVMUsed {
2614 SmallPtrSet<GlobalValue *, 8> Used;
2615 SmallPtrSet<GlobalValue *, 8> CompilerUsed;
2616 GlobalVariable *UsedV;
2617 GlobalVariable *CompilerUsedV;
2618
2619public:
Rafael Espindolaec2375f2013-07-25 02:50:08 +00002620 LLVMUsed(Module &M) {
Rafael Espindola17600e22013-07-25 03:23:25 +00002621 UsedV = collectUsedGlobalVariables(M, Used, false);
2622 CompilerUsedV = collectUsedGlobalVariables(M, CompilerUsed, true);
Rafael Espindola00752162013-05-09 17:22:59 +00002623 }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002624
2625 using iterator = SmallPtrSet<GlobalValue *, 8>::iterator;
2626 using used_iterator_range = iterator_range<iterator>;
2627
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002628 iterator usedBegin() { return Used.begin(); }
2629 iterator usedEnd() { return Used.end(); }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002630
Craig Topper46276792014-08-24 23:23:06 +00002631 used_iterator_range used() {
2632 return used_iterator_range(usedBegin(), usedEnd());
2633 }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002634
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002635 iterator compilerUsedBegin() { return CompilerUsed.begin(); }
2636 iterator compilerUsedEnd() { return CompilerUsed.end(); }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002637
Craig Topper46276792014-08-24 23:23:06 +00002638 used_iterator_range compilerUsed() {
2639 return used_iterator_range(compilerUsedBegin(), compilerUsedEnd());
2640 }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002641
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002642 bool usedCount(GlobalValue *GV) const { return Used.count(GV); }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002643
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002644 bool compilerUsedCount(GlobalValue *GV) const {
2645 return CompilerUsed.count(GV);
2646 }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002647
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002648 bool usedErase(GlobalValue *GV) { return Used.erase(GV); }
2649 bool compilerUsedErase(GlobalValue *GV) { return CompilerUsed.erase(GV); }
David Blaikie70573dc2014-11-19 07:49:26 +00002650 bool usedInsert(GlobalValue *GV) { return Used.insert(GV).second; }
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002651
David Blaikie70573dc2014-11-19 07:49:26 +00002652 bool compilerUsedInsert(GlobalValue *GV) {
2653 return CompilerUsed.insert(GV).second;
2654 }
Rafael Espindola00752162013-05-09 17:22:59 +00002655
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002656 void syncVariablesAndSets() {
2657 if (UsedV)
2658 setUsedInitializer(*UsedV, Used);
2659 if (CompilerUsedV)
2660 setUsedInitializer(*CompilerUsedV, CompilerUsed);
2661 }
2662};
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002663
2664} // end anonymous namespace
Rafael Espindola00752162013-05-09 17:22:59 +00002665
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002666static bool hasUseOtherThanLLVMUsed(GlobalAlias &GA, const LLVMUsed &U) {
2667 if (GA.use_empty()) // No use at all.
2668 return false;
2669
2670 assert((!U.usedCount(&GA) || !U.compilerUsedCount(&GA)) &&
2671 "We should have removed the duplicated "
Rafael Espindola9aadcc42013-07-19 18:44:51 +00002672 "element from llvm.compiler.used");
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002673 if (!GA.hasOneUse())
2674 // Strictly more than one use. So at least one is in neither llvm.used nor
Rafael Espindola9aadcc42013-07-19 18:44:51 +00002675 // llvm.compiler.used.
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002676 return true;
2677
Rafael Espindola9aadcc42013-07-19 18:44:51 +00002678 // Exactly one use. Check if it is in llvm.used or llvm.compiler.used.
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002679 return !U.usedCount(&GA) && !U.compilerUsedCount(&GA);
Rafael Espindola00752162013-05-09 17:22:59 +00002680}
2681
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002682static bool hasMoreThanOneUseOtherThanLLVMUsed(GlobalValue &V,
2683 const LLVMUsed &U) {
2684 unsigned N = 2;
2685 assert((!U.usedCount(&V) || !U.compilerUsedCount(&V)) &&
2686 "We should have removed the duplicated "
Rafael Espindola9aadcc42013-07-19 18:44:51 +00002687 "element from llvm.compiler.used");
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002688 if (U.usedCount(&V) || U.compilerUsedCount(&V))
2689 ++N;
2690 return V.hasNUsesOrMore(N);
2691}
2692
2693static bool mayHaveOtherReferences(GlobalAlias &GA, const LLVMUsed &U) {
2694 if (!GA.hasLocalLinkage())
2695 return true;
2696
2697 return U.usedCount(&GA) || U.compilerUsedCount(&GA);
2698}
2699
Craig Topper71b7b682014-08-21 05:55:13 +00002700static bool hasUsesToReplace(GlobalAlias &GA, const LLVMUsed &U,
2701 bool &RenameTarget) {
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002702 RenameTarget = false;
Rafael Espindola00752162013-05-09 17:22:59 +00002703 bool Ret = false;
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002704 if (hasUseOtherThanLLVMUsed(GA, U))
Rafael Espindola00752162013-05-09 17:22:59 +00002705 Ret = true;
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002706
2707 // If the alias is externally visible, we may still be able to simplify it.
2708 if (!mayHaveOtherReferences(GA, U))
2709 return Ret;
2710
2711 // If the aliasee has internal linkage, give it the name and linkage
2712 // of the alias, and delete the alias. This turns:
2713 // define internal ... @f(...)
2714 // @a = alias ... @f
2715 // into:
2716 // define ... @a(...)
2717 Constant *Aliasee = GA.getAliasee();
2718 GlobalValue *Target = cast<GlobalValue>(Aliasee->stripPointerCasts());
2719 if (!Target->hasLocalLinkage())
2720 return Ret;
2721
2722 // Do not perform the transform if multiple aliases potentially target the
2723 // aliasee. This check also ensures that it is safe to replace the section
2724 // and other attributes of the aliasee with those of the alias.
2725 if (hasMoreThanOneUseOtherThanLLVMUsed(*Target, U))
2726 return Ret;
2727
2728 RenameTarget = true;
2729 return true;
Rafael Espindola00752162013-05-09 17:22:59 +00002730}
2731
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002732static bool
2733OptimizeGlobalAliases(Module &M,
Florian Hahna1cc8482018-06-12 11:16:56 +00002734 SmallPtrSetImpl<const Comdat *> &NotDiscardableComdats) {
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002735 bool Changed = false;
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002736 LLVMUsed Used(M);
2737
Craig Topper46276792014-08-24 23:23:06 +00002738 for (GlobalValue *GV : Used.used())
2739 Used.compilerUsedErase(GV);
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002740
Duncan Sands0bcf0852009-01-07 20:01:06 +00002741 for (Module::alias_iterator I = M.alias_begin(), E = M.alias_end();
Duncan Sandsb3f27882009-02-15 09:56:08 +00002742 I != E;) {
Rafael Espindola5349d872015-12-22 19:50:22 +00002743 GlobalAlias *J = &*I++;
2744
Duncan Sandsed722832009-03-06 10:21:56 +00002745 // Aliases without names cannot be referenced outside this module.
David Majnemer5c921152014-07-01 15:26:50 +00002746 if (!J->hasName() && !J->isDeclaration() && !J->hasLocalLinkage())
Duncan Sandsed722832009-03-06 10:21:56 +00002747 J->setLinkage(GlobalValue::InternalLinkage);
Rafael Espindola5349d872015-12-22 19:50:22 +00002748
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002749 if (deleteIfDead(*J, NotDiscardableComdats)) {
Rafael Espindola5349d872015-12-22 19:50:22 +00002750 Changed = true;
2751 continue;
2752 }
2753
Eric Christopher675dcf02018-02-22 23:12:11 +00002754 // If the alias can change at link time, nothing can be done - bail out.
Sanjoy Das5ce32722016-04-08 00:48:30 +00002755 if (J->isInterposable())
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002756 continue;
2757
Duncan Sandsb3f27882009-02-15 09:56:08 +00002758 Constant *Aliasee = J->getAliasee();
David Majnemer0e2cc2a2014-07-01 00:30:56 +00002759 GlobalValue *Target = dyn_cast<GlobalValue>(Aliasee->stripPointerCasts());
2760 // We can't trivially replace the alias with the aliasee if the aliasee is
2761 // non-trivial in some way.
2762 // TODO: Try to handle non-zero GEPs of local aliasees.
2763 if (!Target)
2764 continue;
Duncan Sands7a1db332009-02-18 17:55:38 +00002765 Target->removeDeadConstantUsers();
Duncan Sandsb3f27882009-02-15 09:56:08 +00002766
2767 // Make all users of the alias use the aliasee instead.
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002768 bool RenameTarget;
2769 if (!hasUsesToReplace(*J, Used, RenameTarget))
Rafael Espindola00752162013-05-09 17:22:59 +00002770 continue;
Duncan Sandsb3f27882009-02-15 09:56:08 +00002771
Rafael Espindola6b238632014-05-16 19:35:39 +00002772 J->replaceAllUsesWith(ConstantExpr::getBitCast(Aliasee, J->getType()));
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002773 ++NumAliasesResolved;
2774 Changed = true;
Duncan Sandsb3f27882009-02-15 09:56:08 +00002775
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002776 if (RenameTarget) {
Duncan Sands6a3df7b2009-12-08 10:10:20 +00002777 // Give the aliasee the name, linkage and other attributes of the alias.
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00002778 Target->takeName(&*J);
Duncan Sands6a3df7b2009-12-08 10:10:20 +00002779 Target->setLinkage(J->getLinkage());
Rafael Espindolae4b02312018-01-11 22:15:05 +00002780 Target->setDSOLocal(J->isDSOLocal());
Reid Kleckner22b19da2014-02-13 02:18:36 +00002781 Target->setVisibility(J->getVisibility());
2782 Target->setDLLStorageClass(J->getDLLStorageClass());
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002783
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00002784 if (Used.usedErase(&*J))
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002785 Used.usedInsert(Target);
2786
Duncan P. N. Exon Smith17323402015-10-13 17:51:03 +00002787 if (Used.compilerUsedErase(&*J))
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002788 Used.compilerUsedInsert(Target);
Rafael Espindola8d304802013-06-12 16:45:47 +00002789 } else if (mayHaveOtherReferences(*J, Used))
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002790 continue;
2791
Duncan Sandsb3f27882009-02-15 09:56:08 +00002792 // Delete the alias.
2793 M.getAliasList().erase(J);
2794 ++NumAliasesRemoved;
2795 Changed = true;
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002796 }
2797
Rafael Espindolaa82555c2013-06-11 17:48:06 +00002798 Used.syncVariablesAndSets();
2799
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002800 return Changed;
2801}
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002802
Nick Lewycky4b273cb2012-02-12 02:15:20 +00002803static Function *FindCXAAtExit(Module &M, TargetLibraryInfo *TLI) {
David L. Jonesd21529f2017-01-23 23:16:46 +00002804 LibFunc F = LibFunc_cxa_atexit;
Ahmed Bougachad765a822016-04-27 19:04:35 +00002805 if (!TLI->has(F))
Craig Topperf40110f2014-04-25 05:29:35 +00002806 return nullptr;
Nick Lewycky4b273cb2012-02-12 02:15:20 +00002807
Ahmed Bougachad765a822016-04-27 19:04:35 +00002808 Function *Fn = M.getFunction(TLI->getName(F));
Anders Carlssonee6bc702011-03-20 17:59:11 +00002809 if (!Fn)
Craig Topperf40110f2014-04-25 05:29:35 +00002810 return nullptr;
Nick Lewycky4b273cb2012-02-12 02:15:20 +00002811
Ahmed Bougachad765a822016-04-27 19:04:35 +00002812 // Make sure that the function has the correct prototype.
David L. Jonesd21529f2017-01-23 23:16:46 +00002813 if (!TLI->getLibFunc(*Fn, F) || F != LibFunc_cxa_atexit)
Craig Topperf40110f2014-04-25 05:29:35 +00002814 return nullptr;
Anders Carlssonee6bc702011-03-20 17:59:11 +00002815
2816 return Fn;
2817}
2818
James Molloyea31ad32015-11-13 11:05:07 +00002819/// Returns whether the given function is an empty C++ destructor and can
2820/// therefore be eliminated.
Anders Carlssonee6bc702011-03-20 17:59:11 +00002821/// Note that we assume that other optimization passes have already simplified
2822/// the code so we only look for a function with a single basic block, where
Benjamin Kramer1a4695a2012-02-09 16:28:15 +00002823/// the only allowed instructions are 'ret', 'call' to an empty C++ dtor and
2824/// other side-effect free instructions.
Anders Carlssonfcec2f52011-03-20 20:16:43 +00002825static bool cxxDtorIsEmpty(const Function &Fn,
2826 SmallPtrSet<const Function *, 8> &CalledFunctions) {
Anders Carlsson48a44912011-03-20 19:51:13 +00002827 // FIXME: We could eliminate C++ destructors if they're readonly/readnone and
Nick Lewyckyd0781832011-03-21 02:26:01 +00002828 // nounwind, but that doesn't seem worth doing.
Anders Carlsson48a44912011-03-20 19:51:13 +00002829 if (Fn.isDeclaration())
2830 return false;
Anders Carlssonee6bc702011-03-20 17:59:11 +00002831
2832 if (++Fn.begin() != Fn.end())
2833 return false;
2834
2835 const BasicBlock &EntryBlock = Fn.getEntryBlock();
2836 for (BasicBlock::const_iterator I = EntryBlock.begin(), E = EntryBlock.end();
2837 I != E; ++I) {
2838 if (const CallInst *CI = dyn_cast<CallInst>(I)) {
Anders Carlsson4dd420f2011-03-21 14:54:40 +00002839 // Ignore debug intrinsics.
2840 if (isa<DbgInfoIntrinsic>(CI))
2841 continue;
2842
Anders Carlssonee6bc702011-03-20 17:59:11 +00002843 const Function *CalledFn = CI->getCalledFunction();
2844
2845 if (!CalledFn)
2846 return false;
2847
Anders Carlsson1cc80732011-03-22 03:21:01 +00002848 SmallPtrSet<const Function *, 8> NewCalledFunctions(CalledFunctions);
2849
Anders Carlsson48a44912011-03-20 19:51:13 +00002850 // Don't treat recursive functions as empty.
David Blaikie70573dc2014-11-19 07:49:26 +00002851 if (!NewCalledFunctions.insert(CalledFn).second)
Anders Carlsson48a44912011-03-20 19:51:13 +00002852 return false;
2853
Anders Carlsson1cc80732011-03-22 03:21:01 +00002854 if (!cxxDtorIsEmpty(*CalledFn, NewCalledFunctions))
Anders Carlssonee6bc702011-03-20 17:59:11 +00002855 return false;
2856 } else if (isa<ReturnInst>(*I))
Benjamin Kramer487a3962012-02-09 14:26:06 +00002857 return true; // We're done.
2858 else if (I->mayHaveSideEffects())
2859 return false; // Destructor with side effects, bail.
Anders Carlssonee6bc702011-03-20 17:59:11 +00002860 }
2861
2862 return false;
2863}
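// For example (illustrative IR, names hypothetical): for a destructor that
// does nothing but return,
//
//   define linkonce_odr void @_ZN1SD1Ev(%struct.S* %this) {
//     ret void
//   }
//
// cxxDtorIsEmpty returns true, so a registration such as
//
//   call i32 @__cxa_atexit(
//       void (i8*)* bitcast (void (%struct.S*)* @_ZN1SD1Ev to void (i8*)*),
//       i8* bitcast (%struct.S* @s to i8*), i8* @__dso_handle)
//
// can be removed by OptimizeEmptyGlobalCXXDtors below.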
2864
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002865static bool OptimizeEmptyGlobalCXXDtors(Function *CXAAtExitFn) {
Anders Carlssonee6bc702011-03-20 17:59:11 +00002866 /// Itanium C++ ABI p3.3.5:
2867 ///
2868 /// After constructing a global (or local static) object, that will require
2869 /// destruction on exit, a termination function is registered as follows:
2870 ///
2871 /// extern "C" int __cxa_atexit ( void (*f)(void *), void *p, void *d );
2872 ///
2873 /// This registration, e.g. __cxa_atexit(f,p,d), is intended to cause the
2874 /// call f(p) when DSO d is unloaded, before all such termination calls
2875 /// registered before this one. It returns zero if registration is
Nick Lewyckyd0781832011-03-21 02:26:01 +00002876 /// successful, nonzero on failure.
Anders Carlssonee6bc702011-03-20 17:59:11 +00002877
2878 // This pass will look for calls to __cxa_atexit where the function is trivial
2879 // and remove them.
2880 bool Changed = false;
2881
Chandler Carruthcdf47882014-03-09 03:16:01 +00002882 for (auto I = CXAAtExitFn->user_begin(), E = CXAAtExitFn->user_end();
2883 I != E;) {
Anders Carlsson336fd902011-03-20 20:21:33 +00002884 // We're only interested in calls. Theoretically, we could handle invoke
2885 // instructions as well, but neither llvm-gcc nor clang generate invokes
2886 // to __cxa_atexit.
Anders Carlsson4dd420f2011-03-21 14:54:40 +00002887 CallInst *CI = dyn_cast<CallInst>(*I++);
2888 if (!CI)
Anders Carlsson336fd902011-03-20 20:21:33 +00002889 continue;
2890
Jakub Staszak9525a772012-12-06 21:57:16 +00002891 Function *DtorFn =
Anders Carlsson4dd420f2011-03-21 14:54:40 +00002892 dyn_cast<Function>(CI->getArgOperand(0)->stripPointerCasts());
Anders Carlssonee6bc702011-03-20 17:59:11 +00002893 if (!DtorFn)
2894 continue;
2895
Anders Carlssonfcec2f52011-03-20 20:16:43 +00002896 SmallPtrSet<const Function *, 8> CalledFunctions;
2897 if (!cxxDtorIsEmpty(*DtorFn, CalledFunctions))
Anders Carlssonee6bc702011-03-20 17:59:11 +00002898 continue;
2899
2900 // Just remove the call.
Anders Carlsson4dd420f2011-03-21 14:54:40 +00002901 CI->replaceAllUsesWith(Constant::getNullValue(CI->getType()));
2902 CI->eraseFromParent();
Anders Carlsson48a44912011-03-20 19:51:13 +00002903
Anders Carlssonee6bc702011-03-20 17:59:11 +00002904 ++NumCXXDtorsRemoved;
2905
2906 Changed |= true;
2907 }
2908
2909 return Changed;
2910}
2911
Justin Bogner1a075012016-04-26 00:28:01 +00002912static bool optimizeGlobalsInModule(
2913 Module &M, const DataLayout &DL, TargetLibraryInfo *TLI,
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002914 function_ref<TargetTransformInfo &(Function &)> GetTTI,
2915 function_ref<BlockFrequencyInfo &(Function &)> GetBFI,
Justin Bogner1a075012016-04-26 00:28:01 +00002916 function_ref<DominatorTree &(Function &)> LookupDomTree) {
Florian Hahna1cc8482018-06-12 11:16:56 +00002917 SmallPtrSet<const Comdat *, 8> NotDiscardableComdats;
Justin Bogner1a075012016-04-26 00:28:01 +00002918 bool Changed = false;
Chris Lattner25db5802004-10-07 04:16:33 +00002919 bool LocalChange = true;
2920 while (LocalChange) {
2921 LocalChange = false;
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002922
David Majnemer1b3b70e2014-10-08 07:23:31 +00002923 NotDiscardableComdats.clear();
2924 for (const GlobalVariable &GV : M.globals())
2925 if (const Comdat *C = GV.getComdat())
2926 if (!GV.isDiscardableIfUnused() || !GV.use_empty())
2927 NotDiscardableComdats.insert(C);
2928 for (Function &F : M)
2929 if (const Comdat *C = F.getComdat())
2930 if (!F.isDefTriviallyDead())
2931 NotDiscardableComdats.insert(C);
2932 for (GlobalAlias &GA : M.aliases())
2933 if (const Comdat *C = GA.getComdat())
2934 if (!GA.isDiscardableIfUnused() || !GA.use_empty())
2935 NotDiscardableComdats.insert(C);
2936
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002937 // Delete functions that are trivially dead, ccc -> fastcc
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002938 LocalChange |= OptimizeFunctions(M, TLI, GetTTI, GetBFI, LookupDomTree,
2939 NotDiscardableComdats);
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002940
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002941 // Optimize global_ctors list.
Richard Smithc167d652014-05-06 01:44:26 +00002942 LocalChange |= optimizeGlobalCtorsList(M, [&](Function *F) {
2943 return EvaluateStaticConstructor(F, DL, TLI);
2944 });
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002945
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002946 // Optimize non-address-taken globals.
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002947 LocalChange |= OptimizeGlobalVars(M, TLI, LookupDomTree,
2948 NotDiscardableComdats);
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002949
2950 // Resolve aliases, when possible.
Justin Bognerd2f3d0a2016-04-26 00:27:56 +00002951 LocalChange |= OptimizeGlobalAliases(M, NotDiscardableComdats);
Anders Carlssonee6bc702011-03-20 17:59:11 +00002952
Manman Renb3c52fb2013-05-14 21:52:44 +00002953 // Try to remove trivial global destructors if they are not removed
2954 // already.
2955 Function *CXAAtExitFn = FindCXAAtExit(M, TLI);
Anders Carlssonee6bc702011-03-20 17:59:11 +00002956 if (CXAAtExitFn)
2957 LocalChange |= OptimizeEmptyGlobalCXXDtors(CXAAtExitFn);
2958
Anton Korobeynikova9b60ee2008-09-09 19:04:59 +00002959 Changed |= LocalChange;
Chris Lattner25db5802004-10-07 04:16:33 +00002960 }
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002961
Chris Lattner41b6a5a2005-09-26 01:43:45 +00002962 // TODO: Move all global ctors functions to the end of the module for code
2963 // layout.
Mikhail Glushenkovcf2afe02010-10-18 21:16:00 +00002964
Chris Lattner25db5802004-10-07 04:16:33 +00002965 return Changed;
2966}
Justin Bogner1a075012016-04-26 00:28:01 +00002967
Sean Silvafd03ac62016-08-09 00:28:38 +00002968PreservedAnalyses GlobalOptPass::run(Module &M, ModuleAnalysisManager &AM) {
Justin Bogner1a075012016-04-26 00:28:01 +00002969 auto &DL = M.getDataLayout();
2970 auto &TLI = AM.getResult<TargetLibraryAnalysis>(M);
2971 auto &FAM =
2972 AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
2973 auto LookupDomTree = [&FAM](Function &F) -> DominatorTree &{
2974 return FAM.getResult<DominatorTreeAnalysis>(F);
2975 };
Zaara Syeda1f59ae32018-01-30 16:17:22 +00002976 auto GetTTI = [&FAM](Function &F) -> TargetTransformInfo & {
2977 return FAM.getResult<TargetIRAnalysis>(F);
2978 };
2979
2980 auto GetBFI = [&FAM](Function &F) -> BlockFrequencyInfo & {
2981 return FAM.getResult<BlockFrequencyAnalysis>(F);
2982 };
2983
2984 if (!optimizeGlobalsInModule(M, DL, &TLI, GetTTI, GetBFI, LookupDomTree))
Justin Bogner1a075012016-04-26 00:28:01 +00002985 return PreservedAnalyses::all();
2986 return PreservedAnalyses::none();
2987}
2988
2989namespace {
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002990
Justin Bogner1a075012016-04-26 00:28:01 +00002991struct GlobalOptLegacyPass : public ModulePass {
2992 static char ID; // Pass identification, replacement for typeid
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00002993
Justin Bogner1a075012016-04-26 00:28:01 +00002994 GlobalOptLegacyPass() : ModulePass(ID) {
2995 initializeGlobalOptLegacyPassPass(*PassRegistry::getPassRegistry());
2996 }
2997
2998 bool runOnModule(Module &M) override {
2999 if (skipModule(M))
3000 return false;
3001
3002 auto &DL = M.getDataLayout();
3003 auto *TLI = &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
3004 auto LookupDomTree = [this](Function &F) -> DominatorTree & {
3005 return this->getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
3006 };
Zaara Syeda1f59ae32018-01-30 16:17:22 +00003007 auto GetTTI = [this](Function &F) -> TargetTransformInfo & {
3008 return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
3009 };
3010
3011 auto GetBFI = [this](Function &F) -> BlockFrequencyInfo & {
3012 return this->getAnalysis<BlockFrequencyInfoWrapperPass>(F).getBFI();
3013 };
3014
3015 return optimizeGlobalsInModule(M, DL, TLI, GetTTI, GetBFI, LookupDomTree);
Justin Bogner1a075012016-04-26 00:28:01 +00003016 }
3017
3018 void getAnalysisUsage(AnalysisUsage &AU) const override {
3019 AU.addRequired<TargetLibraryInfoWrapperPass>();
Zaara Syeda1f59ae32018-01-30 16:17:22 +00003020 AU.addRequired<TargetTransformInfoWrapperPass>();
Justin Bogner1a075012016-04-26 00:28:01 +00003021 AU.addRequired<DominatorTreeWrapperPass>();
Zaara Syeda1f59ae32018-01-30 16:17:22 +00003022 AU.addRequired<BlockFrequencyInfoWrapperPass>();
Justin Bogner1a075012016-04-26 00:28:01 +00003023 }
3024};
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00003025
3026} // end anonymous namespace
Justin Bogner1a075012016-04-26 00:28:01 +00003027
3028char GlobalOptLegacyPass::ID = 0;
Eugene Zelenkoe9ea08a2017-10-10 22:49:55 +00003029
Justin Bogner1a075012016-04-26 00:28:01 +00003030INITIALIZE_PASS_BEGIN(GlobalOptLegacyPass, "globalopt",
3031 "Global Variable Optimizer", false, false)
3032INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
Zaara Syeda1f59ae32018-01-30 16:17:22 +00003033INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
3034INITIALIZE_PASS_DEPENDENCY(BlockFrequencyInfoWrapperPass)
Justin Bogner1a075012016-04-26 00:28:01 +00003035INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
3036INITIALIZE_PASS_END(GlobalOptLegacyPass, "globalopt",
3037 "Global Variable Optimizer", false, false)
3038
3039ModulePass *llvm::createGlobalOptimizerPass() {
3040 return new GlobalOptLegacyPass();
3041}