//===-- SelectionDAGBuilder.h - Selection-DAG building --------*- C++ -*---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This implements routines for translating from LLVM IR into SelectionDAG IR.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H
#define LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H

#include "StatepointLowering.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/SelectionDAGNodes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Statepoint.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetLowering.h"
#include <utility>
#include <vector>

namespace llvm {

class AddrSpaceCastInst;
class AllocaInst;
class BasicBlock;
class BitCastInst;
class BranchInst;
class CallInst;
class DbgValueInst;
class ExtractElementInst;
class ExtractValueInst;
class FCmpInst;
class FPExtInst;
class FPToSIInst;
class FPToUIInst;
class FPTruncInst;
class Function;
class FunctionLoweringInfo;
class GetElementPtrInst;
class GCFunctionInfo;
class ICmpInst;
class IntToPtrInst;
class IndirectBrInst;
class InvokeInst;
class InsertElementInst;
class InsertValueInst;
class Instruction;
class LoadInst;
class MachineBasicBlock;
class MachineInstr;
class MachineRegisterInfo;
class MDNode;
class MVT;
class PHINode;
class PtrToIntInst;
class ReturnInst;
class SDDbgValue;
class SExtInst;
class SelectInst;
class ShuffleVectorInst;
class SIToFPInst;
class StoreInst;
class SwitchInst;
class DataLayout;
class TargetLibraryInfo;
class TargetLowering;
class TruncInst;
class UIToFPInst;
class UnreachableInst;
class VAArgInst;
class ZExtInst;

//===----------------------------------------------------------------------===//
/// SelectionDAGBuilder - This is the common target-independent lowering
/// implementation that is parameterized by a TargetLowering object.
///
class SelectionDAGBuilder {
  /// CurInst - The current instruction being visited
  const Instruction *CurInst;

  DenseMap<const Value*, SDValue> NodeMap;

  /// UnusedArgNodeMap - Maps argument value for unused arguments. This is used
  /// to preserve debug information for incoming arguments.
  DenseMap<const Value*, SDValue> UnusedArgNodeMap;

  /// DanglingDebugInfo - Helper type for DanglingDebugInfoMap.
  class DanglingDebugInfo {
    const DbgValueInst* DI;
    DebugLoc dl;
    unsigned SDNodeOrder;
  public:
    DanglingDebugInfo() : DI(nullptr), dl(DebugLoc()), SDNodeOrder(0) { }
    DanglingDebugInfo(const DbgValueInst *di, DebugLoc DL, unsigned SDNO)
        : DI(di), dl(std::move(DL)), SDNodeOrder(SDNO) {}
    const DbgValueInst* getDI() { return DI; }
    DebugLoc getdl() { return dl; }
    unsigned getSDNodeOrder() { return SDNodeOrder; }
  };

  /// DanglingDebugInfoMap - Keeps track of dbg_values for which we have not
  /// yet seen the referent. We defer handling these until we do see it.
  DenseMap<const Value*, DanglingDebugInfo> DanglingDebugInfoMap;

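  // Illustrative sketch (assumption, not part of the original header): debug
  // intrinsics take their operand as metadata, so a dbg.value may name a value
  // whose SDNode has not been built yet, e.g.
  //
  //   call void @llvm.dbg.value(metadata i32 %x, ...)  ; %x not lowered yet
  //   ...
  //   %x = add i32 %a, %b
  //
  // Such entries are parked in DanglingDebugInfoMap and turned into real
  // SDDbgValue nodes by resolveDanglingDebugInfo() once setValue() records an
  // SDValue for %x. The IR above is hypothetical.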
public:
  /// PendingLoads - Loads are not emitted to the program immediately. We bunch
  /// them up and then emit token factor nodes when possible. This allows us to
  /// get simple disambiguation between loads without worrying about alias
  /// analysis.
  SmallVector<SDValue, 8> PendingLoads;

  /// State used while lowering a statepoint sequence (gc_statepoint,
  /// gc_relocate, and gc_result). See StatepointLowering.h/cpp for details.
  StatepointLoweringState StatepointLowering;
private:

  /// PendingExports - CopyToReg nodes that copy values to virtual registers
  /// for export to other blocks need to be emitted before any terminator
  /// instruction, but they have no other ordering requirements. We bunch them
  /// up and then emit a single tokenfactor for them just before terminator
  /// instructions.
  SmallVector<SDValue, 8> PendingExports;

  /// SDNodeOrder - A unique monotonically increasing number used to order the
  /// SDNodes we create.
  unsigned SDNodeOrder;

  enum CaseClusterKind {
    /// A cluster of adjacent case labels with the same destination, or just one
    /// case.
    CC_Range,
    /// A cluster of cases suitable for jump table lowering.
    CC_JumpTable,
    /// A cluster of cases suitable for bit test lowering.
    CC_BitTests
  };

  /// A cluster of case labels.
  struct CaseCluster {
    CaseClusterKind Kind;
    const ConstantInt *Low, *High;
    union {
      MachineBasicBlock *MBB;
      unsigned JTCasesIndex;
      unsigned BTCasesIndex;
    };
    BranchProbability Prob;

    static CaseCluster range(const ConstantInt *Low, const ConstantInt *High,
                             MachineBasicBlock *MBB, BranchProbability Prob) {
      CaseCluster C;
      C.Kind = CC_Range;
      C.Low = Low;
      C.High = High;
      C.MBB = MBB;
      C.Prob = Prob;
      return C;
    }

    static CaseCluster jumpTable(const ConstantInt *Low,
                                 const ConstantInt *High, unsigned JTCasesIndex,
                                 BranchProbability Prob) {
      CaseCluster C;
      C.Kind = CC_JumpTable;
      C.Low = Low;
      C.High = High;
      C.JTCasesIndex = JTCasesIndex;
      C.Prob = Prob;
      return C;
    }

    static CaseCluster bitTests(const ConstantInt *Low, const ConstantInt *High,
                                unsigned BTCasesIndex, BranchProbability Prob) {
      CaseCluster C;
      C.Kind = CC_BitTests;
      C.Low = Low;
      C.High = High;
      C.BTCasesIndex = BTCasesIndex;
      C.Prob = Prob;
      return C;
    }
  };

  typedef std::vector<CaseCluster> CaseClusterVector;
  typedef CaseClusterVector::iterator CaseClusterIt;

  struct CaseBits {
    uint64_t Mask;
    MachineBasicBlock* BB;
    unsigned Bits;
    BranchProbability ExtraProb;

    CaseBits(uint64_t mask, MachineBasicBlock* bb, unsigned bits,
             BranchProbability Prob):
      Mask(mask), BB(bb), Bits(bits), ExtraProb(Prob) { }

    CaseBits() : Mask(0), BB(nullptr), Bits(0) {}
  };

  typedef std::vector<CaseBits> CaseBitsVector;

  /// Sort Clusters and merge adjacent cases.
  void sortAndRangeify(CaseClusterVector &Clusters);
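  // Illustrative sketch (assumption, not part of the original header) of what
  // "merge adjacent cases" means.  For a switch such as
  //
  //   switch (x) { case 1: case 2: goto A; case 3: goto B; }
  //
  // the three single-case clusters collapse into two CC_Range clusters,
  // roughly:
  //
  //   CaseCluster::range(CI1, CI2, A_MBB, ProbA);  // cases 1 and 2 share A
  //   CaseCluster::range(CI3, CI3, B_MBB, ProbB);
  //
  // where CI1/CI2/CI3 stand for the ConstantInt case values and the blocks
  // and probabilities are placeholders.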

  /// CaseBlock - This structure is used to communicate between
  /// SelectionDAGBuilder and SDISel for the code generation of additional basic
  /// blocks needed by multi-case switch statements.
  struct CaseBlock {
    CaseBlock(ISD::CondCode cc, const Value *cmplhs, const Value *cmprhs,
              const Value *cmpmiddle, MachineBasicBlock *truebb,
              MachineBasicBlock *falsebb, MachineBasicBlock *me,
              BranchProbability trueprob = BranchProbability::getUnknown(),
              BranchProbability falseprob = BranchProbability::getUnknown())
      : CC(cc), CmpLHS(cmplhs), CmpMHS(cmpmiddle), CmpRHS(cmprhs),
        TrueBB(truebb), FalseBB(falsebb), ThisBB(me), TrueProb(trueprob),
        FalseProb(falseprob) {}

    // CC - the condition code to use for the case block's setcc node
    ISD::CondCode CC;

    // CmpLHS/CmpRHS/CmpMHS - The LHS/MHS/RHS of the comparison to emit.
    // Emit by default LHS op RHS. MHS is used for range comparisons:
    // If MHS is not null: (LHS <= MHS) and (MHS <= RHS).
    const Value *CmpLHS, *CmpMHS, *CmpRHS;

    // TrueBB/FalseBB - the block to branch to if the setcc is true/false.
    MachineBasicBlock *TrueBB, *FalseBB;

    // ThisBB - the block into which to emit the code for the setcc and branches
    MachineBasicBlock *ThisBB;

    // TrueProb/FalseProb - branch probabilities.
    BranchProbability TrueProb, FalseProb;
  };
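  // Illustrative sketch (assumption, placeholders only): a merged case range
  // [3, 7] branching to RangeMBB could be described roughly as
  //
  //   CaseBlock CB(ISD::SETLE, /*cmplhs*/ Lo3, /*cmprhs*/ Hi7,
  //                /*cmpmiddle*/ SwitchCond, RangeMBB, DefaultMBB, CurMBB);
  //
  // which encodes the range test (3 <= SwitchCond) && (SwitchCond <= 7), with
  // the switch condition carried in CmpMHS.  Lo3/Hi7/SwitchCond and the MBBs
  // are hypothetical names, not values defined in this file.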

  struct JumpTable {
    JumpTable(unsigned R, unsigned J, MachineBasicBlock *M,
              MachineBasicBlock *D): Reg(R), JTI(J), MBB(M), Default(D) {}

    /// Reg - the virtual register containing the index of the jump table entry
    /// to jump to.
    unsigned Reg;
    /// JTI - the JumpTableIndex for this jump table in the function.
    unsigned JTI;
    /// MBB - the MBB into which to emit the code for the indirect jump.
    MachineBasicBlock *MBB;
    /// Default - the MBB of the default bb, which is a successor of the range
    /// check MBB. This is used when updating PHI nodes in successors.
    MachineBasicBlock *Default;
  };
  struct JumpTableHeader {
    JumpTableHeader(APInt F, APInt L, const Value *SV, MachineBasicBlock *H,
                    bool E = false)
        : First(std::move(F)), Last(std::move(L)), SValue(SV), HeaderBB(H),
          Emitted(E) {}
    APInt First;
    APInt Last;
    const Value *SValue;
    MachineBasicBlock *HeaderBB;
    bool Emitted;
  };
  typedef std::pair<JumpTableHeader, JumpTable> JumpTableBlock;

  struct BitTestCase {
    BitTestCase(uint64_t M, MachineBasicBlock* T, MachineBasicBlock* Tr,
                BranchProbability Prob):
      Mask(M), ThisBB(T), TargetBB(Tr), ExtraProb(Prob) { }
    uint64_t Mask;
    MachineBasicBlock *ThisBB;
    MachineBasicBlock *TargetBB;
    BranchProbability ExtraProb;
  };

  typedef SmallVector<BitTestCase, 3> BitTestInfo;

  struct BitTestBlock {
    BitTestBlock(APInt F, APInt R, const Value *SV, unsigned Rg, MVT RgVT,
                 bool E, bool CR, MachineBasicBlock *P, MachineBasicBlock *D,
                 BitTestInfo C, BranchProbability Pr)
        : First(std::move(F)), Range(std::move(R)), SValue(SV), Reg(Rg),
          RegVT(RgVT), Emitted(E), ContiguousRange(CR), Parent(P), Default(D),
          Cases(std::move(C)), Prob(Pr) {}
    APInt First;
    APInt Range;
    const Value *SValue;
    unsigned Reg;
    MVT RegVT;
    bool Emitted;
    bool ContiguousRange;
    MachineBasicBlock *Parent;
    MachineBasicBlock *Default;
    BitTestInfo Cases;
    BranchProbability Prob;
    BranchProbability DefaultProb;
  };

  /// Check whether a range of clusters is dense enough for a jump table.
  bool isDense(const CaseClusterVector &Clusters, unsigned *TotalCases,
               unsigned First, unsigned Last, unsigned MinDensity);

  /// Build a jump table cluster from Clusters[First..Last]. Returns false if it
  /// decides it's not a good idea.
  bool buildJumpTable(CaseClusterVector &Clusters, unsigned First,
                      unsigned Last, const SwitchInst *SI,
                      MachineBasicBlock *DefaultMBB, CaseCluster &JTCluster);

  /// Find clusters of cases suitable for jump table lowering.
  void findJumpTables(CaseClusterVector &Clusters, const SwitchInst *SI,
                      MachineBasicBlock *DefaultMBB);

  /// Check whether the range [Low,High] fits in a machine word.
  bool rangeFitsInWord(const APInt &Low, const APInt &High);

  /// Check whether these clusters are suitable for lowering with bit tests
  /// based on the number of destinations, comparison metric, and range.
  bool isSuitableForBitTests(unsigned NumDests, unsigned NumCmps,
                             const APInt &Low, const APInt &High);

  /// Build a bit test cluster from Clusters[First..Last]. Returns false if it
  /// decides it's not a good idea.
  bool buildBitTests(CaseClusterVector &Clusters, unsigned First, unsigned Last,
                     const SwitchInst *SI, CaseCluster &BTCluster);

  /// Find clusters of cases suitable for bit test lowering.
  void findBitTestClusters(CaseClusterVector &Clusters, const SwitchInst *SI);
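  // Illustrative sketch (assumption, not from the original header) of the bit
  // test idea: for cases {0, 3, 5} that all branch to BB_A and fit in one
  // machine word, several cases can be tested with a single mask instead of
  // one comparison per case:
  //
  //   Mask = (1ULL << 0) | (1ULL << 3) | (1ULL << 5);   // 0b101001
  //   if ((1ULL << (x - Low)) & Mask) goto BB_A;
  //
  // BB_A, x and Low are placeholders; BitTestCase::Mask holds such a mask.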

  struct SwitchWorkListItem {
    MachineBasicBlock *MBB;
    CaseClusterIt FirstCluster;
    CaseClusterIt LastCluster;
    const ConstantInt *GE;
    const ConstantInt *LT;
    BranchProbability DefaultProb;
  };
  typedef SmallVector<SwitchWorkListItem, 4> SwitchWorkList;

  /// Determine the rank by probability of CC in [First,Last]. If CC has more
  /// probability than each cluster in the range, its rank is 0.
  static unsigned caseClusterRank(const CaseCluster &CC, CaseClusterIt First,
                                  CaseClusterIt Last);

  /// Emit comparison and split W into two subtrees.
  void splitWorkItem(SwitchWorkList &WorkList, const SwitchWorkListItem &W,
                     Value *Cond, MachineBasicBlock *SwitchMBB);

  /// Lower W.
  void lowerWorkItem(SwitchWorkListItem W, Value *Cond,
                     MachineBasicBlock *SwitchMBB,
                     MachineBasicBlock *DefaultMBB);


  /// A class which encapsulates all of the information needed to generate a
  /// stack protector check and signals to isel via its state being initialized
  /// that a stack protector needs to be generated.
  ///
  /// *NOTE* The following is a high level documentation of SelectionDAG Stack
  /// Protector Generation. The reason that it is placed here is for a lack of
  /// other good places to stick it.
  ///
  /// High Level Overview of SelectionDAG Stack Protector Generation:
  ///
  /// Previously, generation of stack protectors was done exclusively in the
  /// pre-SelectionDAG Codegen LLVM IR Pass "Stack Protector". This necessitated
  /// splitting basic blocks at the IR level to create the success/failure basic
  /// blocks in the tail of the basic block in question. As a result of this,
  /// calls that would have qualified for the sibling call optimization were no
  /// longer eligible for optimization since said calls were no longer right in
  /// the "tail position" (i.e. the immediate predecessor of a ReturnInst
  /// instruction).
  ///
  /// Then it was noticed that since the sibling call optimization causes the
  /// callee to reuse the caller's stack, if we could delay the generation of
  /// the stack protector check until later in CodeGen after the sibling call
  /// decision was made, we get both the tail call optimization and the stack
  /// protector check!
  ///
  /// A few goals in solving this problem were:
  ///
  ///   1. Preserve the architecture independence of stack protector generation.
  ///
  ///   2. Preserve the normal IR level stack protector check for platforms like
  ///      OpenBSD for which we support platform-specific stack protector
  ///      generation.
  ///
  /// The main problem that guided the present solution is that one can not
  /// solve this problem in an architecture independent manner at the IR level
  /// only. This is because:
  ///
  ///   1. The decision on whether or not to perform a sibling call on certain
  ///      platforms (for instance i386) requires lower level information
  ///      related to available registers that can not be known at the IR level.
  ///
  ///   2. Even if the previous point were not true, the decision on whether to
  ///      perform a tail call is done in LowerCallTo in SelectionDAG which
  ///      occurs after the Stack Protector Pass. As a result, one would need to
  ///      put the relevant callinst into the stack protector check success
  ///      basic block (where the return inst is placed) and then move it back
  ///      later at SelectionDAG/MI time before the stack protector check if the
  ///      tail call optimization failed. The MI level option was nixed
  ///      immediately since it would require platform-specific pattern
  ///      matching. The SelectionDAG level option was nixed because
  ///      SelectionDAG only processes one IR level basic block at a time
  ///      implying one could not create a DAG Combine to move the callinst.
  ///
  /// To get around this problem a few things were realized:
  ///
  ///   1. While one can not handle multiple IR level basic blocks at the
  ///      SelectionDAG Level, one can generate multiple machine basic blocks
  ///      for one IR level basic block. This is how we handle bit tests and
  ///      switches.
  ///
  ///   2. At the MI level, tail calls are represented via a special return
  ///      MIInst called "tcreturn". Thus if we know the basic block in which we
  ///      wish to insert the stack protector check, we get the correct behavior
  ///      by always inserting the stack protector check right before the return
  ///      statement. This is a "magical transformation" since no matter where
  ///      the stack protector check intrinsic is, we always insert the stack
  ///      protector check code at the end of the BB.
  ///
  /// Given the aforementioned constraints, the following solution was devised:
  ///
  ///   1. On platforms that do not support SelectionDAG stack protector check
  ///      generation, allow for the normal IR level stack protector check
  ///      generation to continue.
  ///
  ///   2. On platforms that do support SelectionDAG stack protector check
  ///      generation:
  ///
  ///     a. Use the IR level stack protector pass to decide if a stack
  ///        protector is required/which BB we insert the stack protector check
  ///        in by reusing the logic already therein. If we wish to generate a
  ///        stack protector check in a basic block, we place a special IR
  ///        intrinsic called llvm.stackprotectorcheck right before the BB's
  ///        returninst or if there is a callinst that could potentially be
  ///        sibling call optimized, before the call inst.
  ///
  ///     b. Then when a BB with said intrinsic is processed, we codegen the BB
  ///        normally via SelectBasicBlock. In said process, when we visit the
  ///        stack protector check, we do not actually emit anything into the
  ///        BB. Instead, we just initialize the stack protector descriptor
  ///        class (which involves stashing information/creating the success
  ///        mbb and the failure mbb if we have not created one for this
  ///        function yet) and export the guard variable that we are going to
  ///        compare.
  ///
  ///     c. After we finish selecting the basic block, in FinishBasicBlock if
  ///        the StackProtectorDescriptor attached to the SelectionDAGBuilder is
  ///        initialized, we first find a splice point in the parent basic block
  ///        before the terminator and then splice the terminator of said basic
  ///        block into the success basic block. Then we code-gen a new tail for
  ///        the parent basic block consisting of the two loads, the comparison,
  ///        and finally two branches to the success/failure basic blocks. We
  ///        conclude by code-gening the failure basic block if we have not
  ///        code-gened it already (all stack protector checks we generate in
  ///        the same function use the same failure basic block).
  class StackProtectorDescriptor {
  public:
    StackProtectorDescriptor()
        : ParentMBB(nullptr), SuccessMBB(nullptr), FailureMBB(nullptr) {}

    /// Returns true if all fields of the stack protector descriptor are
    /// initialized implying that we should/are ready to emit a stack protector.
    bool shouldEmitStackProtector() const {
      return ParentMBB && SuccessMBB && FailureMBB;
    }

    /// Initialize the stack protector descriptor structure for a new basic
    /// block.
    void initialize(const BasicBlock *BB, MachineBasicBlock *MBB) {
      // Make sure we are not initialized yet.
      assert(!shouldEmitStackProtector() && "Stack Protector Descriptor is "
             "already initialized!");
      ParentMBB = MBB;
      SuccessMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ true);
      FailureMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ false, FailureMBB);
    }

    /// Reset state that changes when we handle different basic blocks.
    ///
    /// This currently includes:
    ///
    /// 1. The specific basic block we are generating a
    /// stack protector for (ParentMBB).
    ///
    /// 2. The successor machine basic block that will contain the tail of
    /// parent mbb after we create the stack protector check (SuccessMBB). This
    /// BB is visited only on stack protector check success.
    void resetPerBBState() {
      ParentMBB = nullptr;
      SuccessMBB = nullptr;
    }

    /// Reset state that only changes when we switch functions.
    ///
    /// This currently includes:
    ///
    /// 1. FailureMBB since we reuse the failure code path for all stack
    /// protector checks created in an individual function.
    ///
    /// 2. The guard variable since the guard variable we are checking against
    /// is always the same.
    void resetPerFunctionState() {
      FailureMBB = nullptr;
    }

    MachineBasicBlock *getParentMBB() { return ParentMBB; }
    MachineBasicBlock *getSuccessMBB() { return SuccessMBB; }
    MachineBasicBlock *getFailureMBB() { return FailureMBB; }

  private:
    /// The basic block for which we are generating the stack protector.
    ///
    /// As a result of stack protector generation, we will splice the
    /// terminators of this basic block into the successor mbb SuccessMBB and
    /// replace it with a compare/branch to the successor mbbs
    /// SuccessMBB/FailureMBB depending on whether or not the stack protector
    /// was violated.
    MachineBasicBlock *ParentMBB;

    /// A basic block visited on stack protector check success that contains the
    /// terminators of ParentMBB.
    MachineBasicBlock *SuccessMBB;

    /// The basic block visited on stack protector check failure; it will
    /// contain a call to __stack_chk_fail().
    MachineBasicBlock *FailureMBB;

    /// Add a successor machine basic block to ParentMBB. If the successor mbb
    /// has not been created yet (i.e. if SuccMBB == nullptr), then the machine
    /// basic block will be created. Assign a large weight if IsLikely is true.
    MachineBasicBlock *AddSuccessorMBB(const BasicBlock *BB,
                                       MachineBasicBlock *ParentMBB,
                                       bool IsLikely,
                                       MachineBasicBlock *SuccMBB = nullptr);
  };

private:
  const TargetMachine &TM;
public:
  /// Lowest valid SDNodeOrder. The special case 0 is reserved for scheduling
  /// nodes without a corresponding SDNode.
  static const unsigned LowestSDNodeOrder = 1;

  SelectionDAG &DAG;
  const DataLayout *DL;
  AliasAnalysis *AA;
  const TargetLibraryInfo *LibInfo;

  /// SwitchCases - Vector of CaseBlock structures used to communicate
  /// SwitchInst code generation information.
  std::vector<CaseBlock> SwitchCases;
  /// JTCases - Vector of JumpTable structures used to communicate
  /// SwitchInst code generation information.
  std::vector<JumpTableBlock> JTCases;
  /// BitTestCases - Vector of BitTestBlock structures used to communicate
  /// SwitchInst code generation information.
  std::vector<BitTestBlock> BitTestCases;
  /// A StackProtectorDescriptor structure used to communicate stack protector
  /// information in between SelectBasicBlock and FinishBasicBlock.
  StackProtectorDescriptor SPDescriptor;

  // Emit PHI-node-operand constants only once even if used by multiple
  // PHI nodes.
  DenseMap<const Constant *, unsigned> ConstantsOut;

  /// FuncInfo - Information about the function as a whole.
  ///
  FunctionLoweringInfo &FuncInfo;

  /// GFI - Garbage collection metadata for the function.
  GCFunctionInfo *GFI;

  /// LPadToCallSiteMap - Map a landing pad to the call site indexes.
  DenseMap<MachineBasicBlock*, SmallVector<unsigned, 4> > LPadToCallSiteMap;

  /// HasTailCall - This is set to true if a call in the current
  /// block has been translated as a tail call. In this case,
  /// no subsequent DAG nodes should be created.
  ///
  bool HasTailCall;

  LLVMContext *Context;

  SelectionDAGBuilder(SelectionDAG &dag, FunctionLoweringInfo &funcinfo,
                      CodeGenOpt::Level ol)
    : CurInst(nullptr), SDNodeOrder(LowestSDNodeOrder), TM(dag.getTarget()),
      DAG(dag), FuncInfo(funcinfo),
      HasTailCall(false) {
  }

  void init(GCFunctionInfo *gfi, AliasAnalysis &aa,
            const TargetLibraryInfo *li);

  /// clear - Clear out the current SelectionDAG and the associated
  /// state and prepare this SelectionDAGBuilder object to be used
  /// for a new block. This doesn't clear out information about
  /// additional blocks that are needed to complete switch lowering
  /// or PHI node updating; that information is cleared out as it is
  /// consumed.
  void clear();

  /// clearDanglingDebugInfo - Clear the dangling debug information
  /// map. This function is separated from the clear so that debug
  /// information that is dangling in a basic block can be properly
  /// resolved in a different basic block. This allows the
  /// SelectionDAG to resolve dangling debug information attached
  /// to PHI nodes.
  void clearDanglingDebugInfo();

  /// getRoot - Return the current virtual root of the Selection DAG,
  /// flushing any PendingLoad items. This must be done before emitting
  /// a store or any other node that may need to be ordered after any
  /// prior load instructions.
  ///
  SDValue getRoot();

  /// getControlRoot - Similar to getRoot, but instead of flushing all the
  /// PendingLoad items, flush all the PendingExports items. It is necessary
  /// to do this before emitting a terminator instruction.
  ///
  SDValue getControlRoot();
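  // Illustrative sketch (assumption): a visit method that emits a store would
  // typically do something like
  //
  //   SDValue Chain = getRoot();          // flushes PendingLoads first
  //   SDValue St = DAG.getStore(Chain, getCurSDLoc(), Val, Ptr, PtrInfo);
  //   DAG.setRoot(St);
  //
  // whereas a terminator (branch, return, ...) starts from getControlRoot()
  // so that PendingExports are ordered before it. Val/Ptr/PtrInfo are
  // placeholder operands, not code from this file.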

  SDLoc getCurSDLoc() const {
    return SDLoc(CurInst, SDNodeOrder);
  }

  DebugLoc getCurDebugLoc() const {
    return CurInst ? CurInst->getDebugLoc() : DebugLoc();
  }

  unsigned getSDNodeOrder() const { return SDNodeOrder; }

  void CopyValueToVirtualRegister(const Value *V, unsigned Reg);

  void visit(const Instruction &I);

  void visit(unsigned Opcode, const User &I);

  /// getCopyFromRegs - If there was a virtual register allocated for the value
  /// V, emit a CopyFromReg of the specified type Ty. Return an empty SDValue()
  /// otherwise.
  SDValue getCopyFromRegs(const Value *V, Type *Ty);

  // resolveDanglingDebugInfo - if we saw an earlier dbg_value referring to V,
  // generate the debug data structures now that we've seen its definition.
  void resolveDanglingDebugInfo(const Value *V, SDValue Val);
  SDValue getValue(const Value *V);
  bool findValue(const Value *V) const;

  SDValue getNonRegisterValue(const Value *V);
  SDValue getValueImpl(const Value *V);

  void setValue(const Value *V, SDValue NewN) {
    SDValue &N = NodeMap[V];
    assert(!N.getNode() && "Already set a value for this node!");
    N = NewN;
  }

  void setUnusedArgValue(const Value *V, SDValue NewN) {
    SDValue &N = UnusedArgNodeMap[V];
    assert(!N.getNode() && "Already set a value for this node!");
    N = NewN;
  }

  void FindMergedConditions(const Value *Cond, MachineBasicBlock *TBB,
                            MachineBasicBlock *FBB, MachineBasicBlock *CurBB,
                            MachineBasicBlock *SwitchBB,
                            Instruction::BinaryOps Opc, BranchProbability TW,
                            BranchProbability FW);
  void EmitBranchForMergedCondition(const Value *Cond, MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    MachineBasicBlock *CurBB,
                                    MachineBasicBlock *SwitchBB,
                                    BranchProbability TW, BranchProbability FW);
  bool ShouldEmitAsBranches(const std::vector<CaseBlock> &Cases);
  bool isExportableFromCurrentBlock(const Value *V, const BasicBlock *FromBB);
  void CopyToExportRegsIfNeeded(const Value *V);
  void ExportFromCurrentBlock(const Value *V);
  void LowerCallTo(ImmutableCallSite CS, SDValue Callee, bool IsTailCall,
                   const BasicBlock *EHPadBB = nullptr);

  // Lower range metadata from 0 to N to assert zext to an integer of the
  // nearest floor power of two.
  SDValue lowerRangeToAssertZExt(SelectionDAG &DAG, const Instruction &I,
                                 SDValue Op);

  void populateCallLoweringInfo(TargetLowering::CallLoweringInfo &CLI,
                                ImmutableCallSite CS, unsigned ArgIdx,
                                unsigned NumArgs, SDValue Callee,
                                Type *ReturnTy, bool IsPatchPoint);

  std::pair<SDValue, SDValue>
  lowerInvokable(TargetLowering::CallLoweringInfo &CLI,
                 const BasicBlock *EHPadBB = nullptr);

  /// UpdateSplitBlock - When an MBB was split during scheduling, update the
  /// references that need to refer to the last resulting block.
  void UpdateSplitBlock(MachineBasicBlock *First, MachineBasicBlock *Last);

  /// Describes a gc.statepoint or a gc.statepoint-like thing for the purposes
  /// of lowering into a STATEPOINT node.
  struct StatepointLoweringInfo {
    /// Bases[i] is the base pointer for Ptrs[i]. Together they denote the set
    /// of gc pointers this STATEPOINT has to relocate.
    SmallVector<const Value *, 16> Bases;
    SmallVector<const Value *, 16> Ptrs;

    /// The set of gc.relocate calls associated with this gc.statepoint.
    SmallVector<const GCRelocateInst *, 16> GCRelocates;

    /// The full list of gc arguments to the gc.statepoint being lowered.
    ArrayRef<const Use> GCArgs;

    /// The gc.statepoint instruction.
    const Instruction *StatepointInstr = nullptr;

    /// The list of gc transition arguments present in the gc.statepoint being
    /// lowered.
    ArrayRef<const Use> GCTransitionArgs;

    /// The ID that the resulting STATEPOINT instruction has to report.
    unsigned ID = -1;

    /// Information regarding the underlying call instruction.
    TargetLowering::CallLoweringInfo CLI;

    /// The deoptimization state associated with this gc.statepoint call, if
    /// any.
    ArrayRef<const Use> DeoptState;

    /// Flags associated with the meta arguments being lowered.
    uint64_t StatepointFlags = -1;

    /// The number of patchable bytes the call needs to get lowered into.
    unsigned NumPatchBytes = -1;

    /// The exception handling unwind destination, in case this represents an
    /// invoke of gc.statepoint.
    const BasicBlock *EHPadBB = nullptr;

    explicit StatepointLoweringInfo(SelectionDAG &DAG) : CLI(DAG) {}
  };

  /// Lower \p SLI into a STATEPOINT instruction.
  SDValue LowerAsSTATEPOINT(StatepointLoweringInfo &SLI);

  // This function is responsible for the whole statepoint lowering process.
  // It uniformly handles invoke and call statepoints.
  void LowerStatepoint(ImmutableStatepoint Statepoint,
                       const BasicBlock *EHPadBB = nullptr);

  void LowerCallSiteWithDeoptBundle(ImmutableCallSite CS, SDValue Callee,
                                    const BasicBlock *EHPadBB);

  void LowerDeoptimizeCall(const CallInst *CI);
  void LowerDeoptimizingReturn();

  void LowerCallSiteWithDeoptBundleImpl(ImmutableCallSite CS, SDValue Callee,
                                        const BasicBlock *EHPadBB,
                                        bool VarArgDisallowed,
                                        bool ForceVoidReturnTy);

private:
  // Terminator instructions.
  void visitRet(const ReturnInst &I);
  void visitBr(const BranchInst &I);
  void visitSwitch(const SwitchInst &I);
  void visitIndirectBr(const IndirectBrInst &I);
  void visitUnreachable(const UnreachableInst &I);
  void visitCleanupRet(const CleanupReturnInst &I);
  void visitCatchSwitch(const CatchSwitchInst &I);
  void visitCatchRet(const CatchReturnInst &I);
  void visitCatchPad(const CatchPadInst &I);
  void visitCleanupPad(const CleanupPadInst &CPI);

  BranchProbability getEdgeProbability(const MachineBasicBlock *Src,
                                       const MachineBasicBlock *Dst) const;
  void addSuccessorWithProb(
      MachineBasicBlock *Src, MachineBasicBlock *Dst,
      BranchProbability Prob = BranchProbability::getUnknown());

public:
  void visitSwitchCase(CaseBlock &CB,
                       MachineBasicBlock *SwitchBB);
  void visitSPDescriptorParent(StackProtectorDescriptor &SPD,
                               MachineBasicBlock *ParentBB);
  void visitSPDescriptorFailure(StackProtectorDescriptor &SPD);
  void visitBitTestHeader(BitTestBlock &B, MachineBasicBlock *SwitchBB);
  void visitBitTestCase(BitTestBlock &BB,
                        MachineBasicBlock* NextMBB,
                        BranchProbability BranchProbToNext,
                        unsigned Reg,
                        BitTestCase &B,
                        MachineBasicBlock *SwitchBB);
  void visitJumpTable(JumpTable &JT);
  void visitJumpTableHeader(JumpTable &JT, JumpTableHeader &JTH,
                            MachineBasicBlock *SwitchBB);

private:
  // These all get lowered before this pass.
  void visitInvoke(const InvokeInst &I);
  void visitResume(const ResumeInst &I);

  void visitBinary(const User &I, unsigned OpCode);
  void visitShift(const User &I, unsigned Opcode);
  void visitAdd(const User &I)  { visitBinary(I, ISD::ADD); }
  void visitFAdd(const User &I) { visitBinary(I, ISD::FADD); }
  void visitSub(const User &I)  { visitBinary(I, ISD::SUB); }
  void visitFSub(const User &I);
  void visitMul(const User &I)  { visitBinary(I, ISD::MUL); }
  void visitFMul(const User &I) { visitBinary(I, ISD::FMUL); }
  void visitURem(const User &I) { visitBinary(I, ISD::UREM); }
  void visitSRem(const User &I) { visitBinary(I, ISD::SREM); }
  void visitFRem(const User &I) { visitBinary(I, ISD::FREM); }
  void visitUDiv(const User &I) { visitBinary(I, ISD::UDIV); }
  void visitSDiv(const User &I);
  void visitFDiv(const User &I) { visitBinary(I, ISD::FDIV); }
  void visitAnd (const User &I) { visitBinary(I, ISD::AND); }
  void visitOr  (const User &I) { visitBinary(I, ISD::OR); }
  void visitXor (const User &I) { visitBinary(I, ISD::XOR); }
  void visitShl (const User &I) { visitShift(I, ISD::SHL); }
  void visitLShr(const User &I) { visitShift(I, ISD::SRL); }
  void visitAShr(const User &I) { visitShift(I, ISD::SRA); }
  void visitICmp(const User &I);
  void visitFCmp(const User &I);
  // Visit the conversion instructions
  void visitTrunc(const User &I);
  void visitZExt(const User &I);
  void visitSExt(const User &I);
  void visitFPTrunc(const User &I);
  void visitFPExt(const User &I);
  void visitFPToUI(const User &I);
  void visitFPToSI(const User &I);
  void visitUIToFP(const User &I);
  void visitSIToFP(const User &I);
  void visitPtrToInt(const User &I);
  void visitIntToPtr(const User &I);
  void visitBitCast(const User &I);
  void visitAddrSpaceCast(const User &I);

  void visitExtractElement(const User &I);
  void visitInsertElement(const User &I);
  void visitShuffleVector(const User &I);

  void visitExtractValue(const ExtractValueInst &I);
  void visitInsertValue(const InsertValueInst &I);
  void visitLandingPad(const LandingPadInst &I);

  void visitGetElementPtr(const User &I);
  void visitSelect(const User &I);

  void visitAlloca(const AllocaInst &I);
  void visitLoad(const LoadInst &I);
  void visitStore(const StoreInst &I);
  void visitMaskedLoad(const CallInst &I);
  void visitMaskedStore(const CallInst &I);
  void visitMaskedGather(const CallInst &I);
  void visitMaskedScatter(const CallInst &I);
  void visitAtomicCmpXchg(const AtomicCmpXchgInst &I);
  void visitAtomicRMW(const AtomicRMWInst &I);
  void visitFence(const FenceInst &I);
  void visitPHI(const PHINode &I);
  void visitCall(const CallInst &I);
  bool visitMemCmpCall(const CallInst &I);
  bool visitMemChrCall(const CallInst &I);
  bool visitStrCpyCall(const CallInst &I, bool isStpcpy);
  bool visitStrCmpCall(const CallInst &I);
  bool visitStrLenCall(const CallInst &I);
  bool visitStrNLenCall(const CallInst &I);
  bool visitUnaryFloatCall(const CallInst &I, unsigned Opcode);
  bool visitBinaryFloatCall(const CallInst &I, unsigned Opcode);
  void visitAtomicLoad(const LoadInst &I);
  void visitAtomicStore(const StoreInst &I);
  void visitLoadFromSwiftError(const LoadInst &I);
  void visitStoreToSwiftError(const StoreInst &I);

  void visitInlineAsm(ImmutableCallSite CS);
  const char *visitIntrinsicCall(const CallInst &I, unsigned Intrinsic);
  void visitTargetIntrinsic(const CallInst &I, unsigned Intrinsic);

  void visitVAStart(const CallInst &I);
  void visitVAArg(const VAArgInst &I);
  void visitVAEnd(const CallInst &I);
  void visitVACopy(const CallInst &I);
  void visitStackmap(const CallInst &I);
  void visitPatchpoint(ImmutableCallSite CS,
                       const BasicBlock *EHPadBB = nullptr);

  // These two are implemented in StatepointLowering.cpp
  void visitGCRelocate(const GCRelocateInst &I);
  void visitGCResult(const GCResultInst &I);

  void visitUserOp1(const Instruction &I) {
    llvm_unreachable("UserOp1 should not exist at instruction selection time!");
  }
  void visitUserOp2(const Instruction &I) {
    llvm_unreachable("UserOp2 should not exist at instruction selection time!");
  }

  void processIntegerCallValue(const Instruction &I,
                               SDValue Value, bool IsSigned);

  void HandlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB);

  void emitInlineAsmError(ImmutableCallSite CS, const Twine &Message);

  /// EmitFuncArgumentDbgValue - If V is a function argument then create a
  /// corresponding DBG_VALUE machine instruction for it now. At the end of
  /// instruction selection, they will be inserted into the entry BB.
  bool EmitFuncArgumentDbgValue(const Value *V, DILocalVariable *Variable,
                                DIExpression *Expr, DILocation *DL,
                                int64_t Offset, bool IsIndirect,
                                const SDValue &N);

  /// Return the next block after MBB, or nullptr if there is none.
  MachineBasicBlock *NextBlock(MachineBasicBlock *MBB);

  /// Update the DAG and DAG builder with the relevant information after
  /// a new root node has been created which could be a tail call.
  void updateDAGForMaybeTailCall(SDValue MaybeTC);
};

/// RegsForValue - This struct represents the registers (physical or virtual)
/// that a particular set of values is assigned, and the type information about
/// the value. The most common situation is to represent one value at a time,
/// but struct or array values are handled element-wise as multiple values. The
/// splitting of aggregates is performed recursively, so that we never have
/// aggregate-typed registers. The values at this point do not necessarily have
/// legal types, so each value may require one or more registers of some legal
/// type.
///
struct RegsForValue {
  /// ValueVTs - The value types of the values, which may not be legal, and
  /// may need to be promoted or synthesized from one or more registers.
  ///
  SmallVector<EVT, 4> ValueVTs;

  /// RegVTs - The value types of the registers. This is the same size as
  /// ValueVTs and it records, for each value, what the type of the assigned
  /// register or registers are. (Individual values are never synthesized
  /// from more than one type of register.)
  ///
  /// With virtual registers, the contents of RegVTs is redundant with TLI's
  /// getRegisterType member function, however with physical registers
  /// it is necessary to have a separate record of the types.
  ///
  SmallVector<MVT, 4> RegVTs;

  /// Regs - This list holds the registers assigned to the values.
  /// Each legal or promoted value requires one register, and each
  /// expanded value requires multiple registers.
  ///
  SmallVector<unsigned, 4> Regs;
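  // Illustrative sketch (assumption): for a single i64 value on a target whose
  // only legal integer type is i32, the three vectors would line up roughly as
  //
  //   ValueVTs = { MVT::i64 }        // the original, possibly illegal type
  //   RegVTs   = { MVT::i32 }        // the legal register type it expands to
  //   Regs     = { %vreg1, %vreg2 }  // two i32 registers hold the one i64
  //
  // The concrete register numbers are placeholders.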

  RegsForValue();

  RegsForValue(const SmallVector<unsigned, 4> &regs, MVT regvt, EVT valuevt);

  RegsForValue(LLVMContext &Context, const TargetLowering &TLI,
               const DataLayout &DL, unsigned Reg, Type *Ty);

  /// append - Add the specified values to this one.
  void append(const RegsForValue &RHS) {
    ValueVTs.append(RHS.ValueVTs.begin(), RHS.ValueVTs.end());
    RegVTs.append(RHS.RegVTs.begin(), RHS.RegVTs.end());
    Regs.append(RHS.Regs.begin(), RHS.Regs.end());
  }

  /// getCopyFromRegs - Emit a series of CopyFromReg nodes that copies from
  /// this value and returns the result as a ValueVTs value. This uses
  /// Chain/Flag as the input and updates them for the output Chain/Flag.
  /// If the Flag pointer is NULL, no flag is used.
  SDValue getCopyFromRegs(SelectionDAG &DAG, FunctionLoweringInfo &FuncInfo,
                          SDLoc dl,
                          SDValue &Chain, SDValue *Flag,
                          const Value *V = nullptr) const;

  /// getCopyToRegs - Emit a series of CopyToReg nodes that copies the specified
  /// value into the registers specified by this object. This uses Chain/Flag
  /// as the input and updates them for the output Chain/Flag. If the Flag
  /// pointer is nullptr, no flag is used. If V is not nullptr, then it is used
  /// in printing better diagnostic messages on error.
  void
  getCopyToRegs(SDValue Val, SelectionDAG &DAG, SDLoc dl, SDValue &Chain,
                SDValue *Flag, const Value *V = nullptr,
                ISD::NodeType PreferredExtendType = ISD::ANY_EXTEND) const;

  /// AddInlineAsmOperands - Add this value to the specified inlineasm node
  /// operand list. This adds the code marker, matching input operand index
  /// (if applicable), and includes the number of values added into it.
  void AddInlineAsmOperands(unsigned Kind,
                            bool HasMatching, unsigned MatchingIdx, SDLoc dl,
                            SelectionDAG &DAG,
                            std::vector<SDValue> &Ops) const;
};

} // end namespace llvm

#endif