//===-- SelectionDAGBuilder.h - Selection-DAG building --------*- C++ -*---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This implements routines for translating from LLVM IR into SelectionDAG IR.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H
#define LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H

#include "StatepointLowering.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/SelectionDAGNodes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Statepoint.h"
#include "llvm/IR/Constants.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetLowering.h"
#include <vector>

namespace llvm {

class AddrSpaceCastInst;
class AliasAnalysis;
class AllocaInst;
class BasicBlock;
class BitCastInst;
class BranchInst;
class CallInst;
class DbgValueInst;
class ExtractElementInst;
class ExtractValueInst;
class FCmpInst;
class FPExtInst;
class FPToSIInst;
class FPToUIInst;
class FPTruncInst;
class Function;
class FunctionLoweringInfo;
class GetElementPtrInst;
class GCFunctionInfo;
class ICmpInst;
class IntToPtrInst;
class IndirectBrInst;
class InvokeInst;
class InsertElementInst;
class InsertValueInst;
class Instruction;
class LoadInst;
class MachineBasicBlock;
class MachineInstr;
class MachineRegisterInfo;
class MDNode;
class MVT;
class PHINode;
class PtrToIntInst;
class ReturnInst;
class SDDbgValue;
class SExtInst;
class SelectInst;
class ShuffleVectorInst;
class SIToFPInst;
class StoreInst;
class SwitchInst;
class DataLayout;
class TargetLibraryInfo;
class TargetLowering;
class TruncInst;
class UIToFPInst;
class UnreachableInst;
class VAArgInst;
class ZExtInst;

//===----------------------------------------------------------------------===//
/// SelectionDAGBuilder - This is the common target-independent lowering
/// implementation that is parameterized by a TargetLowering object.
///
class SelectionDAGBuilder {
  /// CurInst - The current instruction being visited
  const Instruction *CurInst;

  DenseMap<const Value*, SDValue> NodeMap;

  /// UnusedArgNodeMap - Maps argument values for unused arguments. This is used
  /// to preserve debug information for incoming arguments.
  DenseMap<const Value*, SDValue> UnusedArgNodeMap;

  /// DanglingDebugInfo - Helper type for DanglingDebugInfoMap.
  class DanglingDebugInfo {
    const DbgValueInst* DI;
    DebugLoc dl;
    unsigned SDNodeOrder;
  public:
    DanglingDebugInfo() : DI(nullptr), dl(DebugLoc()), SDNodeOrder(0) { }
    DanglingDebugInfo(const DbgValueInst *di, DebugLoc DL, unsigned SDNO) :
      DI(di), dl(DL), SDNodeOrder(SDNO) { }
    const DbgValueInst* getDI() { return DI; }
    DebugLoc getdl() { return dl; }
    unsigned getSDNodeOrder() { return SDNodeOrder; }
  };

  /// DanglingDebugInfoMap - Keeps track of dbg_values for which we have not
  /// yet seen the referent. We defer handling these until we do see it.
  DenseMap<const Value*, DanglingDebugInfo> DanglingDebugInfoMap;
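  // A minimal sketch of the defer/resolve flow (illustrative only; the names
  // below are the members and methods declared in this class):
  //
  //   // When a dbg.value is visited before its operand V has an SDValue:
  //   DanglingDebugInfoMap[V] = DanglingDebugInfo(DI, dl, SDNodeOrder);
  //
  //   // Later, once V has been lowered to Val:
  //   resolveDanglingDebugInfo(V, Val); // emits the deferred debug value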

public:
  /// PendingLoads - Loads are not emitted to the program immediately. We bunch
  /// them up and then emit token factor nodes when possible. This allows us to
  /// get simple disambiguation between loads without worrying about alias
  /// analysis.
  SmallVector<SDValue, 8> PendingLoads;

  /// State used while lowering a statepoint sequence (gc_statepoint,
  /// gc_relocate, and gc_result). See StatepointLowering.hpp/cpp for details.
  StatepointLoweringState StatepointLowering;
private:

  /// PendingExports - CopyToReg nodes that copy values to virtual registers
  /// for export to other blocks need to be emitted before any terminator
  /// instruction, but they have no other ordering requirements. We bunch them
  /// up and then emit a single tokenfactor for them just before terminator
  /// instructions.
  SmallVector<SDValue, 8> PendingExports;

  /// SDNodeOrder - A unique monotonically increasing number used to order the
  /// SDNodes we create.
  unsigned SDNodeOrder;

  enum CaseClusterKind {
    /// A cluster of adjacent case labels with the same destination, or just one
    /// case.
    CC_Range,
    /// A cluster of cases suitable for jump table lowering.
    CC_JumpTable,
    /// A cluster of cases suitable for bit test lowering.
    CC_BitTests
  };

  /// A cluster of case labels.
  struct CaseCluster {
    CaseClusterKind Kind;
    const ConstantInt *Low, *High;
    union {
      MachineBasicBlock *MBB;
      unsigned JTCasesIndex;
      unsigned BTCasesIndex;
    };
    uint32_t Weight;

    static CaseCluster range(const ConstantInt *Low, const ConstantInt *High,
                             MachineBasicBlock *MBB, uint32_t Weight) {
      CaseCluster C;
      C.Kind = CC_Range;
      C.Low = Low;
      C.High = High;
      C.MBB = MBB;
      C.Weight = Weight;
      return C;
    }

    static CaseCluster jumpTable(const ConstantInt *Low,
                                 const ConstantInt *High, unsigned JTCasesIndex,
                                 uint32_t Weight) {
      CaseCluster C;
      C.Kind = CC_JumpTable;
      C.Low = Low;
      C.High = High;
      C.JTCasesIndex = JTCasesIndex;
      C.Weight = Weight;
      return C;
    }

    static CaseCluster bitTests(const ConstantInt *Low, const ConstantInt *High,
                                unsigned BTCasesIndex, uint32_t Weight) {
      CaseCluster C;
      C.Kind = CC_BitTests;
      C.Low = Low;
      C.High = High;
      C.BTCasesIndex = BTCasesIndex;
      C.Weight = Weight;
      return C;
    }
  };
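  // Illustrative example (not part of the interface): a lone 'case 5:' that
  // branches to MBB with weight W would be modeled as
  //
  //   CaseCluster::range(CI5, CI5, MBB, W);   // CI5: the ConstantInt for 5
  //
  // Adjacent cases with the same destination are later merged into a single
  // CC_Range cluster by sortAndRangeify() below.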

  typedef std::vector<CaseCluster> CaseClusterVector;
  typedef CaseClusterVector::iterator CaseClusterIt;

  struct CaseBits {
    uint64_t Mask;
    MachineBasicBlock* BB;
    unsigned Bits;
    uint32_t ExtraWeight;

    CaseBits(uint64_t mask, MachineBasicBlock* bb, unsigned bits,
             uint32_t Weight):
      Mask(mask), BB(bb), Bits(bits), ExtraWeight(Weight) { }

    CaseBits() : Mask(0), BB(nullptr), Bits(0), ExtraWeight(0) {}
  };

  typedef std::vector<CaseBits> CaseBitsVector;

  /// Sort Clusters and merge adjacent cases.
  void sortAndRangeify(CaseClusterVector &Clusters);

  /// CaseBlock - This structure is used to communicate between
  /// SelectionDAGBuilder and SDISel for the code generation of additional basic
  /// blocks needed by multi-case switch statements.
  struct CaseBlock {
    CaseBlock(ISD::CondCode cc, const Value *cmplhs, const Value *cmprhs,
              const Value *cmpmiddle,
              MachineBasicBlock *truebb, MachineBasicBlock *falsebb,
              MachineBasicBlock *me,
              uint32_t trueweight = 0, uint32_t falseweight = 0)
      : CC(cc), CmpLHS(cmplhs), CmpMHS(cmpmiddle), CmpRHS(cmprhs),
        TrueBB(truebb), FalseBB(falsebb), ThisBB(me),
        TrueWeight(trueweight), FalseWeight(falseweight) { }

    // CC - the condition code to use for the case block's setcc node
    ISD::CondCode CC;

    // CmpLHS/CmpRHS/CmpMHS - The LHS/MHS/RHS of the comparison to emit.
    // Emit by default LHS op RHS. MHS is used for range comparisons:
    // If MHS is not null: (LHS <= MHS) and (MHS <= RHS).
    const Value *CmpLHS, *CmpMHS, *CmpRHS;

    // TrueBB/FalseBB - the block to branch to if the setcc is true/false.
    MachineBasicBlock *TrueBB, *FalseBB;

    // ThisBB - the block into which to emit the code for the setcc and branches
    MachineBasicBlock *ThisBB;

    // TrueWeight/FalseWeight - branch weights.
    uint32_t TrueWeight, FalseWeight;
  };
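  // For example (illustrative only): a check that the switch condition X lies
  // in the range [3, 7] can be expressed as a single CaseBlock with
  // CmpLHS = 3, CmpMHS = X, CmpRHS = 7, which emits (3 <= X) && (X <= 7) as
  // described in the CmpLHS/CmpRHS/CmpMHS comment above.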

  struct JumpTable {
    JumpTable(unsigned R, unsigned J, MachineBasicBlock *M,
              MachineBasicBlock *D): Reg(R), JTI(J), MBB(M), Default(D) {}

    /// Reg - the virtual register containing the index of the jump table entry
    /// to jump to.
    unsigned Reg;
    /// JTI - the JumpTableIndex for this jump table in the function.
    unsigned JTI;
    /// MBB - the MBB into which to emit the code for the indirect jump.
    MachineBasicBlock *MBB;
    /// Default - the MBB of the default bb, which is a successor of the range
    /// check MBB. This is used when updating PHI nodes in successors.
    MachineBasicBlock *Default;
  };
  struct JumpTableHeader {
    JumpTableHeader(APInt F, APInt L, const Value *SV, MachineBasicBlock *H,
                    bool E = false):
      First(F), Last(L), SValue(SV), HeaderBB(H), Emitted(E) {}
    APInt First;
    APInt Last;
    const Value *SValue;
    MachineBasicBlock *HeaderBB;
    bool Emitted;
  };
  typedef std::pair<JumpTableHeader, JumpTable> JumpTableBlock;

  struct BitTestCase {
    BitTestCase(uint64_t M, MachineBasicBlock* T, MachineBasicBlock* Tr,
                uint32_t Weight):
      Mask(M), ThisBB(T), TargetBB(Tr), ExtraWeight(Weight) { }
    uint64_t Mask;
    MachineBasicBlock *ThisBB;
    MachineBasicBlock *TargetBB;
    uint32_t ExtraWeight;
  };

  typedef SmallVector<BitTestCase, 3> BitTestInfo;

  struct BitTestBlock {
    BitTestBlock(APInt F, APInt R, const Value* SV,
                 unsigned Rg, MVT RgVT, bool E,
                 MachineBasicBlock* P, MachineBasicBlock* D,
                 BitTestInfo C):
      First(F), Range(R), SValue(SV), Reg(Rg), RegVT(RgVT), Emitted(E),
      Parent(P), Default(D), Cases(std::move(C)) { }
    APInt First;
    APInt Range;
    const Value *SValue;
    unsigned Reg;
    MVT RegVT;
    bool Emitted;
    MachineBasicBlock *Parent;
    MachineBasicBlock *Default;
    BitTestInfo Cases;
  };

  /// Minimum jump table density, in percent.
  enum { MinJumpTableDensity = 40 };
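  // A worked example of the density threshold (illustrative reading, not a
  // separate rule): at 40% minimum density, a candidate table covering a
  // contiguous range of 100 values needs at least 40 case values inside that
  // range. isDense() below performs this check over Clusters[First..Last].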

  /// Check whether a range of clusters is dense enough for a jump table.
  bool isDense(const CaseClusterVector &Clusters, unsigned *TotalCases,
               unsigned First, unsigned Last);

  /// Build a jump table cluster from Clusters[First..Last]. Returns false if it
  /// decides it's not a good idea.
  bool buildJumpTable(CaseClusterVector &Clusters, unsigned First,
                      unsigned Last, const SwitchInst *SI,
                      MachineBasicBlock *DefaultMBB, CaseCluster &JTCluster);

  /// Find clusters of cases suitable for jump table lowering.
  void findJumpTables(CaseClusterVector &Clusters, const SwitchInst *SI,
                      MachineBasicBlock *DefaultMBB);

  /// Check whether the range [Low,High] fits in a machine word.
  bool rangeFitsInWord(const APInt &Low, const APInt &High);

  /// Check whether these clusters are suitable for lowering with bit tests based
  /// on the number of destinations, comparison metric, and range.
  bool isSuitableForBitTests(unsigned NumDests, unsigned NumCmps,
                             const APInt &Low, const APInt &High);

  /// Build a bit test cluster from Clusters[First..Last]. Returns false if it
  /// decides it's not a good idea.
  bool buildBitTests(CaseClusterVector &Clusters, unsigned First, unsigned Last,
                     const SwitchInst *SI, CaseCluster &BTCluster);

  /// Find clusters of cases suitable for bit test lowering.
  void findBitTestClusters(CaseClusterVector &Clusters, const SwitchInst *SI);

  struct SwitchWorkListItem {
    MachineBasicBlock *MBB;
    CaseClusterIt FirstCluster;
    CaseClusterIt LastCluster;
    const ConstantInt *GE;
    const ConstantInt *LT;
  };
  typedef SmallVector<SwitchWorkListItem, 4> SwitchWorkList;

  /// Emit comparison and split W into two subtrees.
  void splitWorkItem(SwitchWorkList &WorkList, const SwitchWorkListItem &W,
                     Value *Cond, MachineBasicBlock *SwitchMBB);

  /// Lower W.
  void lowerWorkItem(SwitchWorkListItem W, Value *Cond,
                     MachineBasicBlock *SwitchMBB,
                     MachineBasicBlock *DefaultMBB);

  /// A class which encapsulates all of the information needed to generate a
  /// stack protector check and signals to isel via its state being initialized
  /// that a stack protector needs to be generated.
  ///
  /// *NOTE* The following is a high level documentation of SelectionDAG Stack
  /// Protector Generation. The reason that it is placed here is for a lack of
  /// other good places to stick it.
  ///
  /// High Level Overview of SelectionDAG Stack Protector Generation:
  ///
  /// Previously, generation of stack protectors was done exclusively in the
  /// pre-SelectionDAG Codegen LLVM IR Pass "Stack Protector". This necessitated
  /// splitting basic blocks at the IR level to create the success/failure basic
  /// blocks in the tail of the basic block in question. As a result of this,
  /// calls that would have qualified for the sibling call optimization were no
  /// longer eligible for optimization since said calls were no longer right in
  /// the "tail position" (i.e. the immediate predecessor of a ReturnInst
  /// instruction).
  ///
  /// Then it was noticed that since the sibling call optimization causes the
  /// callee to reuse the caller's stack, if we could delay the generation of
  /// the stack protector check until later in CodeGen after the sibling call
  /// decision was made, we get both the tail call optimization and the stack
  /// protector check!
  ///
  /// A few goals in solving this problem were:
  ///
  ///   1. Preserve the architecture independence of stack protector generation.
  ///
  ///   2. Preserve the normal IR level stack protector check for platforms like
  ///      OpenBSD for which we support platform-specific stack protector
  ///      generation.
  ///
  /// The main problem that guided the present solution is that one can not
  /// solve this problem in an architecture independent manner at the IR level
  /// only. This is because:
  ///
  ///   1. The decision on whether or not to perform a sibling call on certain
  ///      platforms (for instance i386) requires lower level information
  ///      related to available registers that can not be known at the IR level.
  ///
  ///   2. Even if the previous point were not true, the decision on whether to
  ///      perform a tail call is done in LowerCallTo in SelectionDAG which
  ///      occurs after the Stack Protector Pass. As a result, one would need to
  ///      put the relevant callinst into the stack protector check success
  ///      basic block (where the return inst is placed) and then move it back
  ///      later at SelectionDAG/MI time before the stack protector check if the
  ///      tail call optimization failed. The MI level option was nixed
  ///      immediately since it would require platform-specific pattern
  ///      matching. The SelectionDAG level option was nixed because
  ///      SelectionDAG only processes one IR level basic block at a time
  ///      implying one could not create a DAG Combine to move the callinst.
  ///
  /// To get around this problem a few things were realized:
  ///
  ///   1. While one can not handle multiple IR level basic blocks at the
  ///      SelectionDAG Level, one can generate multiple machine basic blocks
  ///      for one IR level basic block. This is how we handle bit tests and
  ///      switches.
  ///
  ///   2. At the MI level, tail calls are represented via a special return
  ///      MIInst called "tcreturn". Thus if we know the basic block in which we
  ///      wish to insert the stack protector check, we get the correct behavior
  ///      by always inserting the stack protector check right before the return
  ///      statement. This is a "magical transformation" since no matter where
  ///      the stack protector check intrinsic is, we always insert the stack
  ///      protector check code at the end of the BB.
  ///
  /// Given the aforementioned constraints, the following solution was devised:
  ///
  ///   1. On platforms that do not support SelectionDAG stack protector check
  ///      generation, allow for the normal IR level stack protector check
  ///      generation to continue.
  ///
  ///   2. On platforms that do support SelectionDAG stack protector check
  ///      generation:
  ///
  ///     a. Use the IR level stack protector pass to decide if a stack
  ///        protector is required/which BB we insert the stack protector check
  ///        in by reusing the logic already therein. If we wish to generate a
  ///        stack protector check in a basic block, we place a special IR
  ///        intrinsic called llvm.stackprotectorcheck right before the BB's
  ///        returninst or if there is a callinst that could potentially be
  ///        sibling call optimized, before the call inst.
  ///
  ///     b. Then when a BB with said intrinsic is processed, we codegen the BB
  ///        normally via SelectBasicBlock. In said process, when we visit the
  ///        stack protector check, we do not actually emit anything into the
  ///        BB. Instead, we just initialize the stack protector descriptor
  ///        class (which involves stashing information/creating the success
  ///        mbb and the failure mbb if we have not created one for this
  ///        function yet) and export the guard variable that we are going to
  ///        compare.
  ///
  ///     c. After we finish selecting the basic block, in FinishBasicBlock if
  ///        the StackProtectorDescriptor attached to the SelectionDAGBuilder is
  ///        initialized, we first find a splice point in the parent basic block
  ///        before the terminator and then splice the terminator of said basic
  ///        block into the success basic block. Then we code-gen a new tail for
  ///        the parent basic block consisting of the two loads, the comparison,
  ///        and finally two branches to the success/failure basic blocks. We
  ///        conclude by code-gening the failure basic block if we have not
  ///        code-gened it already (all stack protector checks we generate in
  ///        the same function use the same failure basic block).
  class StackProtectorDescriptor {
  public:
    StackProtectorDescriptor() : ParentMBB(nullptr), SuccessMBB(nullptr),
                                 FailureMBB(nullptr), Guard(nullptr),
                                 GuardReg(0) { }

    /// Returns true if all fields of the stack protector descriptor are
    /// initialized implying that we should/are ready to emit a stack protector.
    bool shouldEmitStackProtector() const {
      return ParentMBB && SuccessMBB && FailureMBB && Guard;
    }

    /// Initialize the stack protector descriptor structure for a new basic
    /// block.
    void initialize(const BasicBlock *BB,
                    MachineBasicBlock *MBB,
                    const CallInst &StackProtCheckCall) {
      // Make sure we are not initialized yet.
      assert(!shouldEmitStackProtector() && "Stack Protector Descriptor is "
             "already initialized!");
      ParentMBB = MBB;
      SuccessMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ true);
      FailureMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ false, FailureMBB);
      if (!Guard)
        Guard = StackProtCheckCall.getArgOperand(0);
    }

    /// Reset state that changes when we handle different basic blocks.
    ///
    /// This currently includes:
    ///
    /// 1. The specific basic block we are generating a
    ///    stack protector for (ParentMBB).
    ///
    /// 2. The successor machine basic block that will contain the tail of
    ///    parent mbb after we create the stack protector check (SuccessMBB).
    ///    This BB is visited only on stack protector check success.
    void resetPerBBState() {
      ParentMBB = nullptr;
      SuccessMBB = nullptr;
    }

    /// Reset state that only changes when we switch functions.
    ///
    /// This currently includes:
    ///
    /// 1. FailureMBB since we reuse the failure code path for all stack
    ///    protector checks created in an individual function.
    ///
    /// 2. The guard variable since the guard variable we are checking against
    ///    is always the same.
    void resetPerFunctionState() {
      FailureMBB = nullptr;
      Guard = nullptr;
    }

    MachineBasicBlock *getParentMBB() { return ParentMBB; }
    MachineBasicBlock *getSuccessMBB() { return SuccessMBB; }
    MachineBasicBlock *getFailureMBB() { return FailureMBB; }
    const Value *getGuard() { return Guard; }

    unsigned getGuardReg() const { return GuardReg; }
    void setGuardReg(unsigned R) { GuardReg = R; }

  private:
    /// The basic block for which we are generating the stack protector.
    ///
    /// As a result of stack protector generation, we will splice the
    /// terminators of this basic block into the successor mbb SuccessMBB and
    /// replace it with a compare/branch to the successor mbbs
    /// SuccessMBB/FailureMBB depending on whether or not the stack protector
    /// was violated.
    MachineBasicBlock *ParentMBB;

    /// A basic block visited on stack protector check success that contains the
    /// terminators of ParentMBB.
    MachineBasicBlock *SuccessMBB;

    /// A basic block visited on stack protector check failure that will
    /// contain a call to __stack_chk_fail().
    MachineBasicBlock *FailureMBB;

    /// The guard variable which we will compare against the stored value in the
    /// stack protector stack slot.
    const Value *Guard;

    /// The virtual register holding the stack guard value.
    unsigned GuardReg;

    /// Add a successor machine basic block to ParentMBB. If the successor mbb
    /// has not been created yet (i.e. if SuccMBB = 0), then the machine basic
    /// block will be created. Assign a large weight if IsLikely is true.
    MachineBasicBlock *AddSuccessorMBB(const BasicBlock *BB,
                                       MachineBasicBlock *ParentMBB,
                                       bool IsLikely,
                                       MachineBasicBlock *SuccMBB = nullptr);
  };
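  // A minimal sketch of the intended lifecycle under the scheme documented
  // above (illustrative only; the real call sites live in the instruction
  // selector, not in this header):
  //
  //   // While selecting the block containing llvm.stackprotectorcheck:
  //   SPDescriptor.initialize(LLVMBB, ParentMBB, StackProtCheckCall);
  //
  //   // Later, when finishing the block:
  //   if (SPDescriptor.shouldEmitStackProtector()) {
  //     visitSPDescriptorParent(SPDescriptor, SPDescriptor.getParentMBB());
  //     if (/* failure MBB not emitted yet */)
  //       visitSPDescriptorFailure(SPDescriptor); // shared per-function fail BB
  //     SPDescriptor.resetPerBBState();
  //   }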

private:
  const TargetMachine &TM;
public:
  /// Lowest valid SDNodeOrder. The special case 0 is reserved for scheduling
  /// nodes without a corresponding SDNode.
  static const unsigned LowestSDNodeOrder = 1;

  SelectionDAG &DAG;
  const DataLayout *DL;
  AliasAnalysis *AA;
  const TargetLibraryInfo *LibInfo;

  /// SwitchCases - Vector of CaseBlock structures used to communicate
  /// SwitchInst code generation information.
  std::vector<CaseBlock> SwitchCases;
  /// JTCases - Vector of JumpTable structures used to communicate
  /// SwitchInst code generation information.
  std::vector<JumpTableBlock> JTCases;
  /// BitTestCases - Vector of BitTestBlock structures used to communicate
  /// SwitchInst code generation information.
  std::vector<BitTestBlock> BitTestCases;
  /// A StackProtectorDescriptor structure used to communicate stack protector
  /// information in between SelectBasicBlock and FinishBasicBlock.
  StackProtectorDescriptor SPDescriptor;

  // Emit PHI-node-operand constants only once even if used by multiple
  // PHI nodes.
  DenseMap<const Constant *, unsigned> ConstantsOut;

  /// FuncInfo - Information about the function as a whole.
  ///
  FunctionLoweringInfo &FuncInfo;

  /// OptLevel - What optimization level we're generating code for.
  ///
  CodeGenOpt::Level OptLevel;

  /// GFI - Garbage collection metadata for the function.
  GCFunctionInfo *GFI;

  /// LPadToCallSiteMap - Map a landing pad to the call site indexes.
  DenseMap<MachineBasicBlock*, SmallVector<unsigned, 4> > LPadToCallSiteMap;

  /// HasTailCall - This is set to true if a call in the current
  /// block has been translated as a tail call. In this case,
  /// no subsequent DAG nodes should be created.
  ///
  bool HasTailCall;

  LLVMContext *Context;

  SelectionDAGBuilder(SelectionDAG &dag, FunctionLoweringInfo &funcinfo,
                      CodeGenOpt::Level ol)
    : CurInst(nullptr), SDNodeOrder(LowestSDNodeOrder), TM(dag.getTarget()),
      DAG(dag), FuncInfo(funcinfo), OptLevel(ol),
      HasTailCall(false) {
  }

  void init(GCFunctionInfo *gfi, AliasAnalysis &aa,
            const TargetLibraryInfo *li);
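  // Hedged usage sketch (the real driver is SelectionDAGISel; TheDAG, FLI,
  // GCInfo, AA and LibInfo below stand for whatever the caller already holds):
  //
  //   SelectionDAGBuilder SDB(TheDAG, FLI, CodeGenOpt::Default);
  //   SDB.init(GCInfo, AA, LibInfo);     // once per function
  //   for (const Instruction &I : *LLVMBB)
  //     SDB.visit(I);                    // lower one IR basic block
  //   SDB.clear();                       // reset per-block state afterwards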

  /// clear - Clear out the current SelectionDAG and the associated
  /// state and prepare this SelectionDAGBuilder object to be used
  /// for a new block. This doesn't clear out information about
  /// additional blocks that are needed to complete switch lowering
  /// or PHI node updating; that information is cleared out as it is
  /// consumed.
  void clear();

  /// clearDanglingDebugInfo - Clear the dangling debug information
  /// map. This function is separated from clear() so that debug
  /// information that is dangling in a basic block can be properly
  /// resolved in a different basic block. This allows the
  /// SelectionDAG to resolve dangling debug information attached
  /// to PHI nodes.
  void clearDanglingDebugInfo();

  /// getRoot - Return the current virtual root of the Selection DAG,
  /// flushing any PendingLoad items. This must be done before emitting
  /// a store or any other node that may need to be ordered after any
  /// prior load instructions.
  ///
  SDValue getRoot();

  /// getControlRoot - Similar to getRoot, but instead of flushing all the
  /// PendingLoad items, flush all the PendingExports items. It is necessary
  /// to do this before emitting a terminator instruction.
  ///
  SDValue getControlRoot();
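  // Hedged example of the chain discipline described above (the operands are
  // placeholders, not real code from this builder): a node that must be
  // ordered after pending loads is chained on getRoot(), while a terminator
  // is chained on getControlRoot() so pending exports are flushed first.
  //
  //   SDValue Store = DAG.getStore(getRoot(), getCurSDLoc(), Val, Ptr,
  //                                MachinePointerInfo(), false, false, 0);
  //   SDValue Br = DAG.getNode(ISD::BR, getCurSDLoc(), MVT::Other,
  //                            getControlRoot(), DAG.getBasicBlock(TargetMBB));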
646
Andrew Trick175143b2013-05-25 02:20:36 +0000647 SDLoc getCurSDLoc() const {
Andrew Trick175143b2013-05-25 02:20:36 +0000648 return SDLoc(CurInst, SDNodeOrder);
649 }
650
651 DebugLoc getCurDebugLoc() const {
652 return CurInst ? CurInst->getDebugLoc() : DebugLoc();
653 }
Devang Patelf3292b22011-02-21 23:21:26 +0000654
Bill Wendling919b7aa2009-12-22 02:10:19 +0000655 unsigned getSDNodeOrder() const { return SDNodeOrder; }
656
Dan Gohmanbcaf6812010-04-15 01:51:59 +0000657 void CopyValueToVirtualRegister(const Value *V, unsigned Reg);
Dan Gohman575fad32008-09-03 16:12:24 +0000658
Dan Gohmanbcaf6812010-04-15 01:51:59 +0000659 void visit(const Instruction &I);
Dan Gohman575fad32008-09-03 16:12:24 +0000660
Dan Gohmanbcaf6812010-04-15 01:51:59 +0000661 void visit(unsigned Opcode, const User &I);
Dan Gohman575fad32008-09-03 16:12:24 +0000662
Igor Laevsky85f7f722015-03-10 16:26:48 +0000663 /// getCopyFromRegs - If there was virtual register allocated for the value V
664 /// emit CopyFromReg of the specified type Ty. Return empty SDValue() otherwise.
665 SDValue getCopyFromRegs(const Value *V, Type *Ty);
666
Dale Johannesenbfd4fd72010-07-16 00:02:08 +0000667 // resolveDanglingDebugInfo - if we saw an earlier dbg_value referring to V,
668 // generate the debug data structures now that we've seen its definition.
669 void resolveDanglingDebugInfo(const Value *V, SDValue Val);
Dan Gohman575fad32008-09-03 16:12:24 +0000670 SDValue getValue(const Value *V);
Elena Demikhovsky584ce372015-04-28 07:57:37 +0000671 bool findValue(const Value *V) const;
672
Dan Gohmand4322232010-07-01 01:59:43 +0000673 SDValue getNonRegisterValue(const Value *V);
674 SDValue getValueImpl(const Value *V);
Dan Gohman575fad32008-09-03 16:12:24 +0000675
676 void setValue(const Value *V, SDValue NewN) {
677 SDValue &N = NodeMap[V];
Craig Topperada08572014-04-16 04:21:27 +0000678 assert(!N.getNode() && "Already set a value for this node!");
Dan Gohman575fad32008-09-03 16:12:24 +0000679 N = NewN;
680 }
Andrew Trickd4d1d9c2013-10-31 17:18:07 +0000681
Philip Reames1a1bdb22014-12-02 18:50:36 +0000682 void removeValue(const Value *V) {
683 // This is to support hack in lowerCallFromStatepoint
684 // Should be removed when hack is resolved
Benjamin Kramer4e3b9032015-02-27 21:43:14 +0000685 NodeMap.erase(V);
Philip Reames1a1bdb22014-12-02 18:50:36 +0000686 }
687
Devang Patelb0c76392010-06-01 19:59:01 +0000688 void setUnusedArgValue(const Value *V, SDValue NewN) {
689 SDValue &N = UnusedArgNodeMap[V];
Craig Topperada08572014-04-16 04:21:27 +0000690 assert(!N.getNode() && "Already set a value for this node!");
Devang Patelb0c76392010-06-01 19:59:01 +0000691 N = NewN;
692 }

  void FindMergedConditions(const Value *Cond, MachineBasicBlock *TBB,
                            MachineBasicBlock *FBB, MachineBasicBlock *CurBB,
                            MachineBasicBlock *SwitchBB, unsigned Opc,
                            uint32_t TW, uint32_t FW);
  void EmitBranchForMergedCondition(const Value *Cond, MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    MachineBasicBlock *CurBB,
                                    MachineBasicBlock *SwitchBB,
                                    uint32_t TW, uint32_t FW);
  bool ShouldEmitAsBranches(const std::vector<CaseBlock> &Cases);
  bool isExportableFromCurrentBlock(const Value *V, const BasicBlock *FromBB);
  void CopyToExportRegsIfNeeded(const Value *V);
  void ExportFromCurrentBlock(const Value *V);
  void LowerCallTo(ImmutableCallSite CS, SDValue Callee, bool IsTailCall,
                   MachineBasicBlock *LandingPad = nullptr);

  std::pair<SDValue, SDValue> lowerCallOperands(
          ImmutableCallSite CS,
          unsigned ArgIdx,
          unsigned NumArgs,
          SDValue Callee,
          Type *ReturnTy,
          MachineBasicBlock *LandingPad = nullptr,
          bool IsPatchPoint = false);

  /// UpdateSplitBlock - When an MBB was split during scheduling, update the
  /// references that need to refer to the last resulting block.
  void UpdateSplitBlock(MachineBasicBlock *First, MachineBasicBlock *Last);

  // This function is responsible for the whole statepoint lowering process.
  // It uniformly handles invoke and call statepoints.
  void LowerStatepoint(ImmutableStatepoint Statepoint,
                       MachineBasicBlock *LandingPad = nullptr);
private:
  std::pair<SDValue, SDValue> lowerInvokable(
          TargetLowering::CallLoweringInfo &CLI,
          MachineBasicBlock *LandingPad);

  // Terminator instructions.
  void visitRet(const ReturnInst &I);
  void visitBr(const BranchInst &I);
  void visitSwitch(const SwitchInst &I);
  void visitIndirectBr(const IndirectBrInst &I);
  void visitUnreachable(const UnreachableInst &I);

  uint32_t getEdgeWeight(const MachineBasicBlock *Src,
                         const MachineBasicBlock *Dst) const;
  void addSuccessorWithWeight(MachineBasicBlock *Src, MachineBasicBlock *Dst,
                              uint32_t Weight = 0);
public:
  void visitSwitchCase(CaseBlock &CB,
                       MachineBasicBlock *SwitchBB);
  void visitSPDescriptorParent(StackProtectorDescriptor &SPD,
                               MachineBasicBlock *ParentBB);
  void visitSPDescriptorFailure(StackProtectorDescriptor &SPD);
  void visitBitTestHeader(BitTestBlock &B, MachineBasicBlock *SwitchBB);
  void visitBitTestCase(BitTestBlock &BB,
                        MachineBasicBlock* NextMBB,
                        uint32_t BranchWeightToNext,
                        unsigned Reg,
                        BitTestCase &B,
                        MachineBasicBlock *SwitchBB);
  void visitJumpTable(JumpTable &JT);
  void visitJumpTableHeader(JumpTable &JT, JumpTableHeader &JTH,
                            MachineBasicBlock *SwitchBB);
  unsigned visitLandingPadClauseBB(GlobalValue *ClauseGV,
                                   MachineBasicBlock *LPadMBB);

private:
  // These all get lowered before this pass.
  void visitInvoke(const InvokeInst &I);
  void visitResume(const ResumeInst &I);

  void visitBinary(const User &I, unsigned OpCode);
  void visitShift(const User &I, unsigned Opcode);
  void visitAdd(const User &I)  { visitBinary(I, ISD::ADD); }
  void visitFAdd(const User &I) { visitBinary(I, ISD::FADD); }
  void visitSub(const User &I)  { visitBinary(I, ISD::SUB); }
  void visitFSub(const User &I);
  void visitMul(const User &I)  { visitBinary(I, ISD::MUL); }
  void visitFMul(const User &I) { visitBinary(I, ISD::FMUL); }
  void visitURem(const User &I) { visitBinary(I, ISD::UREM); }
  void visitSRem(const User &I) { visitBinary(I, ISD::SREM); }
  void visitFRem(const User &I) { visitBinary(I, ISD::FREM); }
  void visitUDiv(const User &I) { visitBinary(I, ISD::UDIV); }
  void visitSDiv(const User &I);
  void visitFDiv(const User &I) { visitBinary(I, ISD::FDIV); }
  void visitAnd (const User &I) { visitBinary(I, ISD::AND); }
  void visitOr  (const User &I) { visitBinary(I, ISD::OR); }
  void visitXor (const User &I) { visitBinary(I, ISD::XOR); }
  void visitShl (const User &I) { visitShift(I, ISD::SHL); }
  void visitLShr(const User &I) { visitShift(I, ISD::SRL); }
  void visitAShr(const User &I) { visitShift(I, ISD::SRA); }
  void visitICmp(const User &I);
  void visitFCmp(const User &I);
  // Visit the conversion instructions
  void visitTrunc(const User &I);
  void visitZExt(const User &I);
  void visitSExt(const User &I);
  void visitFPTrunc(const User &I);
  void visitFPExt(const User &I);
  void visitFPToUI(const User &I);
  void visitFPToSI(const User &I);
  void visitUIToFP(const User &I);
  void visitSIToFP(const User &I);
  void visitPtrToInt(const User &I);
  void visitIntToPtr(const User &I);
  void visitBitCast(const User &I);
  void visitAddrSpaceCast(const User &I);

  void visitExtractElement(const User &I);
  void visitInsertElement(const User &I);
  void visitShuffleVector(const User &I);

  void visitExtractValue(const ExtractValueInst &I);
  void visitInsertValue(const InsertValueInst &I);
  void visitLandingPad(const LandingPadInst &I);

  void visitGetElementPtr(const User &I);
  void visitSelect(const User &I);

  void visitAlloca(const AllocaInst &I);
  void visitLoad(const LoadInst &I);
  void visitStore(const StoreInst &I);
  void visitMaskedLoad(const CallInst &I);
  void visitMaskedStore(const CallInst &I);
  void visitMaskedGather(const CallInst &I);
  void visitMaskedScatter(const CallInst &I);
  void visitAtomicCmpXchg(const AtomicCmpXchgInst &I);
  void visitAtomicRMW(const AtomicRMWInst &I);
  void visitFence(const FenceInst &I);
  void visitPHI(const PHINode &I);
  void visitCall(const CallInst &I);
  bool visitMemCmpCall(const CallInst &I);
  bool visitMemChrCall(const CallInst &I);
  bool visitStrCpyCall(const CallInst &I, bool isStpcpy);
  bool visitStrCmpCall(const CallInst &I);
  bool visitStrLenCall(const CallInst &I);
  bool visitStrNLenCall(const CallInst &I);
  bool visitUnaryFloatCall(const CallInst &I, unsigned Opcode);
  bool visitBinaryFloatCall(const CallInst &I, unsigned Opcode);
  void visitAtomicLoad(const LoadInst &I);
  void visitAtomicStore(const StoreInst &I);

  void visitInlineAsm(ImmutableCallSite CS);
  const char *visitIntrinsicCall(const CallInst &I, unsigned Intrinsic);
  void visitTargetIntrinsic(const CallInst &I, unsigned Intrinsic);

  void visitVAStart(const CallInst &I);
  void visitVAArg(const VAArgInst &I);
  void visitVAEnd(const CallInst &I);
  void visitVACopy(const CallInst &I);
  void visitStackmap(const CallInst &I);
  void visitPatchpoint(ImmutableCallSite CS,
                       MachineBasicBlock *LandingPad = nullptr);

  // These three are implemented in StatepointLowering.cpp
  void visitStatepoint(const CallInst &I);
  void visitGCRelocate(const CallInst &I);
  void visitGCResult(const CallInst &I);

  void visitUserOp1(const Instruction &I) {
    llvm_unreachable("UserOp1 should not exist at instruction selection time!");
  }
  void visitUserOp2(const Instruction &I) {
    llvm_unreachable("UserOp2 should not exist at instruction selection time!");
  }

  void processIntegerCallValue(const Instruction &I,
                               SDValue Value, bool IsSigned);

  void HandlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB);

  /// EmitFuncArgumentDbgValue - If V is a function argument then create a
  /// corresponding DBG_VALUE machine instruction for it now. At the end of
  /// instruction selection, they will be inserted into the entry BB.
  bool EmitFuncArgumentDbgValue(const Value *V, DILocalVariable *Variable,
                                DIExpression *Expr, DILocation *DL,
                                int64_t Offset, bool IsIndirect,
                                const SDValue &N);

  /// Return the next block after MBB, or nullptr if there is none.
  MachineBasicBlock *NextBlock(MachineBasicBlock *MBB);

  /// Update the DAG and DAG builder with the relevant information after
  /// a new root node has been created which could be a tail call.
  void updateDAGForMaybeTailCall(SDValue MaybeTC);
};

/// RegsForValue - This struct represents the registers (physical or virtual)
/// that a particular set of values is assigned, and the type information about
/// the value. The most common situation is to represent one value at a time,
/// but struct or array values are handled element-wise as multiple values. The
/// splitting of aggregates is performed recursively, so that we never have
/// aggregate-typed registers. The values at this point do not necessarily have
/// legal types, so each value may require one or more registers of some legal
/// type.
///
struct RegsForValue {
  /// ValueVTs - The value types of the values, which may not be legal, and
  /// may need to be promoted or synthesized from one or more registers.
  ///
  SmallVector<EVT, 4> ValueVTs;

  /// RegVTs - The value types of the registers. This is the same size as
  /// ValueVTs and it records, for each value, what the type of the assigned
  /// register or registers are. (Individual values are never synthesized
  /// from more than one type of register.)
  ///
  /// With virtual registers, the contents of RegVTs is redundant with TLI's
  /// getRegisterType member function; however, with physical registers
  /// it is necessary to have a separate record of the types.
  ///
  SmallVector<MVT, 4> RegVTs;

  /// Regs - This list holds the registers assigned to the values.
  /// Each legal or promoted value requires one register, and each
  /// expanded value requires multiple registers.
  ///
  SmallVector<unsigned, 4> Regs;

  RegsForValue();

  RegsForValue(const SmallVector<unsigned, 4> &regs, MVT regvt, EVT valuevt);

  RegsForValue(LLVMContext &Context, const TargetLowering &tli, unsigned Reg,
               Type *Ty);

  /// append - Add the specified values to this one.
  void append(const RegsForValue &RHS) {
    ValueVTs.append(RHS.ValueVTs.begin(), RHS.ValueVTs.end());
    RegVTs.append(RHS.RegVTs.begin(), RHS.RegVTs.end());
    Regs.append(RHS.Regs.begin(), RHS.Regs.end());
  }

  /// getCopyFromRegs - Emit a series of CopyFromReg nodes that copies from
  /// this value and returns the result as a ValueVTs value. This uses
  /// Chain/Flag as the input and updates them for the output Chain/Flag.
  /// If the Flag pointer is NULL, no flag is used.
  SDValue getCopyFromRegs(SelectionDAG &DAG, FunctionLoweringInfo &FuncInfo,
                          SDLoc dl,
                          SDValue &Chain, SDValue *Flag,
                          const Value *V = nullptr) const;

  /// getCopyToRegs - Emit a series of CopyToReg nodes that copies the specified
  /// value into the registers specified by this object. This uses Chain/Flag
  /// as the input and updates them for the output Chain/Flag. If the Flag
  /// pointer is nullptr, no flag is used. If V is not nullptr, then it is used
  /// in printing better diagnostic messages on error.
  void
  getCopyToRegs(SDValue Val, SelectionDAG &DAG, SDLoc dl, SDValue &Chain,
                SDValue *Flag, const Value *V = nullptr,
                ISD::NodeType PreferredExtendType = ISD::ANY_EXTEND) const;

  /// AddInlineAsmOperands - Add this value to the specified inlineasm node
  /// operand list. This adds the code marker, matching input operand index
  /// (if applicable), and includes the number of values added into it.
  void AddInlineAsmOperands(unsigned Kind,
                            bool HasMatching, unsigned MatchingIdx, SDLoc dl,
                            SelectionDAG &DAG,
                            std::vector<SDValue> &Ops) const;
};
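// Illustrative sketch (not part of the interface): Chain and Flag are threaded
// through a copy-out followed by a copy-in using the declarations above; Reg,
// Val, V, TLI and dl stand for whatever the caller already holds.
//
//   RegsForValue RFV(*DAG.getContext(), TLI, Reg, V->getType());
//   SDValue Chain = DAG.getEntryNode(), Flag;
//   RFV.getCopyToRegs(Val, DAG, dl, Chain, &Flag, V);    // updates Chain/Flag
//   SDValue Result = RFV.getCopyFromRegs(DAG, FuncInfo, dl, Chain, &Flag, V);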

} // end namespace llvm

#endif