//===-- SelectionDAGBuilder.h - Selection-DAG building --------*- C++ -*---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This implements routines for translating from LLVM IR into SelectionDAG IR.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H
#define LLVM_LIB_CODEGEN_SELECTIONDAG_SELECTIONDAGBUILDER_H

#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/SelectionDAGNodes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetLowering.h"
#include "StatepointLowering.h"
#include <vector>

namespace llvm {

class AddrSpaceCastInst;
class AliasAnalysis;
class AllocaInst;
class BasicBlock;
class BitCastInst;
class BranchInst;
class CallInst;
class DbgValueInst;
class ExtractElementInst;
class ExtractValueInst;
class FCmpInst;
class FPExtInst;
class FPToSIInst;
class FPToUIInst;
class FPTruncInst;
class Function;
class FunctionLoweringInfo;
class GetElementPtrInst;
class GCFunctionInfo;
class ICmpInst;
class IntToPtrInst;
class IndirectBrInst;
class InvokeInst;
class InsertElementInst;
class InsertValueInst;
class Instruction;
class LoadInst;
class MachineBasicBlock;
class MachineInstr;
class MachineRegisterInfo;
class MDNode;
class MVT;
class PHINode;
class PtrToIntInst;
class ReturnInst;
class SDDbgValue;
class SExtInst;
class SelectInst;
class ShuffleVectorInst;
class SIToFPInst;
class StoreInst;
class SwitchInst;
class DataLayout;
class TargetLibraryInfo;
class TargetLowering;
class TruncInst;
class UIToFPInst;
class UnreachableInst;
class VAArgInst;
class ZExtInst;

//===----------------------------------------------------------------------===//
/// SelectionDAGBuilder - This is the common target-independent lowering
/// implementation that is parameterized by a TargetLowering object.
///
class SelectionDAGBuilder {
  /// CurInst - The current instruction being visited.
  const Instruction *CurInst;

  DenseMap<const Value*, SDValue> NodeMap;

  /// UnusedArgNodeMap - Maps argument values for unused arguments. This is used
  /// to preserve debug information for incoming arguments.
  DenseMap<const Value*, SDValue> UnusedArgNodeMap;

  /// DanglingDebugInfo - Helper type for DanglingDebugInfoMap.
  class DanglingDebugInfo {
    const DbgValueInst* DI;
    DebugLoc dl;
    unsigned SDNodeOrder;
  public:
    DanglingDebugInfo() : DI(nullptr), dl(DebugLoc()), SDNodeOrder(0) { }
    DanglingDebugInfo(const DbgValueInst *di, DebugLoc DL, unsigned SDNO) :
      DI(di), dl(DL), SDNodeOrder(SDNO) { }
    const DbgValueInst* getDI() { return DI; }
    DebugLoc getdl() { return dl; }
    unsigned getSDNodeOrder() { return SDNodeOrder; }
  };

  /// DanglingDebugInfoMap - Keeps track of dbg_values for which we have not
  /// yet seen the referent. We defer handling these until we do see it.
  DenseMap<const Value*, DanglingDebugInfo> DanglingDebugInfoMap;
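
  // Purely illustrative note, not part of the interface: a dangling entry is
  // created when a dbg.value intrinsic is visited before its operand has been
  // assigned an SDValue, e.g. (metadata names hypothetical):
  //
  //   call void @llvm.dbg.value(metadata i32 %x, i64 0, metadata !var, metadata !expr)
  //
  // with %x not yet present in NodeMap. resolveDanglingDebugInfo() emits the
  // deferred SDDbgValue once setValue() records an SDValue for %x.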

public:
  /// PendingLoads - Loads are not emitted to the program immediately. We bunch
  /// them up and then emit token factor nodes when possible. This allows us to
  /// get simple disambiguation between loads without worrying about alias
  /// analysis.
  SmallVector<SDValue, 8> PendingLoads;

  /// State used while lowering a statepoint sequence (gc_statepoint,
  /// gc_relocate, and gc_result). See StatepointLowering.h/cpp for details.
  StatepointLoweringState StatepointLowering;
private:

  /// PendingExports - CopyToReg nodes that copy values to virtual registers
  /// for export to other blocks need to be emitted before any terminator
  /// instruction, but they have no other ordering requirements. We bunch them
  /// up and then emit a single tokenfactor for them just before terminator
  /// instructions.
  SmallVector<SDValue, 8> PendingExports;

  /// SDNodeOrder - A unique monotonically increasing number used to order the
  /// SDNodes we create.
  unsigned SDNodeOrder;

  /// Case - A struct to record the Value for a switch case, and the
  /// case's target basic block.
  struct Case {
    const Constant *Low;
    const Constant *High;
    MachineBasicBlock* BB;
    uint32_t ExtraWeight;

    Case() : Low(nullptr), High(nullptr), BB(nullptr), ExtraWeight(0) { }
    Case(const Constant *low, const Constant *high, MachineBasicBlock *bb,
         uint32_t extraweight) : Low(low), High(high), BB(bb),
      ExtraWeight(extraweight) { }

    APInt size() const {
      const APInt &rHigh = cast<ConstantInt>(High)->getValue();
      const APInt &rLow = cast<ConstantInt>(Low)->getValue();
      return (rHigh - rLow + 1ULL);
    }
  };
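
  // Purely illustrative sketch, not part of the interface: Clusterify() below
  // folds adjacent case values that branch to the same block into one Case
  // range. For a hypothetical switch with cases 1, 2, 3 -> %bbA and 7 -> %bbB,
  // the resulting CaseVector is conceptually
  //   { Low = 1, High = 3, BB = bbA }  and  { Low = 7, High = 7, BB = bbB },
  // and Case::size() returns 3 and 1 respectively.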

  struct CaseBits {
    uint64_t Mask;
    MachineBasicBlock* BB;
    unsigned Bits;
    uint32_t ExtraWeight;

    CaseBits(uint64_t mask, MachineBasicBlock* bb, unsigned bits,
             uint32_t Weight):
      Mask(mask), BB(bb), Bits(bits), ExtraWeight(Weight) { }
  };

  typedef std::vector<Case>           CaseVector;
  typedef std::vector<CaseBits>       CaseBitsVector;
  typedef CaseVector::iterator        CaseItr;
  typedef std::pair<CaseItr, CaseItr> CaseRange;

  /// CaseRec - A struct with ctor used in lowering switches to a binary tree
  /// of conditional branches.
  struct CaseRec {
    CaseRec(MachineBasicBlock *bb, const Constant *lt, const Constant *ge,
            CaseRange r) :
      CaseBB(bb), LT(lt), GE(ge), Range(r) {}

    /// CaseBB - The MBB in which to emit the compare and branch
    MachineBasicBlock *CaseBB;
    /// LT, GE - If nonzero, we know the current case value must be less-than or
    /// greater-than-or-equal-to these Constants.
    const Constant *LT;
    const Constant *GE;
    /// Range - A pair of iterators representing the range of case values to be
    /// processed at this point in the binary search tree.
    CaseRange Range;
  };

  typedef std::vector<CaseRec> CaseRecVector;

  /// The comparison function for sorting the switch case values in the vector.
  /// WARNING: Case ranges should be disjoint!
  struct CaseCmp {
    bool operator()(const Case &C1, const Case &C2) {
      assert(isa<ConstantInt>(C1.Low) && isa<ConstantInt>(C2.High));
      const ConstantInt* CI1 = cast<const ConstantInt>(C1.Low);
      const ConstantInt* CI2 = cast<const ConstantInt>(C2.High);
      return CI1->getValue().slt(CI2->getValue());
    }
  };

  struct CaseBitsCmp {
    bool operator()(const CaseBits &C1, const CaseBits &C2) {
      return C1.Bits > C2.Bits;
    }
  };

  void Clusterify(CaseVector &Cases, const SwitchInst &SI);

  /// CaseBlock - This structure is used to communicate between
  /// SelectionDAGBuilder and SDISel for the code generation of additional basic
  /// blocks needed by multi-case switch statements.
  struct CaseBlock {
    CaseBlock(ISD::CondCode cc, const Value *cmplhs, const Value *cmprhs,
              const Value *cmpmiddle,
              MachineBasicBlock *truebb, MachineBasicBlock *falsebb,
              MachineBasicBlock *me,
              uint32_t trueweight = 0, uint32_t falseweight = 0)
      : CC(cc), CmpLHS(cmplhs), CmpMHS(cmpmiddle), CmpRHS(cmprhs),
        TrueBB(truebb), FalseBB(falsebb), ThisBB(me),
        TrueWeight(trueweight), FalseWeight(falseweight) { }

    // CC - the condition code to use for the case block's setcc node
    ISD::CondCode CC;

    // CmpLHS/CmpRHS/CmpMHS - The LHS/MHS/RHS of the comparison to emit.
    // Emit by default LHS op RHS. MHS is used for range comparisons:
    // If MHS is not null: (LHS <= MHS) and (MHS <= RHS).
    const Value *CmpLHS, *CmpMHS, *CmpRHS;

    // TrueBB/FalseBB - the block to branch to if the setcc is true/false.
    MachineBasicBlock *TrueBB, *FalseBB;

    // ThisBB - the block into which to emit the code for the setcc and branches
    MachineBasicBlock *ThisBB;

    // TrueWeight/FalseWeight - branch weights.
    uint32_t TrueWeight, FalseWeight;
  };

  struct JumpTable {
    JumpTable(unsigned R, unsigned J, MachineBasicBlock *M,
              MachineBasicBlock *D): Reg(R), JTI(J), MBB(M), Default(D) {}

    /// Reg - the virtual register containing the index of the jump table entry
    /// to jump to.
    unsigned Reg;
    /// JTI - the JumpTableIndex for this jump table in the function.
    unsigned JTI;
    /// MBB - the MBB into which to emit the code for the indirect jump.
    MachineBasicBlock *MBB;
    /// Default - the MBB of the default bb, which is a successor of the range
    /// check MBB. This is used when updating PHI nodes in successors.
    MachineBasicBlock *Default;
  };
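
  // Purely illustrative sketch, not part of the interface: for a dense set of
  // case values the switch is lowered to a range check in the header block
  // (bounds held by the paired JumpTableHeader below) followed by an indirect
  // branch through the table, roughly
  //   if ((V - First) > (Last - First)) goto Default;  // emitted in HeaderBB
  //   goto Table[JTI][V - First];                      // emitted in MBB, index in Reg
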
  struct JumpTableHeader {
    JumpTableHeader(APInt F, APInt L, const Value *SV, MachineBasicBlock *H,
                    bool E = false):
      First(F), Last(L), SValue(SV), HeaderBB(H), Emitted(E) {}
    APInt First;
    APInt Last;
    const Value *SValue;
    MachineBasicBlock *HeaderBB;
    bool Emitted;
  };
  typedef std::pair<JumpTableHeader, JumpTable> JumpTableBlock;

  struct BitTestCase {
    BitTestCase(uint64_t M, MachineBasicBlock* T, MachineBasicBlock* Tr,
                uint32_t Weight):
      Mask(M), ThisBB(T), TargetBB(Tr), ExtraWeight(Weight) { }
    uint64_t Mask;
    MachineBasicBlock *ThisBB;
    MachineBasicBlock *TargetBB;
    uint32_t ExtraWeight;
  };

  typedef SmallVector<BitTestCase, 3> BitTestInfo;

  struct BitTestBlock {
    BitTestBlock(APInt F, APInt R, const Value* SV,
                 unsigned Rg, MVT RgVT, bool E,
                 MachineBasicBlock* P, MachineBasicBlock* D,
                 BitTestInfo C):
      First(F), Range(R), SValue(SV), Reg(Rg), RegVT(RgVT), Emitted(E),
      Parent(P), Default(D), Cases(std::move(C)) { }
    APInt First;
    APInt Range;
    const Value *SValue;
    unsigned Reg;
    MVT RegVT;
    bool Emitted;
    MachineBasicBlock *Parent;
    MachineBasicBlock *Default;
    BitTestInfo Cases;
  };

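  // Purely illustrative sketch, not part of the interface: a sparse switch
  // whose case values span less than a machine word can be lowered to bit
  // tests. For hypothetical cases {0, 8, 16} -> %bbA, the value is rebased by
  // First and a single BitTestCase with
  //   Mask = (1ULL << 0) | (1ULL << 8) | (1ULL << 16)
  // is emitted; the test ((1 << (V - First)) & Mask) != 0 branches to TargetBB
  // and otherwise falls through toward Default.
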
  /// A class which encapsulates all of the information needed to generate a
  /// stack protector check and signals to isel via its state being initialized
  /// that a stack protector needs to be generated.
  ///
  /// *NOTE* The following is a high level documentation of SelectionDAG Stack
  /// Protector Generation. The reason that it is placed here is for a lack of
  /// other good places to stick it.
  ///
  /// High Level Overview of SelectionDAG Stack Protector Generation:
  ///
  /// Previously, generation of stack protectors was done exclusively in the
  /// pre-SelectionDAG Codegen LLVM IR Pass "Stack Protector". This necessitated
  /// splitting basic blocks at the IR level to create the success/failure basic
  /// blocks in the tail of the basic block in question. As a result of this,
  /// calls that would have qualified for the sibling call optimization were no
  /// longer eligible for optimization since said calls were no longer right in
  /// the "tail position" (i.e. the immediate predecessor of a ReturnInst
  /// instruction).
  ///
  /// Then it was noticed that since the sibling call optimization causes the
  /// callee to reuse the caller's stack, if we could delay the generation of
  /// the stack protector check until later in CodeGen after the sibling call
  /// decision was made, we get both the tail call optimization and the stack
  /// protector check!
  ///
  /// A few goals in solving this problem were:
  ///
  ///   1. Preserve the architecture independence of stack protector generation.
  ///
  ///   2. Preserve the normal IR level stack protector check for platforms like
  ///      OpenBSD for which we support platform-specific stack protector
  ///      generation.
  ///
  /// The main problem that guided the present solution is that one can not
  /// solve this problem in an architecture independent manner at the IR level
  /// only. This is because:
  ///
  ///   1. The decision on whether or not to perform a sibling call on certain
  ///      platforms (for instance i386) requires lower level information
  ///      related to available registers that can not be known at the IR level.
  ///
  ///   2. Even if the previous point were not true, the decision on whether to
  ///      perform a tail call is done in LowerCallTo in SelectionDAG which
  ///      occurs after the Stack Protector Pass. As a result, one would need to
  ///      put the relevant callinst into the stack protector check success
  ///      basic block (where the return inst is placed) and then move it back
  ///      later at SelectionDAG/MI time before the stack protector check if the
  ///      tail call optimization failed. The MI level option was nixed
  ///      immediately since it would require platform-specific pattern
  ///      matching. The SelectionDAG level option was nixed because
  ///      SelectionDAG only processes one IR level basic block at a time
  ///      implying one could not create a DAG Combine to move the callinst.
  ///
  /// To get around this problem a few things were realized:
  ///
  ///   1. While one can not handle multiple IR level basic blocks at the
  ///      SelectionDAG Level, one can generate multiple machine basic blocks
  ///      for one IR level basic block. This is how we handle bit tests and
  ///      switches.
  ///
  ///   2. At the MI level, tail calls are represented via a special return
  ///      MIInst called "tcreturn". Thus if we know the basic block in which we
  ///      wish to insert the stack protector check, we get the correct behavior
  ///      by always inserting the stack protector check right before the return
  ///      statement. This is a "magical transformation" since no matter where
  ///      the stack protector check intrinsic is, we always insert the stack
  ///      protector check code at the end of the BB.
  ///
  /// Given the aforementioned constraints, the following solution was devised:
  ///
  ///   1. On platforms that do not support SelectionDAG stack protector check
  ///      generation, allow for the normal IR level stack protector check
  ///      generation to continue.
  ///
  ///   2. On platforms that do support SelectionDAG stack protector check
  ///      generation:
  ///
  ///     a. Use the IR level stack protector pass to decide if a stack
  ///        protector is required/which BB we insert the stack protector check
  ///        in by reusing the logic already therein. If we wish to generate a
  ///        stack protector check in a basic block, we place a special IR
  ///        intrinsic called llvm.stackprotectorcheck right before the BB's
  ///        returninst or if there is a callinst that could potentially be
  ///        sibling call optimized, before the call inst.
  ///
  ///     b. Then when a BB with said intrinsic is processed, we codegen the BB
  ///        normally via SelectBasicBlock. In said process, when we visit the
  ///        stack protector check, we do not actually emit anything into the
  ///        BB. Instead, we just initialize the stack protector descriptor
  ///        class (which involves stashing information/creating the success
  ///        mbb and the failure mbb if we have not created one for this
  ///        function yet) and export the guard variable that we are going to
  ///        compare.
  ///
  ///     c. After we finish selecting the basic block, in FinishBasicBlock if
  ///        the StackProtectorDescriptor attached to the SelectionDAGBuilder is
  ///        initialized, we first find a splice point in the parent basic block
  ///        before the terminator and then splice the terminator of said basic
  ///        block into the success basic block. Then we code-gen a new tail for
  ///        the parent basic block consisting of the two loads, the comparison,
  ///        and finally two branches to the success/failure basic blocks. We
  ///        conclude by code-gening the failure basic block if we have not
  ///        code-gened it already (all stack protector checks we generate in
  ///        the same function use the same failure basic block).
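  ///
  /// As a purely illustrative sketch (the guard names here are only an
  /// assumption), the IR produced by step 2.a looks roughly like:
  ///
  ///   entry:
  ///     ...
  ///     call void @llvm.stackprotectorcheck(i8** @__stack_chk_guard)
  ///     ret i32 %retval
  ///
  /// and step 2.c later rewrites the tail of the parent machine basic block
  /// into the guard load/compare plus conditional branches to the
  /// success/failure blocks.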
  class StackProtectorDescriptor {
  public:
    StackProtectorDescriptor() : ParentMBB(nullptr), SuccessMBB(nullptr),
                                 FailureMBB(nullptr), Guard(nullptr),
                                 GuardReg(0) { }
    ~StackProtectorDescriptor() { }

    /// Returns true if all fields of the stack protector descriptor are
    /// initialized implying that we should/are ready to emit a stack protector.
    bool shouldEmitStackProtector() const {
      return ParentMBB && SuccessMBB && FailureMBB && Guard;
    }

    /// Initialize the stack protector descriptor structure for a new basic
    /// block.
    void initialize(const BasicBlock *BB,
                    MachineBasicBlock *MBB,
                    const CallInst &StackProtCheckCall) {
      // Make sure we are not initialized yet.
      assert(!shouldEmitStackProtector() && "Stack Protector Descriptor is "
             "already initialized!");
      ParentMBB = MBB;
      SuccessMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ true);
      FailureMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ false, FailureMBB);
      if (!Guard)
        Guard = StackProtCheckCall.getArgOperand(0);
    }

    /// Reset state that changes when we handle different basic blocks.
    ///
    /// This currently includes:
    ///
    /// 1. The specific basic block we are generating a
    ///    stack protector for (ParentMBB).
    ///
    /// 2. The successor machine basic block that will contain the tail of
    ///    parent mbb after we create the stack protector check (SuccessMBB).
    ///    This BB is visited only on stack protector check success.
    void resetPerBBState() {
      ParentMBB = nullptr;
      SuccessMBB = nullptr;
    }

    /// Reset state that only changes when we switch functions.
    ///
    /// This currently includes:
    ///
    /// 1. FailureMBB since we reuse the failure code path for all stack
    ///    protector checks created in an individual function.
    ///
    /// 2. The guard variable since the guard variable we are checking against
    ///    is always the same.
    void resetPerFunctionState() {
      FailureMBB = nullptr;
      Guard = nullptr;
    }

    MachineBasicBlock *getParentMBB() { return ParentMBB; }
    MachineBasicBlock *getSuccessMBB() { return SuccessMBB; }
    MachineBasicBlock *getFailureMBB() { return FailureMBB; }
    const Value *getGuard() { return Guard; }

    unsigned getGuardReg() const { return GuardReg; }
    void setGuardReg(unsigned R) { GuardReg = R; }

  private:
    /// The basic block for which we are generating the stack protector.
    ///
    /// As a result of stack protector generation, we will splice the
    /// terminators of this basic block into the successor mbb SuccessMBB and
    /// replace it with a compare/branch to the successor mbbs
    /// SuccessMBB/FailureMBB depending on whether or not the stack protector
    /// was violated.
    MachineBasicBlock *ParentMBB;

    /// A basic block visited on stack protector check success that contains the
    /// terminators of ParentMBB.
    MachineBasicBlock *SuccessMBB;

    /// The basic block visited on stack protector check failure; it will
    /// contain a call to __stack_chk_fail().
484 MachineBasicBlock *FailureMBB;
485
486 /// The guard variable which we will compare against the stored value in the
487 /// stack protector stack slot.
488 const Value *Guard;
489
Akira Hatanakae5b6e0d2014-07-25 19:31:34 +0000490 /// The virtual register holding the stack guard value.
491 unsigned GuardReg;
492
Michael Gottesmanb27f0f12013-08-20 07:00:16 +0000493 /// Add a successor machine basic block to ParentMBB. If the successor mbb
494 /// has not been created yet (i.e. if SuccMBB = 0), then the machine basic
Akira Hatanakab9991a22014-12-01 04:27:03 +0000495 /// block will be created. Assign a large weight if IsLikely is true.
Michael Gottesmanb27f0f12013-08-20 07:00:16 +0000496 MachineBasicBlock *AddSuccessorMBB(const BasicBlock *BB,
497 MachineBasicBlock *ParentMBB,
Akira Hatanakab9991a22014-12-01 04:27:03 +0000498 bool IsLikely,
Craig Topperada08572014-04-16 04:21:27 +0000499 MachineBasicBlock *SuccMBB = nullptr);
Michael Gottesmanb27f0f12013-08-20 07:00:16 +0000500 };
501
Bill Wendlinga3cd3502013-06-19 21:36:55 +0000502private:
Dan Gohmanc3349602010-04-19 19:05:59 +0000503 const TargetMachine &TM;
Bill Wendlinga3cd3502013-06-19 21:36:55 +0000504public:
Nico Rieckb5262d62014-01-12 14:09:17 +0000505 /// Lowest valid SDNodeOrder. The special case 0 is reserved for scheduling
506 /// nodes without a corresponding SDNode.
507 static const unsigned LowestSDNodeOrder = 1;
508
Dan Gohman575fad32008-09-03 16:12:24 +0000509 SelectionDAG &DAG;
Rafael Espindola5f57f462014-02-21 18:34:28 +0000510 const DataLayout *DL;
Dan Gohman575fad32008-09-03 16:12:24 +0000511 AliasAnalysis *AA;
Owen Andersonbb15fec2011-12-08 22:15:21 +0000512 const TargetLibraryInfo *LibInfo;
Dan Gohman575fad32008-09-03 16:12:24 +0000513
514 /// SwitchCases - Vector of CaseBlock structures used to communicate
515 /// SwitchInst code generation information.
516 std::vector<CaseBlock> SwitchCases;
517 /// JTCases - Vector of JumpTable structures used to communicate
518 /// SwitchInst code generation information.
519 std::vector<JumpTableBlock> JTCases;
520 /// BitTestCases - Vector of BitTestBlock structures used to communicate
521 /// SwitchInst code generation information.
522 std::vector<BitTestBlock> BitTestCases;
Michael Gottesmanb27f0f12013-08-20 07:00:16 +0000523 /// A StackProtectorDescriptor structure used to communicate stack protector
524 /// information in between SelectBasicBlock and FinishBasicBlock.
525 StackProtectorDescriptor SPDescriptor;
Evan Cheng270d0f92009-09-18 21:02:19 +0000526
Dan Gohman575fad32008-09-03 16:12:24 +0000527 // Emit PHI-node-operand constants only once even if used by multiple
528 // PHI nodes.
Dan Gohmanbcaf6812010-04-15 01:51:59 +0000529 DenseMap<const Constant *, unsigned> ConstantsOut;
Dan Gohman575fad32008-09-03 16:12:24 +0000530
531 /// FuncInfo - Information about the function as a whole.
532 ///
533 FunctionLoweringInfo &FuncInfo;
Bill Wendling19e0a5b2009-02-19 21:12:54 +0000534
Bill Wendling084669a2009-04-29 00:15:41 +0000535 /// OptLevel - What optimization level we're generating code for.
Andrew Trickd4d1d9c2013-10-31 17:18:07 +0000536 ///
Bill Wendling026e5d72009-04-29 23:29:43 +0000537 CodeGenOpt::Level OptLevel;
Andrew Trickd4d1d9c2013-10-31 17:18:07 +0000538
Dan Gohman575fad32008-09-03 16:12:24 +0000539 /// GFI - Garbage collection metadata for the function.
540 GCFunctionInfo *GFI;
541
Bill Wendling267f3232011-10-05 22:24:35 +0000542 /// LPadToCallSiteMap - Map a landing pad to the call site indexes.
543 DenseMap<MachineBasicBlock*, SmallVector<unsigned, 4> > LPadToCallSiteMap;
Bill Wendling3d11aa72011-10-04 22:00:35 +0000544
Dan Gohmanf9bbcd12009-08-05 01:29:28 +0000545 /// HasTailCall - This is set to true if a call in the current
546 /// block has been translated as a tail call. In this case,
547 /// no subsequent DAG nodes should be created.
548 ///
549 bool HasTailCall;
550
Owen Anderson53a52212009-07-13 04:09:18 +0000551 LLVMContext *Context;
552
Dan Gohmanc3349602010-04-19 19:05:59 +0000553 SelectionDAGBuilder(SelectionDAG &dag, FunctionLoweringInfo &funcinfo,
Dan Gohman1a6c47f2009-11-23 18:04:58 +0000554 CodeGenOpt::Level ol)
Craig Topperada08572014-04-16 04:21:27 +0000555 : CurInst(nullptr), SDNodeOrder(LowestSDNodeOrder), TM(dag.getTarget()),
Dan Gohmanc3349602010-04-19 19:05:59 +0000556 DAG(dag), FuncInfo(funcinfo), OptLevel(ol),
Richard Smith3fb20472012-08-22 00:42:39 +0000557 HasTailCall(false) {
Dan Gohman575fad32008-09-03 16:12:24 +0000558 }
559
Owen Andersonbb15fec2011-12-08 22:15:21 +0000560 void init(GCFunctionInfo *gfi, AliasAnalysis &aa,
561 const TargetLibraryInfo *li);
Dan Gohman575fad32008-09-03 16:12:24 +0000562
Dan Gohmanf5cca352010-04-14 18:24:06 +0000563 /// clear - Clear out the current SelectionDAG and the associated
Dan Gohman1a6c47f2009-11-23 18:04:58 +0000564 /// state and prepare this SelectionDAGBuilder object to be used
Dan Gohman575fad32008-09-03 16:12:24 +0000565 /// for a new block. This doesn't clear out information about
566 /// additional blocks that are needed to complete switch lowering
567 /// or PHI node updating; that information is cleared out as it is
568 /// consumed.
569 void clear();
570
Devang Patel799288382011-05-23 17:44:13 +0000571 /// clearDanglingDebugInfo - Clear the dangling debug information
  /// map. This function is separated from clear() so that debug
  /// information that is dangling in a basic block can be properly
  /// resolved in a different basic block. This allows the
  /// SelectionDAG to resolve dangling debug information attached
  /// to PHI nodes.
  void clearDanglingDebugInfo();

  /// getRoot - Return the current virtual root of the Selection DAG,
  /// flushing any PendingLoad items. This must be done before emitting
  /// a store or any other node that may need to be ordered after any
  /// prior load instructions.
  ///
  SDValue getRoot();

  /// getControlRoot - Similar to getRoot, but instead of flushing all the
  /// PendingLoad items, flush all the PendingExports items. It is necessary
  /// to do this before emitting a terminator instruction.
  ///
  SDValue getControlRoot();
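
  // Purely illustrative sketch, not part of the interface: a store visitor
  // typically chains its node on the current root so it is ordered after any
  // pending loads, e.g.
  //   SDValue Root  = getRoot();                       // flushes PendingLoads
  //   SDValue Store = DAG.getStore(Root, getCurSDLoc(), Val, Ptr, /*...*/);
  //   DAG.setRoot(Store);
  // whereas terminators use getControlRoot() so PendingExports are flushed
  // first.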

  SDLoc getCurSDLoc() const {
    return SDLoc(CurInst, SDNodeOrder);
  }

  DebugLoc getCurDebugLoc() const {
    return CurInst ? CurInst->getDebugLoc() : DebugLoc();
  }

  unsigned getSDNodeOrder() const { return SDNodeOrder; }

  void CopyValueToVirtualRegister(const Value *V, unsigned Reg);

  void visit(const Instruction &I);

  void visit(unsigned Opcode, const User &I);

  // resolveDanglingDebugInfo - if we saw an earlier dbg_value referring to V,
  // generate the debug data structures now that we've seen its definition.
  void resolveDanglingDebugInfo(const Value *V, SDValue Val);
  SDValue getValue(const Value *V);
  SDValue getNonRegisterValue(const Value *V);
  SDValue getValueImpl(const Value *V);

  void setValue(const Value *V, SDValue NewN) {
    SDValue &N = NodeMap[V];
    assert(!N.getNode() && "Already set a value for this node!");
    N = NewN;
  }
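
  // Purely illustrative sketch, not part of the interface: visitors pair
  // getValue and setValue, e.g. a binary operator is lowered roughly as
  //   SDValue Op1 = getValue(I.getOperand(0));
  //   SDValue Op2 = getValue(I.getOperand(1));
  //   setValue(&I, DAG.getNode(ISD::ADD, getCurSDLoc(),
  //                            Op1.getValueType(), Op1, Op2));
  // after which any dangling dbg_values that referred to &I can be resolved
  // via resolveDanglingDebugInfo.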

  void removeValue(const Value *V) {
    // This exists to support a hack in lowerCallFromStatepoint; it should be
    // removed once that hack is resolved.
    if (NodeMap.count(V))
      NodeMap.erase(V);
  }

  void setUnusedArgValue(const Value *V, SDValue NewN) {
    SDValue &N = UnusedArgNodeMap[V];
    assert(!N.getNode() && "Already set a value for this node!");
    N = NewN;
  }

  void FindMergedConditions(const Value *Cond, MachineBasicBlock *TBB,
                            MachineBasicBlock *FBB, MachineBasicBlock *CurBB,
                            MachineBasicBlock *SwitchBB, unsigned Opc,
                            uint32_t TW, uint32_t FW);
  void EmitBranchForMergedCondition(const Value *Cond, MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    MachineBasicBlock *CurBB,
                                    MachineBasicBlock *SwitchBB,
                                    uint32_t TW, uint32_t FW);
  bool ShouldEmitAsBranches(const std::vector<CaseBlock> &Cases);
  bool isExportableFromCurrentBlock(const Value *V, const BasicBlock *FromBB);
  void CopyToExportRegsIfNeeded(const Value *V);
  void ExportFromCurrentBlock(const Value *V);
  void LowerCallTo(ImmutableCallSite CS, SDValue Callee, bool IsTailCall,
                   MachineBasicBlock *LandingPad = nullptr);

  std::pair<SDValue, SDValue> lowerCallOperands(
          ImmutableCallSite CS,
          unsigned ArgIdx,
          unsigned NumArgs,
          SDValue Callee,
          bool UseVoidTy = false,
          MachineBasicBlock *LandingPad = nullptr,
          bool IsPatchPoint = false);

  /// UpdateSplitBlock - When an MBB was split during scheduling, update the
  /// references that need to refer to the last resulting block.
  void UpdateSplitBlock(MachineBasicBlock *First, MachineBasicBlock *Last);

private:
  std::pair<SDValue, SDValue> lowerInvokable(
          TargetLowering::CallLoweringInfo &CLI,
          MachineBasicBlock *LandingPad);

  // Terminator instructions.
  void visitRet(const ReturnInst &I);
  void visitBr(const BranchInst &I);
  void visitSwitch(const SwitchInst &I);
  void visitIndirectBr(const IndirectBrInst &I);
  void visitUnreachable(const UnreachableInst &I);

  // Helpers for visitSwitch
  bool handleSmallSwitchRange(CaseRec& CR,
                              CaseRecVector& WorkList,
                              const Value* SV,
                              MachineBasicBlock* Default,
                              MachineBasicBlock *SwitchBB);
  bool handleJTSwitchCase(CaseRec& CR,
                          CaseRecVector& WorkList,
                          const Value* SV,
                          MachineBasicBlock* Default,
                          MachineBasicBlock *SwitchBB);
  bool handleBTSplitSwitchCase(CaseRec& CR,
                               CaseRecVector& WorkList,
                               const Value* SV,
                               MachineBasicBlock *SwitchBB);
  bool handleBitTestsSwitchCase(CaseRec& CR,
                                CaseRecVector& WorkList,
                                const Value* SV,
                                MachineBasicBlock* Default,
                                MachineBasicBlock *SwitchBB);

  uint32_t getEdgeWeight(const MachineBasicBlock *Src,
                         const MachineBasicBlock *Dst) const;
  void addSuccessorWithWeight(MachineBasicBlock *Src, MachineBasicBlock *Dst,
                              uint32_t Weight = 0);
public:
  void visitSwitchCase(CaseBlock &CB,
                       MachineBasicBlock *SwitchBB);
  void visitSPDescriptorParent(StackProtectorDescriptor &SPD,
                               MachineBasicBlock *ParentBB);
  void visitSPDescriptorFailure(StackProtectorDescriptor &SPD);
  void visitBitTestHeader(BitTestBlock &B, MachineBasicBlock *SwitchBB);
  void visitBitTestCase(BitTestBlock &BB,
                        MachineBasicBlock* NextMBB,
                        uint32_t BranchWeightToNext,
                        unsigned Reg,
                        BitTestCase &B,
                        MachineBasicBlock *SwitchBB);
  void visitJumpTable(JumpTable &JT);
  void visitJumpTableHeader(JumpTable &JT, JumpTableHeader &JTH,
                            MachineBasicBlock *SwitchBB);
  unsigned visitLandingPadClauseBB(GlobalValue *ClauseGV,
                                   MachineBasicBlock *LPadMBB);

private:
  // These all get lowered before this pass.
  void visitInvoke(const InvokeInst &I);
  void visitResume(const ResumeInst &I);

  void visitBinary(const User &I, unsigned OpCode);
  void visitShift(const User &I, unsigned Opcode);
  void visitAdd(const User &I)  { visitBinary(I, ISD::ADD); }
  void visitFAdd(const User &I) { visitBinary(I, ISD::FADD); }
  void visitSub(const User &I)  { visitBinary(I, ISD::SUB); }
  void visitFSub(const User &I);
  void visitMul(const User &I)  { visitBinary(I, ISD::MUL); }
  void visitFMul(const User &I) { visitBinary(I, ISD::FMUL); }
  void visitURem(const User &I) { visitBinary(I, ISD::UREM); }
  void visitSRem(const User &I) { visitBinary(I, ISD::SREM); }
  void visitFRem(const User &I) { visitBinary(I, ISD::FREM); }
  void visitUDiv(const User &I) { visitBinary(I, ISD::UDIV); }
  void visitSDiv(const User &I);
  void visitFDiv(const User &I) { visitBinary(I, ISD::FDIV); }
  void visitAnd (const User &I) { visitBinary(I, ISD::AND); }
  void visitOr  (const User &I) { visitBinary(I, ISD::OR); }
  void visitXor (const User &I) { visitBinary(I, ISD::XOR); }
  void visitShl (const User &I) { visitShift(I, ISD::SHL); }
  void visitLShr(const User &I) { visitShift(I, ISD::SRL); }
  void visitAShr(const User &I) { visitShift(I, ISD::SRA); }
  void visitICmp(const User &I);
  void visitFCmp(const User &I);
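
  // Purely illustrative note, not part of the interface: each one-line wrapper
  // above just forwards to visitBinary/visitShift with the matching ISD
  // opcode, so e.g.
  //   %c = fadd double %a, %b
  // becomes an ISD::FADD node whose operands are getValue(%a) and getValue(%b).
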
  // Visit the conversion instructions
  void visitTrunc(const User &I);
  void visitZExt(const User &I);
  void visitSExt(const User &I);
  void visitFPTrunc(const User &I);
  void visitFPExt(const User &I);
  void visitFPToUI(const User &I);
  void visitFPToSI(const User &I);
  void visitUIToFP(const User &I);
  void visitSIToFP(const User &I);
  void visitPtrToInt(const User &I);
  void visitIntToPtr(const User &I);
  void visitBitCast(const User &I);
  void visitAddrSpaceCast(const User &I);

  void visitExtractElement(const User &I);
  void visitInsertElement(const User &I);
  void visitShuffleVector(const User &I);

  void visitExtractValue(const ExtractValueInst &I);
  void visitInsertValue(const InsertValueInst &I);
  void visitLandingPad(const LandingPadInst &I);

  void visitGetElementPtr(const User &I);
  void visitSelect(const User &I);

  void visitAlloca(const AllocaInst &I);
  void visitLoad(const LoadInst &I);
  void visitStore(const StoreInst &I);
  void visitMaskedLoad(const CallInst &I);
  void visitMaskedStore(const CallInst &I);
  void visitAtomicCmpXchg(const AtomicCmpXchgInst &I);
  void visitAtomicRMW(const AtomicRMWInst &I);
  void visitFence(const FenceInst &I);
  void visitPHI(const PHINode &I);
  void visitCall(const CallInst &I);
  bool visitMemCmpCall(const CallInst &I);
  bool visitMemChrCall(const CallInst &I);
  bool visitStrCpyCall(const CallInst &I, bool isStpcpy);
  bool visitStrCmpCall(const CallInst &I);
  bool visitStrLenCall(const CallInst &I);
  bool visitStrNLenCall(const CallInst &I);
  bool visitUnaryFloatCall(const CallInst &I, unsigned Opcode);
  bool visitBinaryFloatCall(const CallInst &I, unsigned Opcode);
  void visitAtomicLoad(const LoadInst &I);
  void visitAtomicStore(const StoreInst &I);

  void visitInlineAsm(ImmutableCallSite CS);
  const char *visitIntrinsicCall(const CallInst &I, unsigned Intrinsic);
  void visitTargetIntrinsic(const CallInst &I, unsigned Intrinsic);

  void visitVAStart(const CallInst &I);
  void visitVAArg(const VAArgInst &I);
  void visitVAEnd(const CallInst &I);
  void visitVACopy(const CallInst &I);
  void visitStackmap(const CallInst &I);
  void visitPatchpoint(ImmutableCallSite CS,
                       MachineBasicBlock *LandingPad = nullptr);

  // These three are implemented in StatepointLowering.cpp
  void visitStatepoint(const CallInst &I);
  void visitGCRelocate(const CallInst &I);
  void visitGCResult(const CallInst &I);

  void visitUserOp1(const Instruction &I) {
    llvm_unreachable("UserOp1 should not exist at instruction selection time!");
  }
  void visitUserOp2(const Instruction &I) {
    llvm_unreachable("UserOp2 should not exist at instruction selection time!");
  }

  void processIntegerCallValue(const Instruction &I,
                               SDValue Value, bool IsSigned);

  void HandlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB);

  /// EmitFuncArgumentDbgValue - If V is a function argument then create a
  /// corresponding DBG_VALUE machine instruction for it now. At the end of
  /// instruction selection, these instructions are inserted into the entry BB.
  bool EmitFuncArgumentDbgValue(const Value *V, MDNode *Variable, MDNode *Expr,
                                int64_t Offset, bool IsIndirect,
                                const SDValue &N);
};

} // end namespace llvm

#endif