//===-- FastISel.cpp - Implementation of the FastISel class ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation of the FastISel class.
//
// "Fast" instruction selection is designed to emit very poor code quickly.
// Also, it is not designed to be able to do much lowering, so most illegal
// types (e.g. i64 on 32-bit targets) and operations are not supported. It is
// also not intended to be able to do much optimization, except in a few cases
// where doing optimizations reduces overall compile time. For example, folding
// constants into immediate fields is often done, because it's cheap and it
// reduces the number of instructions later phases have to examine.
//
// "Fast" instruction selection is able to fail gracefully and transfer
// control to the SelectionDAG selector for operations that it doesn't
// support. In many cases, this allows us to avoid duplicating a lot of
// the complicated lowering logic that SelectionDAG currently has.
//
// The intended use for "fast" instruction selection is "-O0" mode
// compilation, where the quality of the generated code is irrelevant when
// weighed against the speed at which the code can be generated. Also,
// at -O0, the LLVM optimizers are not running, and this makes the
// compile time of codegen a much higher portion of the overall compile
// time. Despite its limitations, "fast" instruction selection is able to
// handle enough code on its own to provide noticeable overall speedups
// in -O0 compiles.
//
// Basic operations are supported in a target-independent way, by reading
// the same instruction descriptions that the SelectionDAG selector reads,
// and identifying simple arithmetic operations that can be directly selected
// from simple operators. More complicated operations currently require
// target-specific code.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/FastISel.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/FunctionLoweringInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetLibraryInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

#define DEBUG_TYPE "isel"

STATISTIC(NumFastIselSuccessIndependent, "Number of insts selected by "
          "target-independent selector");
STATISTIC(NumFastIselSuccessTarget, "Number of insts selected by "
          "target-specific selector");
STATISTIC(NumFastIselDead, "Number of dead insts removed on failure");

/// \brief Set CallLoweringInfo attribute flags based on a call instruction
/// and called function attributes.
void FastISel::ArgListEntry::setAttributes(ImmutableCallSite *CS,
                                           unsigned AttrIdx) {
  isSExt     = CS->paramHasAttr(AttrIdx, Attribute::SExt);
  isZExt     = CS->paramHasAttr(AttrIdx, Attribute::ZExt);
  isInReg    = CS->paramHasAttr(AttrIdx, Attribute::InReg);
  isSRet     = CS->paramHasAttr(AttrIdx, Attribute::StructRet);
  isNest     = CS->paramHasAttr(AttrIdx, Attribute::Nest);
  isByVal    = CS->paramHasAttr(AttrIdx, Attribute::ByVal);
  isInAlloca = CS->paramHasAttr(AttrIdx, Attribute::InAlloca);
  isReturned = CS->paramHasAttr(AttrIdx, Attribute::Returned);
  Alignment  = CS->getParamAlignment(AttrIdx);
}

/// startNewBlock - Set the current block to which generated machine
/// instructions will be appended, and clear the local CSE map.
///
void FastISel::startNewBlock() {
  LocalValueMap.clear();

  // Instructions are appended to FuncInfo.MBB. If the basic block already
  // contains labels or copies, use the last instruction as the last local
  // value.
  EmitStartPt = nullptr;
  if (!FuncInfo.MBB->empty())
    EmitStartPt = &FuncInfo.MBB->back();
  LastLocalValue = EmitStartPt;
}

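/// LowerArguments - Lower the incoming arguments of the current function by
/// calling the target's FastLowerArguments hook and recording the resulting
/// registers in the ValueMap. Returns false if the arguments must instead be
/// lowered by SelectionDAG.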
bool FastISel::LowerArguments() {
  if (!FuncInfo.CanLowerReturn)
    // Fallback to SDISel argument lowering code to deal with sret pointer
    // parameter.
    return false;

  if (!FastLowerArguments())
    return false;

  // Enter arguments into ValueMap for uses in non-entry BBs.
  for (Function::const_arg_iterator I = FuncInfo.Fn->arg_begin(),
         E = FuncInfo.Fn->arg_end(); I != E; ++I) {
    DenseMap<const Value *, unsigned>::iterator VI = LocalValueMap.find(I);
    assert(VI != LocalValueMap.end() && "Missed an argument?");
    FuncInfo.ValueMap[I] = VI->second;
  }
  return true;
}

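/// flushLocalValueMap - Clear the block-local value map and reset the
/// local-value insertion point for the current block.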
void FastISel::flushLocalValueMap() {
  LocalValueMap.clear();
  LastLocalValue = EmitStartPt;
  recomputeInsertPt();
}

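/// hasTrivialKill - Return true if the given value is an instruction whose
/// only use is in the same basic block, so its register can safely be marked
/// as killed at that use.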
bool FastISel::hasTrivialKill(const Value *V) const {
  // Don't consider constants or arguments to have trivial kills.
  const Instruction *I = dyn_cast<Instruction>(V);
  if (!I)
    return false;

  // No-op casts are trivially coalesced by fast-isel.
  if (const CastInst *Cast = dyn_cast<CastInst>(I))
    if (Cast->isNoopCast(DL.getIntPtrType(Cast->getContext())) &&
        !hasTrivialKill(Cast->getOperand(0)))
      return false;

  // GEPs with all zero indices are trivially coalesced by fast-isel.
  if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I))
    if (GEP->hasAllZeroIndices() && !hasTrivialKill(GEP->getOperand(0)))
      return false;

  // Only instructions with a single use in the same basic block are considered
  // to have trivial kills.
  return I->hasOneUse() &&
         !(I->getOpcode() == Instruction::BitCast ||
           I->getOpcode() == Instruction::PtrToInt ||
           I->getOpcode() == Instruction::IntToPtr) &&
         cast<Instruction>(*I->user_begin())->getParent() == I->getParent();
}

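/// getRegForValue - Return the virtual register that holds the given value,
/// creating one (and materializing the value if necessary) when no register
/// has been assigned yet. Returns 0 if the value cannot be handled.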
unsigned FastISel::getRegForValue(const Value *V) {
  EVT RealVT = TLI.getValueType(V->getType(), /*AllowUnknown=*/true);
  // Don't handle non-simple values in FastISel.
  if (!RealVT.isSimple())
    return 0;

  // Ignore illegal types. We must do this before looking up the value
  // in ValueMap because Arguments are given virtual registers regardless
  // of whether FastISel can handle them.
  MVT VT = RealVT.getSimpleVT();
  if (!TLI.isTypeLegal(VT)) {
    // Handle integer promotions, though, because they're common and easy.
    if (VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16)
      VT = TLI.getTypeToTransformTo(V->getContext(), VT).getSimpleVT();
    else
      return 0;
  }

  // Look up the value to see if we already have a register for it.
  unsigned Reg = lookUpRegForValue(V);
  if (Reg != 0)
    return Reg;

  // In bottom-up mode, just create the virtual register which will be used
  // to hold the value. It will be materialized later.
  if (isa<Instruction>(V) &&
      (!isa<AllocaInst>(V) ||
       !FuncInfo.StaticAllocaMap.count(cast<AllocaInst>(V))))
    return FuncInfo.InitializeRegForValue(V);

  SavePoint SaveInsertPt = enterLocalValueArea();

  // Materialize the value in a register. Emit any instructions in the
  // local value area.
  Reg = materializeRegForValue(V, VT);

  leaveLocalValueArea(SaveInsertPt);

  return Reg;
}

/// materializeRegForValue - Helper for getRegForValue. This function is
/// called when the value isn't already available in a register and must
/// be materialized with new instructions.
unsigned FastISel::materializeRegForValue(const Value *V, MVT VT) {
  unsigned Reg = 0;

  if (const ConstantInt *CI = dyn_cast<ConstantInt>(V)) {
    if (CI->getValue().getActiveBits() <= 64)
      Reg = FastEmit_i(VT, VT, ISD::Constant, CI->getZExtValue());
  } else if (isa<AllocaInst>(V)) {
    Reg = TargetMaterializeAlloca(cast<AllocaInst>(V));
  } else if (isa<ConstantPointerNull>(V)) {
    // Translate this as an integer zero so that it can be
    // local-CSE'd with actual integer zeros.
    Reg =
      getRegForValue(Constant::getNullValue(DL.getIntPtrType(V->getContext())));
  } else if (const ConstantFP *CF = dyn_cast<ConstantFP>(V)) {
    if (CF->isNullValue()) {
      Reg = TargetMaterializeFloatZero(CF);
    } else {
      // Try to emit the constant directly.
      Reg = FastEmit_f(VT, VT, ISD::ConstantFP, CF);
    }

    if (!Reg) {
      // Try to emit the constant by using an integer constant with a cast.
      const APFloat &Flt = CF->getValueAPF();
      EVT IntVT = TLI.getPointerTy();

      uint64_t x[2];
      uint32_t IntBitWidth = IntVT.getSizeInBits();
      bool isExact;
      (void) Flt.convertToInteger(x, IntBitWidth, /*isSigned=*/true,
                                  APFloat::rmTowardZero, &isExact);
      if (isExact) {
        APInt IntVal(IntBitWidth, x);

        unsigned IntegerReg =
          getRegForValue(ConstantInt::get(V->getContext(), IntVal));
        if (IntegerReg != 0)
          Reg = FastEmit_r(IntVT.getSimpleVT(), VT, ISD::SINT_TO_FP,
                           IntegerReg, /*Kill=*/false);
      }
    }
  } else if (const Operator *Op = dyn_cast<Operator>(V)) {
    if (!SelectOperator(Op, Op->getOpcode()))
      if (!isa<Instruction>(Op) ||
          !TargetSelectInstruction(cast<Instruction>(Op)))
        return 0;
    Reg = lookUpRegForValue(Op);
  } else if (isa<UndefValue>(V)) {
    Reg = createResultReg(TLI.getRegClassFor(VT));
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
            TII.get(TargetOpcode::IMPLICIT_DEF), Reg);
  }

  // If target-independent code couldn't handle the value, give target-specific
  // code a try.
  if (!Reg && isa<Constant>(V))
    Reg = TargetMaterializeConstant(cast<Constant>(V));

  // Don't cache constant materializations in the general ValueMap.
  // To do so would require tracking what uses they dominate.
  if (Reg != 0) {
    LocalValueMap[V] = Reg;
    LastLocalValue = MRI.getVRegDef(Reg);
  }
  return Reg;
}

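/// lookUpRegForValue - Look up the register already assigned to the given
/// value, consulting the function-wide ValueMap first and then the block-local
/// LocalValueMap. Returns 0 if no register has been assigned.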
unsigned FastISel::lookUpRegForValue(const Value *V) {
  // Look up the value to see if we already have a register for it. We
  // cache values defined by Instructions across blocks, and other values
  // only locally. This is because Instructions already have the SSA
  // def-dominates-use requirement enforced.
  DenseMap<const Value *, unsigned>::iterator I = FuncInfo.ValueMap.find(V);
  if (I != FuncInfo.ValueMap.end())
    return I->second;
  return LocalValueMap[V];
}

/// UpdateValueMap - Update the value map to include the new mapping for this
/// instruction, or insert an extra copy to get the result in a previously
/// determined register.
/// NOTE: This is only necessary because we might select a block that uses
/// a value before we select the block that defines the value. It might be
/// possible to fix this by selecting blocks in reverse postorder.
void FastISel::UpdateValueMap(const Value *I, unsigned Reg, unsigned NumRegs) {
  if (!isa<Instruction>(I)) {
    LocalValueMap[I] = Reg;
    return;
  }

  unsigned &AssignedReg = FuncInfo.ValueMap[I];
  if (AssignedReg == 0)
    // Use the new register.
    AssignedReg = Reg;
  else if (Reg != AssignedReg) {
    // Arrange for uses of AssignedReg to be replaced by uses of Reg.
    for (unsigned i = 0; i < NumRegs; i++)
      FuncInfo.RegFixups[AssignedReg+i] = Reg+i;

    AssignedReg = Reg;
  }
}

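/// getRegForGEPIndex - Return the register holding the given GEP index,
/// sign-extended or truncated to the pointer width as needed. The bool in the
/// returned pair indicates whether the register is killed by this use.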
std::pair<unsigned, bool> FastISel::getRegForGEPIndex(const Value *Idx) {
  unsigned IdxN = getRegForValue(Idx);
  if (IdxN == 0)
    // Unhandled operand. Halt "fast" selection and bail.
    return std::pair<unsigned, bool>(0, false);

  bool IdxNIsKill = hasTrivialKill(Idx);

  // If the index is smaller or larger than intptr_t, truncate or extend it.
  MVT PtrVT = TLI.getPointerTy();
  EVT IdxVT = EVT::getEVT(Idx->getType(), /*HandleUnknown=*/false);
  if (IdxVT.bitsLT(PtrVT)) {
    IdxN = FastEmit_r(IdxVT.getSimpleVT(), PtrVT, ISD::SIGN_EXTEND,
                      IdxN, IdxNIsKill);
    IdxNIsKill = true;
  } else if (IdxVT.bitsGT(PtrVT)) {
    IdxN = FastEmit_r(IdxVT.getSimpleVT(), PtrVT, ISD::TRUNCATE,
                      IdxN, IdxNIsKill);
    IdxNIsKill = true;
  }
  return std::pair<unsigned, bool>(IdxN, IdxNIsKill);
}

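/// recomputeInsertPt - Reset the insertion point to just past the last local
/// value instruction, or to the first non-PHI instruction if no local values
/// have been emitted, skipping any EH_LABELs that must stay at the top of the
/// block.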
void FastISel::recomputeInsertPt() {
  if (getLastLocalValue()) {
    FuncInfo.InsertPt = getLastLocalValue();
    FuncInfo.MBB = FuncInfo.InsertPt->getParent();
    ++FuncInfo.InsertPt;
  } else
    FuncInfo.InsertPt = FuncInfo.MBB->getFirstNonPHI();

  // Now skip past any EH_LABELs, which must remain at the beginning.
  while (FuncInfo.InsertPt != FuncInfo.MBB->end() &&
         FuncInfo.InsertPt->getOpcode() == TargetOpcode::EH_LABEL)
    ++FuncInfo.InsertPt;
}

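/// removeDeadCode - Erase the machine instructions in the half-open range
/// [I, E), which were emitted for a selection attempt that is being abandoned,
/// and recompute the insertion point.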
void FastISel::removeDeadCode(MachineBasicBlock::iterator I,
                              MachineBasicBlock::iterator E) {
  assert(I && E && std::distance(I, E) > 0 && "Invalid iterator!");
  while (I != E) {
    MachineInstr *Dead = &*I;
    ++I;
    Dead->eraseFromParent();
    ++NumFastIselDead;
  }
  recomputeInsertPt();
}

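/// enterLocalValueArea - Move the insertion point to the local value area at
/// the top of the block and clear the debug location, returning a SavePoint
/// for a later call to leaveLocalValueArea.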
FastISel::SavePoint FastISel::enterLocalValueArea() {
  MachineBasicBlock::iterator OldInsertPt = FuncInfo.InsertPt;
  DebugLoc OldDL = DbgLoc;
  recomputeInsertPt();
  DbgLoc = DebugLoc();
  SavePoint SP = { OldInsertPt, OldDL };
  return SP;
}

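/// leaveLocalValueArea - Record any newly emitted local values and restore the
/// insertion point and debug location captured by enterLocalValueArea.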
void FastISel::leaveLocalValueArea(SavePoint OldInsertPt) {
  if (FuncInfo.InsertPt != FuncInfo.MBB->begin())
    LastLocalValue = std::prev(FuncInfo.InsertPt);

  // Restore the previous insert position.
  FuncInfo.InsertPt = OldInsertPt.InsertPt;
  DbgLoc = OldInsertPt.DL;
}

/// SelectBinaryOp - Select and emit code for a binary operator instruction,
/// which has an opcode which directly corresponds to the given ISD opcode.
///
bool FastISel::SelectBinaryOp(const User *I, unsigned ISDOpcode) {
  EVT VT = EVT::getEVT(I->getType(), /*HandleUnknown=*/true);
  if (VT == MVT::Other || !VT.isSimple())
    // Unhandled type. Halt "fast" selection and bail.
    return false;

  // We only handle legal types. For example, on x86-32 the instruction
  // selector contains all of the 64-bit instructions from x86-64,
  // under the assumption that i64 won't be used if the target doesn't
  // support it.
  if (!TLI.isTypeLegal(VT)) {
    // MVT::i1 is special. Allow AND, OR, or XOR because they
    // don't require additional zeroing, which makes them easy.
    if (VT == MVT::i1 &&
        (ISDOpcode == ISD::AND || ISDOpcode == ISD::OR ||
         ISDOpcode == ISD::XOR))
      VT = TLI.getTypeToTransformTo(I->getContext(), VT);
    else
      return false;
  }

  // Check if the first operand is a constant, and handle it as "ri". At -O0,
  // we don't have anything that canonicalizes operand order.
  if (ConstantInt *CI = dyn_cast<ConstantInt>(I->getOperand(0)))
    if (isa<Instruction>(I) && cast<Instruction>(I)->isCommutative()) {
      unsigned Op1 = getRegForValue(I->getOperand(1));
      if (Op1 == 0) return false;

      bool Op1IsKill = hasTrivialKill(I->getOperand(1));

      unsigned ResultReg = FastEmit_ri_(VT.getSimpleVT(), ISDOpcode, Op1,
                                        Op1IsKill, CI->getZExtValue(),
                                        VT.getSimpleVT());
      if (ResultReg == 0) return false;

      // We successfully emitted code for the given LLVM Instruction.
      UpdateValueMap(I, ResultReg);
      return true;
    }

  unsigned Op0 = getRegForValue(I->getOperand(0));
  if (Op0 == 0) // Unhandled operand. Halt "fast" selection and bail.
    return false;

  bool Op0IsKill = hasTrivialKill(I->getOperand(0));

  // Check if the second operand is a constant and handle it appropriately.
  if (ConstantInt *CI = dyn_cast<ConstantInt>(I->getOperand(1))) {
    uint64_t Imm = CI->getZExtValue();

    // Transform "sdiv exact X, 8" -> "sra X, 3".
    if (ISDOpcode == ISD::SDIV && isa<BinaryOperator>(I) &&
        cast<BinaryOperator>(I)->isExact() &&
        isPowerOf2_64(Imm)) {
      Imm = Log2_64(Imm);
      ISDOpcode = ISD::SRA;
    }

    // Transform "urem x, pow2" -> "and x, pow2-1".
    if (ISDOpcode == ISD::UREM && isa<BinaryOperator>(I) &&
        isPowerOf2_64(Imm)) {
      --Imm;
      ISDOpcode = ISD::AND;
    }

    unsigned ResultReg = FastEmit_ri_(VT.getSimpleVT(), ISDOpcode, Op0,
                                      Op0IsKill, Imm, VT.getSimpleVT());
    if (ResultReg == 0) return false;

    // We successfully emitted code for the given LLVM Instruction.
    UpdateValueMap(I, ResultReg);
    return true;
  }

  // Check if the second operand is a constant float.
  if (ConstantFP *CF = dyn_cast<ConstantFP>(I->getOperand(1))) {
    unsigned ResultReg = FastEmit_rf(VT.getSimpleVT(), VT.getSimpleVT(),
                                     ISDOpcode, Op0, Op0IsKill, CF);
    if (ResultReg != 0) {
      // We successfully emitted code for the given LLVM Instruction.
      UpdateValueMap(I, ResultReg);
      return true;
    }
  }

  unsigned Op1 = getRegForValue(I->getOperand(1));
  if (Op1 == 0)
    // Unhandled operand. Halt "fast" selection and bail.
    return false;

  bool Op1IsKill = hasTrivialKill(I->getOperand(1));

  // Now we have both operands in registers. Emit the instruction.
  unsigned ResultReg = FastEmit_rr(VT.getSimpleVT(), VT.getSimpleVT(),
                                   ISDOpcode,
                                   Op0, Op0IsKill,
                                   Op1, Op1IsKill);
  if (ResultReg == 0)
    // Target-specific code wasn't able to find a machine opcode for
    // the given ISD opcode and type. Halt "fast" selection and bail.
    return false;

  // We successfully emitted code for the given LLVM Instruction.
  UpdateValueMap(I, ResultReg);
  return true;
}

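/// SelectGetElementPtr - Select and emit code for a getelementptr instruction,
/// folding constant indices into a running offset and emitting adds for the
/// variable parts.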
bool FastISel::SelectGetElementPtr(const User *I) {
  unsigned N = getRegForValue(I->getOperand(0));
  if (N == 0)
    // Unhandled operand. Halt "fast" selection and bail.
    return false;

  bool NIsKill = hasTrivialKill(I->getOperand(0));

  // Keep a running tab of the total offset to coalesce multiple N = N + Offset
  // into a single N = N + TotalOffset.
  uint64_t TotalOffs = 0;
  // FIXME: What's a good SWAG number for MaxOffs?
  uint64_t MaxOffs = 2048;
  Type *Ty = I->getOperand(0)->getType();
  MVT VT = TLI.getPointerTy();
  for (GetElementPtrInst::const_op_iterator OI = I->op_begin()+1,
       E = I->op_end(); OI != E; ++OI) {
    const Value *Idx = *OI;
    if (StructType *StTy = dyn_cast<StructType>(Ty)) {
      unsigned Field = cast<ConstantInt>(Idx)->getZExtValue();
      if (Field) {
        // N = N + Offset
        TotalOffs += DL.getStructLayout(StTy)->getElementOffset(Field);
        if (TotalOffs >= MaxOffs) {
          N = FastEmit_ri_(VT, ISD::ADD, N, NIsKill, TotalOffs, VT);
          if (N == 0)
            // Unhandled operand. Halt "fast" selection and bail.
            return false;
          NIsKill = true;
          TotalOffs = 0;
        }
      }
      Ty = StTy->getElementType(Field);
    } else {
      Ty = cast<SequentialType>(Ty)->getElementType();

      // If this is a constant subscript, handle it quickly.
      if (const ConstantInt *CI = dyn_cast<ConstantInt>(Idx)) {
        if (CI->isZero()) continue;
        // N = N + Offset
        TotalOffs +=
          DL.getTypeAllocSize(Ty)*cast<ConstantInt>(CI)->getSExtValue();
        if (TotalOffs >= MaxOffs) {
          N = FastEmit_ri_(VT, ISD::ADD, N, NIsKill, TotalOffs, VT);
          if (N == 0)
            // Unhandled operand. Halt "fast" selection and bail.
            return false;
          NIsKill = true;
          TotalOffs = 0;
        }
        continue;
      }
      if (TotalOffs) {
        N = FastEmit_ri_(VT, ISD::ADD, N, NIsKill, TotalOffs, VT);
        if (N == 0)
          // Unhandled operand. Halt "fast" selection and bail.
          return false;
        NIsKill = true;
        TotalOffs = 0;
      }

      // N = N + Idx * ElementSize;
      uint64_t ElementSize = DL.getTypeAllocSize(Ty);
      std::pair<unsigned, bool> Pair = getRegForGEPIndex(Idx);
      unsigned IdxN = Pair.first;
      bool IdxNIsKill = Pair.second;
      if (IdxN == 0)
        // Unhandled operand. Halt "fast" selection and bail.
        return false;

      if (ElementSize != 1) {
        IdxN = FastEmit_ri_(VT, ISD::MUL, IdxN, IdxNIsKill, ElementSize, VT);
        if (IdxN == 0)
          // Unhandled operand. Halt "fast" selection and bail.
          return false;
        IdxNIsKill = true;
      }
      N = FastEmit_rr(VT, VT, ISD::ADD, N, NIsKill, IdxN, IdxNIsKill);
      if (N == 0)
        // Unhandled operand. Halt "fast" selection and bail.
        return false;
    }
  }
  if (TotalOffs) {
    N = FastEmit_ri_(VT, ISD::ADD, N, NIsKill, TotalOffs, VT);
    if (N == 0)
      // Unhandled operand. Halt "fast" selection and bail.
      return false;
  }

  // We successfully emitted code for the given LLVM Instruction.
  UpdateValueMap(I, N);
  return true;
}

/// \brief Add a stackmap or patchpoint intrinsic call's live variable operands
/// to a stackmap or patchpoint machine instruction.
bool FastISel::addStackMapLiveVars(SmallVectorImpl<MachineOperand> &Ops,
                                   const CallInst *CI, unsigned StartIdx) {
  for (unsigned i = StartIdx, e = CI->getNumArgOperands(); i != e; ++i) {
    Value *Val = CI->getArgOperand(i);
    // Check for constants and encode them with a StackMaps::ConstantOp prefix.
    if (auto *C = dyn_cast<ConstantInt>(Val)) {
      Ops.push_back(MachineOperand::CreateImm(StackMaps::ConstantOp));
      Ops.push_back(MachineOperand::CreateImm(C->getSExtValue()));
    } else if (isa<ConstantPointerNull>(Val)) {
      Ops.push_back(MachineOperand::CreateImm(StackMaps::ConstantOp));
      Ops.push_back(MachineOperand::CreateImm(0));
    } else if (auto *AI = dyn_cast<AllocaInst>(Val)) {
      // Values coming from a stack location also require a special encoding,
      // but that is added later on by the target specific frame index
      // elimination implementation.
      auto SI = FuncInfo.StaticAllocaMap.find(AI);
      if (SI != FuncInfo.StaticAllocaMap.end())
        Ops.push_back(MachineOperand::CreateFI(SI->second));
      else
        return false;
    } else {
      unsigned Reg = getRegForValue(Val);
      if (Reg == 0)
        return false;
      Ops.push_back(MachineOperand::CreateReg(Reg, /*IsDef=*/false));
    }
  }

  return true;
}

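/// \brief Lower a call to the llvm.experimental.stackmap intrinsic directly to
/// a STACKMAP machine instruction bracketed by CALLSEQ_START and CALLSEQ_END.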
bool FastISel::SelectStackmap(const CallInst *I) {
  // void @llvm.experimental.stackmap(i64 <id>, i32 <numShadowBytes>,
  //                                  [live variables...])
  assert(I->getCalledFunction()->getReturnType()->isVoidTy() &&
         "Stackmap cannot return a value.");

  // The stackmap intrinsic only records the live variables (the arguments
  // passed to it) and emits NOPS (if requested). Unlike the patchpoint
  // intrinsic, this won't be lowered to a function call. This means we don't
  // have to worry about calling conventions and target-specific lowering code.
  // Instead we perform the call lowering right here.
  //
  // CALLSEQ_START(0)
  // STACKMAP(id, nbytes, ...)
  // CALLSEQ_END(0, 0)
  //
  SmallVector<MachineOperand, 32> Ops;

  // Add the <id> and <numBytes> constants.
  assert(isa<ConstantInt>(I->getOperand(PatchPointOpers::IDPos)) &&
         "Expected a constant integer.");
  const auto *ID = cast<ConstantInt>(I->getOperand(PatchPointOpers::IDPos));
  Ops.push_back(MachineOperand::CreateImm(ID->getZExtValue()));

  assert(isa<ConstantInt>(I->getOperand(PatchPointOpers::NBytesPos)) &&
         "Expected a constant integer.");
  const auto *NumBytes =
    cast<ConstantInt>(I->getOperand(PatchPointOpers::NBytesPos));
  Ops.push_back(MachineOperand::CreateImm(NumBytes->getZExtValue()));

  // Push live variables for the stack map (skipping the first two arguments
  // <id> and <numBytes>).
  if (!addStackMapLiveVars(Ops, I, 2))
    return false;

  // We are not adding any register mask info here, because the stackmap
  // doesn't clobber anything.

  // Add scratch registers as implicit def and early clobber.
  CallingConv::ID CC = I->getCallingConv();
  const MCPhysReg *ScratchRegs = TLI.getScratchRegisters(CC);
  for (unsigned i = 0; ScratchRegs[i]; ++i)
    Ops.push_back(MachineOperand::CreateReg(
      ScratchRegs[i], /*IsDef=*/true, /*IsImp=*/true, /*IsKill=*/false,
      /*IsDead=*/false, /*IsUndef=*/false, /*IsEarlyClobber=*/true));

  // Issue CALLSEQ_START
  unsigned AdjStackDown = TII.getCallFrameSetupOpcode();
  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(AdjStackDown))
    .addImm(0);

  // Issue STACKMAP.
  MachineInstrBuilder MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
                                    TII.get(TargetOpcode::STACKMAP));
  for (auto const &MO : Ops)
    MIB.addOperand(MO);

  // Issue CALLSEQ_END
  unsigned AdjStackUp = TII.getCallFrameDestroyOpcode();
  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(AdjStackUp))
    .addImm(0).addImm(0);

  // Inform the Frame Information that we have a stackmap in this function.
  FuncInfo.MF->getFrameInfo()->setHasStackMap();

  return true;
}

/// \brief Lower an argument list according to the target calling convention.
///
/// This is a helper for lowering intrinsics that follow a target calling
/// convention or require stack pointer adjustment. Only a subset of the
/// intrinsic's operands need to participate in the calling convention.
bool FastISel::lowerCallOperands(const CallInst *CI, unsigned ArgIdx,
                                 unsigned NumArgs, const Value *Callee,
                                 bool ForceRetVoidTy, CallLoweringInfo &CLI) {
  ArgListTy Args;
  Args.reserve(NumArgs);

  // Populate the argument list.
  // Attributes for args start at offset 1, after the return attribute.
  ImmutableCallSite CS(CI);
  for (unsigned ArgI = ArgIdx, ArgE = ArgIdx + NumArgs, AttrI = ArgIdx + 1;
       ArgI != ArgE; ++ArgI) {
    Value *V = CI->getOperand(ArgI);

    assert(!V->getType()->isEmptyTy() && "Empty type passed to intrinsic.");

    ArgListEntry Entry;
    Entry.Val = V;
    Entry.Ty = V->getType();
    Entry.setAttributes(&CS, AttrI);
    Args.push_back(Entry);
  }

  Type *RetTy = ForceRetVoidTy ? Type::getVoidTy(CI->getType()->getContext())
                               : CI->getType();
  CLI.setCallee(CI->getCallingConv(), RetTy, Callee, std::move(Args), NumArgs);

  return LowerCallTo(CLI);
}

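/// \brief Lower a call to the llvm.experimental.patchpoint intrinsic to a
/// PATCHPOINT machine instruction, reusing the target's call lowering for the
/// actual call arguments and recording the live variables for the stack map.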
bool FastISel::SelectPatchpoint(const CallInst *I) {
  // void|i64 @llvm.experimental.patchpoint.void|i64(i64 <id>,
  //                                                 i32 <numBytes>,
  //                                                 i8* <target>,
  //                                                 i32 <numArgs>,
  //                                                 [Args...],
  //                                                 [live variables...])
  CallingConv::ID CC = I->getCallingConv();
  bool IsAnyRegCC = CC == CallingConv::AnyReg;
  bool HasDef = !I->getType()->isVoidTy();
  Value *Callee = I->getOperand(PatchPointOpers::TargetPos);

  // Get the real number of arguments participating in the call <numArgs>
  assert(isa<ConstantInt>(I->getOperand(PatchPointOpers::NArgPos)) &&
         "Expected a constant integer.");
  const auto *NumArgsVal =
    cast<ConstantInt>(I->getOperand(PatchPointOpers::NArgPos));
  unsigned NumArgs = NumArgsVal->getZExtValue();

  // Skip the four meta args: <id>, <numNopBytes>, <target>, <numArgs>
  // This includes all meta-operands up to but not including CC.
  unsigned NumMetaOpers = PatchPointOpers::CCPos;
  assert(I->getNumArgOperands() >= NumMetaOpers + NumArgs &&
         "Not enough arguments provided to the patchpoint intrinsic");

  // For AnyRegCC the arguments are lowered later on manually.
  unsigned NumCallArgs = IsAnyRegCC ? 0 : NumArgs;
  CallLoweringInfo CLI;
  if (!lowerCallOperands(I, NumMetaOpers, NumCallArgs, Callee, IsAnyRegCC, CLI))
    return false;

  assert(CLI.Call && "No call instruction specified.");

  SmallVector<MachineOperand, 32> Ops;

  // Add an explicit result reg if we use the anyreg calling convention.
  if (IsAnyRegCC && HasDef) {
    assert(CLI.NumResultRegs == 0 && "Unexpected result register.");
    CLI.ResultReg = createResultReg(TLI.getRegClassFor(MVT::i64));
    CLI.NumResultRegs = 1;
    Ops.push_back(MachineOperand::CreateReg(CLI.ResultReg, /*IsDef=*/true));
  }

  // Add the <id> and <numBytes> constants.
  assert(isa<ConstantInt>(I->getOperand(PatchPointOpers::IDPos)) &&
         "Expected a constant integer.");
  const auto *ID = cast<ConstantInt>(I->getOperand(PatchPointOpers::IDPos));
  Ops.push_back(MachineOperand::CreateImm(ID->getZExtValue()));

  assert(isa<ConstantInt>(I->getOperand(PatchPointOpers::NBytesPos)) &&
         "Expected a constant integer.");
  const auto *NumBytes =
    cast<ConstantInt>(I->getOperand(PatchPointOpers::NBytesPos));
  Ops.push_back(MachineOperand::CreateImm(NumBytes->getZExtValue()));

  // Assume that the callee is a constant address or null pointer.
  // FIXME: handle function symbols in the future.
  uint64_t CalleeAddr;
  if (const auto *C = dyn_cast<IntToPtrInst>(Callee))
    CalleeAddr = cast<ConstantInt>(C->getOperand(0))->getZExtValue();
  else if (const auto *C = dyn_cast<ConstantExpr>(Callee)) {
    if (C->getOpcode() == Instruction::IntToPtr)
      CalleeAddr = cast<ConstantInt>(C->getOperand(0))->getZExtValue();
    else
      llvm_unreachable("Unsupported ConstantExpr.");
  } else if (isa<ConstantPointerNull>(Callee))
    CalleeAddr = 0;
  else
    llvm_unreachable("Unsupported callee address.");

  Ops.push_back(MachineOperand::CreateImm(CalleeAddr));

  // Adjust <numArgs> to account for any arguments that have been passed on
  // the stack instead.
  unsigned NumCallRegArgs = IsAnyRegCC ? NumArgs : CLI.OutRegs.size();
  Ops.push_back(MachineOperand::CreateImm(NumCallRegArgs));

  // Add the calling convention
  Ops.push_back(MachineOperand::CreateImm((unsigned)CC));

  // Add the arguments we omitted previously. The register allocator should
  // place these in any free register.
  if (IsAnyRegCC) {
    for (unsigned i = NumMetaOpers, e = NumMetaOpers + NumArgs; i != e; ++i) {
      unsigned Reg = getRegForValue(I->getArgOperand(i));
      if (!Reg)
        return false;
      Ops.push_back(MachineOperand::CreateReg(Reg, /*IsDef=*/false));
    }
  }

  // Push the arguments from the call instruction.
  for (auto Reg : CLI.OutRegs)
    Ops.push_back(MachineOperand::CreateReg(Reg, /*IsDef=*/false));

  // Push live variables for the stack map.
  if (!addStackMapLiveVars(Ops, I, NumMetaOpers + NumArgs))
    return false;

  // Push the register mask info.
  Ops.push_back(MachineOperand::CreateRegMask(TRI.getCallPreservedMask(CC)));

  // Add scratch registers as implicit def and early clobber.
  const MCPhysReg *ScratchRegs = TLI.getScratchRegisters(CC);
  for (unsigned i = 0; ScratchRegs[i]; ++i)
    Ops.push_back(MachineOperand::CreateReg(
      ScratchRegs[i], /*IsDef=*/true, /*IsImp=*/true, /*IsKill=*/false,
      /*IsDead=*/false, /*IsUndef=*/false, /*IsEarlyClobber=*/true));

  // Add implicit defs (return values).
  for (auto Reg : CLI.InRegs)
    Ops.push_back(MachineOperand::CreateReg(Reg, /*IsDef=*/true,
                                            /*IsImpl=*/true));

  // Insert the patchpoint instruction before the call generated by the target.
  MachineInstrBuilder MIB = BuildMI(*FuncInfo.MBB, CLI.Call, DbgLoc,
                                    TII.get(TargetOpcode::PATCHPOINT));

  for (auto &MO : Ops)
    MIB.addOperand(MO);

  MIB->setPhysRegsDeadExcept(CLI.InRegs, TRI);

  // Delete the original call instruction.
  CLI.Call->eraseFromParent();

  // Inform the Frame Information that we have a patchpoint in this function.
  FuncInfo.MF->getFrameInfo()->setHasPatchPoint();

  if (CLI.NumResultRegs)
    UpdateValueMap(I, CLI.ResultReg, CLI.NumResultRegs);
  return true;
}

/// Returns an AttributeSet representing the attributes applied to the return
/// value of the given call.
static AttributeSet getReturnAttrs(FastISel::CallLoweringInfo &CLI) {
  SmallVector<Attribute::AttrKind, 2> Attrs;
  if (CLI.RetSExt)
    Attrs.push_back(Attribute::SExt);
  if (CLI.RetZExt)
    Attrs.push_back(Attribute::ZExt);
  if (CLI.IsInReg)
    Attrs.push_back(Attribute::InReg);

  return AttributeSet::get(CLI.RetTy->getContext(), AttributeSet::ReturnIndex,
                           Attrs);
}

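/// Lower a call to the external symbol with the given name, passing the first
/// NumArgs operands of the call instruction as arguments.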
bool FastISel::LowerCallTo(const CallInst *CI, const char *SymName,
                           unsigned NumArgs) {
  ImmutableCallSite CS(CI);

  PointerType *PT = cast<PointerType>(CS.getCalledValue()->getType());
  FunctionType *FTy = cast<FunctionType>(PT->getElementType());
  Type *RetTy = FTy->getReturnType();

  ArgListTy Args;
  Args.reserve(NumArgs);

  // Populate the argument list.
  // Attributes for args start at offset 1, after the return attribute.
  for (unsigned ArgI = 0; ArgI != NumArgs; ++ArgI) {
    Value *V = CI->getOperand(ArgI);

    assert(!V->getType()->isEmptyTy() && "Empty type passed to intrinsic.");

    ArgListEntry Entry;
    Entry.Val = V;
    Entry.Ty = V->getType();
    Entry.setAttributes(&CS, ArgI + 1);
    Args.push_back(Entry);
  }

  CallLoweringInfo CLI;
  CLI.setCallee(RetTy, FTy, SymName, std::move(Args), CS, NumArgs);

  return LowerCallTo(CLI);
}

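/// Lower the call described by CLI: compute the flags for the return value and
/// each outgoing argument, invoke the target's FastLowerCall hook, and record
/// the registers holding the result.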
bool FastISel::LowerCallTo(CallLoweringInfo &CLI) {
  // Handle the incoming return values from the call.
  CLI.clearIns();
  SmallVector<EVT, 4> RetTys;
  ComputeValueVTs(TLI, CLI.RetTy, RetTys);

  SmallVector<ISD::OutputArg, 4> Outs;
  GetReturnInfo(CLI.RetTy, getReturnAttrs(CLI), Outs, TLI);

  bool CanLowerReturn = TLI.CanLowerReturn(CLI.CallConv, *FuncInfo.MF,
                                           CLI.IsVarArg, Outs,
                                           CLI.RetTy->getContext());

  // FIXME: sret demotion isn't supported yet - bail out.
  if (!CanLowerReturn)
    return false;

  for (unsigned I = 0, E = RetTys.size(); I != E; ++I) {
    EVT VT = RetTys[I];
    MVT RegisterVT = TLI.getRegisterType(CLI.RetTy->getContext(), VT);
    unsigned NumRegs = TLI.getNumRegisters(CLI.RetTy->getContext(), VT);
    for (unsigned i = 0; i != NumRegs; ++i) {
      ISD::InputArg MyFlags;
      MyFlags.VT = RegisterVT;
      MyFlags.ArgVT = VT;
      MyFlags.Used = CLI.IsReturnValueUsed;
      if (CLI.RetSExt)
        MyFlags.Flags.setSExt();
      if (CLI.RetZExt)
        MyFlags.Flags.setZExt();
      if (CLI.IsInReg)
        MyFlags.Flags.setInReg();
      CLI.Ins.push_back(MyFlags);
    }
  }

  // Handle all of the outgoing arguments.
  CLI.clearOuts();
  for (auto &Arg : CLI.getArgs()) {
    Type *FinalType = Arg.Ty;
    if (Arg.isByVal)
      FinalType = cast<PointerType>(Arg.Ty)->getElementType();
    bool NeedsRegBlock = TLI.functionArgumentNeedsConsecutiveRegisters(
        FinalType, CLI.CallConv, CLI.IsVarArg);

    ISD::ArgFlagsTy Flags;
    if (Arg.isZExt)
      Flags.setZExt();
    if (Arg.isSExt)
      Flags.setSExt();
    if (Arg.isInReg)
      Flags.setInReg();
    if (Arg.isSRet)
      Flags.setSRet();
    if (Arg.isByVal)
      Flags.setByVal();
    if (Arg.isInAlloca) {
      Flags.setInAlloca();
      // Set the byval flag for CCAssignFn callbacks that don't know about
      // inalloca. This way we can know how many bytes we should've allocated
      // and how many bytes a callee cleanup function will pop. If we port
      // inalloca to more targets, we'll have to add custom inalloca handling in
      // the various CC lowering callbacks.
      Flags.setByVal();
    }
    if (Arg.isByVal || Arg.isInAlloca) {
      PointerType *Ty = cast<PointerType>(Arg.Ty);
      Type *ElementTy = Ty->getElementType();
      unsigned FrameSize = DL.getTypeAllocSize(ElementTy);
      // For ByVal, alignment should come from FE. BE will guess if this info is
      // not there, but there are cases it cannot get right.
      unsigned FrameAlign = Arg.Alignment;
      if (!FrameAlign)
        FrameAlign = TLI.getByValTypeAlignment(ElementTy);
      Flags.setByValSize(FrameSize);
      Flags.setByValAlign(FrameAlign);
    }
    if (Arg.isNest)
      Flags.setNest();
    if (NeedsRegBlock)
      Flags.setInConsecutiveRegs();
    unsigned OriginalAlignment = DL.getABITypeAlignment(Arg.Ty);
    Flags.setOrigAlign(OriginalAlignment);

    CLI.OutVals.push_back(Arg.Val);
    CLI.OutFlags.push_back(Flags);
  }

  if (!FastLowerCall(CLI))
    return false;

  // Set all unused physreg defs as dead.
  assert(CLI.Call && "No call instruction specified.");
  CLI.Call->setPhysRegsDeadExcept(CLI.InRegs, TRI);

  if (CLI.NumResultRegs && CLI.CS)
    UpdateValueMap(CLI.CS->getInstruction(), CLI.ResultReg, CLI.NumResultRegs);

  return true;
}

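/// Lower an ordinary call instruction by collecting its arguments and
/// attributes into a CallLoweringInfo and forwarding it to LowerCallTo.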
bool FastISel::LowerCall(const CallInst *CI) {
  ImmutableCallSite CS(CI);

  PointerType *PT = cast<PointerType>(CS.getCalledValue()->getType());
  FunctionType *FuncTy = cast<FunctionType>(PT->getElementType());
  Type *RetTy = FuncTy->getReturnType();

  ArgListTy Args;
  ArgListEntry Entry;
  Args.reserve(CS.arg_size());

  for (ImmutableCallSite::arg_iterator i = CS.arg_begin(), e = CS.arg_end();
       i != e; ++i) {
    Value *V = *i;

    // Skip empty types
    if (V->getType()->isEmptyTy())
      continue;

    Entry.Val = V;
    Entry.Ty = V->getType();

    // Skip the first return-type Attribute to get to params.
    Entry.setAttributes(&CS, i - CS.arg_begin() + 1);
    Args.push_back(Entry);
  }

  // Check if target-independent constraints permit a tail call here.
  // Target-dependent constraints are checked within FastLowerCall.
  bool IsTailCall = CI->isTailCall();
  if (IsTailCall && !isInTailCallPosition(CS, TM))
    IsTailCall = false;

  CallLoweringInfo CLI;
  CLI.setCallee(RetTy, FuncTy, CI->getCalledValue(), std::move(Args), CS)
    .setTailCall(IsTailCall);

  return LowerCallTo(CLI);
}

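/// Select and emit code for a call instruction. Simple inline asm and
/// intrinsic calls are handled here directly; all other calls are passed on to
/// LowerCall.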
bool FastISel::SelectCall(const User *I) {
  const CallInst *Call = cast<CallInst>(I);

  // Handle simple inline asms.
  if (const InlineAsm *IA = dyn_cast<InlineAsm>(Call->getCalledValue())) {
    // If the inline asm has side effects, then make sure that no local value
    // lives across by flushing the local value map.
    if (IA->hasSideEffects())
      flushLocalValueMap();

    // Don't attempt to handle constraints.
    if (!IA->getConstraintString().empty())
      return false;

    unsigned ExtraInfo = 0;
    if (IA->hasSideEffects())
      ExtraInfo |= InlineAsm::Extra_HasSideEffects;
    if (IA->isAlignStack())
      ExtraInfo |= InlineAsm::Extra_IsAlignStack;

    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
            TII.get(TargetOpcode::INLINEASM))
      .addExternalSymbol(IA->getAsmString().c_str())
      .addImm(ExtraInfo);
    return true;
  }

  MachineModuleInfo &MMI = FuncInfo.MF->getMMI();
  ComputeUsesVAFloatArgument(*Call, &MMI);

  // Handle intrinsic function calls.
  if (const auto *II = dyn_cast<IntrinsicInst>(Call))
    return SelectIntrinsicCall(II);

  // Usually, it does not make sense to initialize a value,
  // make an unrelated function call and use the value, because
  // it tends to be spilled on the stack. So, we move the pointer
  // to the last local value to the beginning of the block, so that
  // all the values which have already been materialized
  // appear after the call. It also makes sense to skip intrinsics
  // since they tend to be inlined.
  flushLocalValueMap();

  return LowerCall(Call);
}

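/// Select and emit code for an intrinsic call, handling the intrinsics that
/// fast-isel can lower directly (such as lifetime markers and the debug info
/// intrinsics).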
bool FastISel::SelectIntrinsicCall(const IntrinsicInst *II) {
  switch (II->getIntrinsicID()) {
  default: break;
  // At -O0 we don't care about the lifetime intrinsics.
  case Intrinsic::lifetime_start:
  case Intrinsic::lifetime_end:
  // The donothing intrinsic does, well, nothing.
  case Intrinsic::donothing:
    return true;
  case Intrinsic::dbg_declare: {
    const DbgDeclareInst *DI = cast<DbgDeclareInst>(II);
    DIVariable DIVar(DI->getVariable());
    assert((!DIVar || DIVar.isVariable()) &&
           "Variable in DbgDeclareInst should be either null or a DIVariable.");
    if (!DIVar || !FuncInfo.MF->getMMI().hasDebugInfo()) {
      DEBUG(dbgs() << "Dropping debug info for " << *DI << "\n");
      return true;
    }

    const Value *Address = DI->getAddress();
    if (!Address || isa<UndefValue>(Address)) {
      DEBUG(dbgs() << "Dropping debug info for " << *DI << "\n");
      return true;
    }

    unsigned Offset = 0;
    Optional<MachineOperand> Op;
    if (const Argument *Arg = dyn_cast<Argument>(Address))
      // Some arguments' frame index is recorded during argument lowering.
      Offset = FuncInfo.getArgumentFrameIndex(Arg);
    if (Offset)
      Op = MachineOperand::CreateFI(Offset);
    if (!Op)
      if (unsigned Reg = lookUpRegForValue(Address))
        Op = MachineOperand::CreateReg(Reg, false);

    // If we have a VLA that has a "use" in a metadata node that's then used
    // here but it has no other uses, then we have a problem. E.g.,
    //
    //   int foo (const int *x) {
    //     char a[*x];
    //     return 0;
    //   }
    //
    // If we assign 'a' a vreg and fast isel later on has to use the selection
    // DAG isel, it will want to copy the value to the vreg. However, there are
    // no uses, which goes counter to what selection DAG isel expects.
    if (!Op && !Address->use_empty() && isa<Instruction>(Address) &&
        (!isa<AllocaInst>(Address) ||
         !FuncInfo.StaticAllocaMap.count(cast<AllocaInst>(Address))))
      Op = MachineOperand::CreateReg(FuncInfo.InitializeRegForValue(Address),
                                     false);

    if (Op) {
      if (Op->isReg()) {
        Op->setIsDebug(true);
        BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
                TII.get(TargetOpcode::DBG_VALUE), false, Op->getReg(), 0,
                DI->getVariable());
      } else
        BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
                TII.get(TargetOpcode::DBG_VALUE))
          .addOperand(*Op)
          .addImm(0)
          .addMetadata(DI->getVariable());
    } else {
      // We can't yet handle anything else here because it would require
      // generating code, thus altering codegen because of debug info.
      DEBUG(dbgs() << "Dropping debug info for " << *DI << "\n");
    }
    return true;
  }
  case Intrinsic::dbg_value: {
    // This form of DBG_VALUE is target-independent.
    const DbgValueInst *DI = cast<DbgValueInst>(II);
    const MCInstrDesc &II = TII.get(TargetOpcode::DBG_VALUE);
    const Value *V = DI->getValue();
    if (!V) {
      // Currently the optimizer can produce this; insert an undef to
      // help debugging. Probably the optimizer should not do this.
      BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
        .addReg(0U).addImm(DI->getOffset())
        .addMetadata(DI->getVariable());
    } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(V)) {
      if (CI->getBitWidth() > 64)
        BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
          .addCImm(CI).addImm(DI->getOffset())
          .addMetadata(DI->getVariable());
      else
        BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
          .addImm(CI->getZExtValue()).addImm(DI->getOffset())
          .addMetadata(DI->getVariable());
    } else if (const ConstantFP *CF = dyn_cast<ConstantFP>(V)) {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001176 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohmand7b5ce32010-07-10 09:00:22 +00001177 .addFPImm(CF).addImm(DI->getOffset())
1178 .addMetadata(DI->getVariable());
Dale Johannesendd331042010-02-26 20:01:55 +00001179 } else if (unsigned Reg = lookUpRegForValue(V)) {
Adrian Prantldb3e26d2013-09-16 23:29:03 +00001180 // FIXME: This does not handle register-indirect values at offset 0.
Adrian Prantl418d1d12013-07-09 20:28:37 +00001181 bool IsIndirect = DI->getOffset() != 0;
Rafael Espindolaea09c592014-02-18 22:05:46 +00001182 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, IsIndirect,
Adrian Prantl418d1d12013-07-09 20:28:37 +00001183 Reg, DI->getOffset(), DI->getVariable());
Dale Johannesendd331042010-02-26 20:01:55 +00001184 } else {
1185 // We can't yet handle anything else here because it would require
1186 // generating code, thus altering codegen because of debug info.
Adrian Prantl0d1e5592013-05-22 18:02:19 +00001187 DEBUG(dbgs() << "Dropping debug info for " << *DI << "\n");
Wesley Peck527da1b2010-11-23 03:31:01 +00001188 }
Dale Johannesendd331042010-02-26 20:01:55 +00001189 return true;
1190 }
Eli Friedman8f1e11c2011-05-14 00:47:51 +00001191 case Intrinsic::objectsize: {
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001192 ConstantInt *CI = cast<ConstantInt>(II->getArgOperand(1));
Eli Friedman8f1e11c2011-05-14 00:47:51 +00001193 unsigned long long Res = CI->isZero() ? -1ULL : 0;
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001194 Constant *ResCI = ConstantInt::get(II->getType(), Res);
Eli Friedman8f1e11c2011-05-14 00:47:51 +00001195 unsigned ResultReg = getRegForValue(ResCI);
1196 if (ResultReg == 0)
1197 return false;
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001198 UpdateValueMap(II, ResultReg);
Eli Friedman8f1e11c2011-05-14 00:47:51 +00001199 return true;
1200 }
Chad Rosier9c1796f2013-03-07 20:42:17 +00001201 case Intrinsic::expect: {
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001202 unsigned ResultReg = getRegForValue(II->getArgOperand(0));
Nick Lewycky48beb212013-03-11 21:44:37 +00001203 if (ResultReg == 0)
1204 return false;
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001205 UpdateValueMap(II, ResultReg);
Chad Rosier3a200e12013-03-07 21:38:33 +00001206 return true;
Chad Rosier9c1796f2013-03-07 20:42:17 +00001207 }
Juergen Ributzka190305b2014-07-01 22:25:49 +00001208 case Intrinsic::experimental_stackmap:
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001209 return SelectStackmap(II);
Juergen Ributzka3d9e6752014-07-11 22:19:02 +00001210 case Intrinsic::experimental_patchpoint_void:
1211 case Intrinsic::experimental_patchpoint_i64:
1212 return SelectPatchpoint(II);
Dan Gohman32a733e2008-09-25 17:05:24 +00001213 }
Dan Gohman8a2dae52010-04-13 17:07:06 +00001214
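  // Not one of the intrinsics handled above; give the target a chance to
  // lower it before giving up.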
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001215 return FastLowerIntrinsicCall(II);
Dan Gohman32a733e2008-09-25 17:05:24 +00001216}
1217
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001218bool FastISel::SelectCast(const User *I, unsigned Opcode) {
Owen Anderson53aa7a92009-08-10 22:56:29 +00001219 EVT SrcVT = TLI.getValueType(I->getOperand(0)->getType());
1220 EVT DstVT = TLI.getValueType(I->getType());
Wesley Peck527da1b2010-11-23 03:31:01 +00001221
Owen Anderson9f944592009-08-11 20:47:22 +00001222 if (SrcVT == MVT::Other || !SrcVT.isSimple() ||
1223 DstVT == MVT::Other || !DstVT.isSimple())
Owen Andersonca1711a2008-08-26 23:46:32 +00001224 // Unhandled type. Halt "fast" selection and bail.
1225 return false;
Wesley Peck527da1b2010-11-23 03:31:01 +00001226
Eli Friedmanc7035512011-05-25 23:49:02 +00001227 // Check if the destination type is legal.
Dan Gohmana62e4ab2009-03-13 23:53:06 +00001228 if (!TLI.isTypeLegal(DstVT))
Eli Friedmanc7035512011-05-25 23:49:02 +00001229 return false;
Dan Gohmana62e4ab2009-03-13 23:53:06 +00001230
Eli Friedmanc7035512011-05-25 23:49:02 +00001231 // Check if the source operand is legal.
Dan Gohmana62e4ab2009-03-13 23:53:06 +00001232 if (!TLI.isTypeLegal(SrcVT))
Eli Friedmanc7035512011-05-25 23:49:02 +00001233 return false;
Dan Gohmana62e4ab2009-03-13 23:53:06 +00001234
Dan Gohman7bda51f2008-09-03 23:12:08 +00001235 unsigned InputReg = getRegForValue(I->getOperand(0));
Owen Andersonca1711a2008-08-26 23:46:32 +00001236 if (!InputReg)
1237 // Unhandled operand. Halt "fast" selection and bail.
1238 return false;
Dan Gohmanc0bb9592009-03-13 20:42:20 +00001239
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001240 bool InputRegIsKill = hasTrivialKill(I->getOperand(0));
1241
Owen Andersonca1711a2008-08-26 23:46:32 +00001242 unsigned ResultReg = FastEmit_r(SrcVT.getSimpleVT(),
1243 DstVT.getSimpleVT(),
1244 Opcode,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001245 InputReg, InputRegIsKill);
Owen Andersonca1711a2008-08-26 23:46:32 +00001246 if (!ResultReg)
1247 return false;
Wesley Peck527da1b2010-11-23 03:31:01 +00001248
Dan Gohman7bda51f2008-09-03 23:12:08 +00001249 UpdateValueMap(I, ResultReg);
Owen Andersonca1711a2008-08-26 23:46:32 +00001250 return true;
1251}
1252
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001253bool FastISel::SelectBitCast(const User *I) {
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001254 // If the bitcast doesn't change the type, just use the operand value.
1255 if (I->getType() == I->getOperand(0)->getType()) {
Dan Gohman7bda51f2008-09-03 23:12:08 +00001256 unsigned Reg = getRegForValue(I->getOperand(0));
Dan Gohman61cfa302008-08-27 20:41:38 +00001257 if (Reg == 0)
1258 return false;
Dan Gohman7bda51f2008-09-03 23:12:08 +00001259 UpdateValueMap(I, Reg);
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001260 return true;
1261 }
1262
Wesley Peck527da1b2010-11-23 03:31:01 +00001263 // Bitcasts of other values become reg-reg copies or BITCAST operators.
Patrik Hagglundc494d242012-12-17 14:30:06 +00001264 EVT SrcEVT = TLI.getValueType(I->getOperand(0)->getType());
1265 EVT DstEVT = TLI.getValueType(I->getType());
1266 if (SrcEVT == MVT::Other || DstEVT == MVT::Other ||
1267 !TLI.isTypeLegal(SrcEVT) || !TLI.isTypeLegal(DstEVT))
Owen Andersonca1711a2008-08-26 23:46:32 +00001268 // Unhandled type. Halt "fast" selection and bail.
1269 return false;
Wesley Peck527da1b2010-11-23 03:31:01 +00001270
Patrik Hagglundc494d242012-12-17 14:30:06 +00001271 MVT SrcVT = SrcEVT.getSimpleVT();
1272 MVT DstVT = DstEVT.getSimpleVT();
Dan Gohman7bda51f2008-09-03 23:12:08 +00001273 unsigned Op0 = getRegForValue(I->getOperand(0));
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001274 if (Op0 == 0)
1275 // Unhandled operand. Halt "fast" selection and bail.
Owen Andersonca1711a2008-08-26 23:46:32 +00001276 return false;
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001277
1278 bool Op0IsKill = hasTrivialKill(I->getOperand(0));
Wesley Peck527da1b2010-11-23 03:31:01 +00001279
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001280 // First, try to perform the bitcast by inserting a reg-reg copy.
1281 unsigned ResultReg = 0;
Patrik Hagglund5e6c3612012-12-13 06:34:11 +00001282 if (SrcVT == DstVT) {
Craig Topper760b1342012-02-22 05:59:10 +00001283 const TargetRegisterClass* SrcClass = TLI.getRegClassFor(SrcVT);
1284 const TargetRegisterClass* DstClass = TLI.getRegClassFor(DstVT);
Jakob Stoklund Olesen51642ae2010-07-11 05:16:54 +00001285 // Don't attempt a cross-class copy. It will likely fail.
1286 if (SrcClass == DstClass) {
1287 ResultReg = createResultReg(DstClass);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001288 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1289 TII.get(TargetOpcode::COPY), ResultReg).addReg(Op0);
Jakob Stoklund Olesen51642ae2010-07-11 05:16:54 +00001290 }
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001291 }
Wesley Peck527da1b2010-11-23 03:31:01 +00001292
1293 // If the reg-reg copy failed, select a BITCAST opcode.
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001294 if (!ResultReg)
Patrik Hagglund5e6c3612012-12-13 06:34:11 +00001295 ResultReg = FastEmit_r(SrcVT, DstVT, ISD::BITCAST, Op0, Op0IsKill);
Wesley Peck527da1b2010-11-23 03:31:01 +00001296
Dan Gohmanb0b5a272008-08-27 18:10:19 +00001297 if (!ResultReg)
Owen Andersonca1711a2008-08-26 23:46:32 +00001298 return false;
Wesley Peck527da1b2010-11-23 03:31:01 +00001299
Dan Gohman7bda51f2008-09-03 23:12:08 +00001300 UpdateValueMap(I, ResultReg);
Owen Andersonca1711a2008-08-26 23:46:32 +00001301 return true;
1302}
1303
Dan Gohman7bda51f2008-09-03 23:12:08 +00001304bool
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001305FastISel::SelectInstruction(const Instruction *I) {
Dan Gohman6e9a8fc2010-04-23 15:29:50 +00001306 // Just before the terminator instruction, insert instructions to
1307 // feed PHI nodes in successor blocks.
1308 if (isa<TerminatorInst>(I))
1309 if (!HandlePHINodesInSuccessorBlocks(I->getParent()))
1310 return false;
1311
Rafael Espindolaea09c592014-02-18 22:05:46 +00001312 DbgLoc = I->getDebugLoc();
Dan Gohmane450d742010-04-20 00:48:35 +00001313
Chad Rosier46addb92011-11-29 19:40:47 +00001314 MachineBasicBlock::iterator SavedInsertPt = FuncInfo.InsertPt;
1315
Bob Wilson3e6fa462012-08-03 04:06:28 +00001316 if (const CallInst *Call = dyn_cast<CallInst>(I)) {
1317 const Function *F = Call->getCalledFunction();
1318 LibFunc::Func Func;
Akira Hatanaka3d90f992014-04-15 21:30:06 +00001319
1320 // As a special case, don't handle calls to builtin library functions that
1321 // may be translated directly to target instructions.
Bob Wilson3e6fa462012-08-03 04:06:28 +00001322 if (F && !F->hasLocalLinkage() && F->hasName() &&
1323 LibInfo->getLibFunc(F->getName(), Func) &&
Bob Wilson871701c2012-08-03 21:26:24 +00001324 LibInfo->hasOptimizedCodeGen(Func))
Bob Wilson3e6fa462012-08-03 04:06:28 +00001325 return false;
Akira Hatanaka3d90f992014-04-15 21:30:06 +00001326
                                              1327    // Don't handle Intrinsic::trap if a trap function is specified.
1328 if (F && F->getIntrinsicID() == Intrinsic::trap &&
1329 !TM.Options.getTrapFunctionName().empty())
1330 return false;
Bob Wilson3e6fa462012-08-03 04:06:28 +00001331 }
1332
Dan Gohman18f94462009-12-05 01:27:58 +00001333 // First, try doing target-independent selection.
Michael Ilsemanba8446c2013-02-27 19:54:00 +00001334 if (SelectOperator(I, I->getOpcode())) {
Jan Wen Voung7857a642013-03-08 22:56:31 +00001335 ++NumFastIselSuccessIndependent;
Rafael Espindolaea09c592014-02-18 22:05:46 +00001336 DbgLoc = DebugLoc();
Dan Gohman18f94462009-12-05 01:27:58 +00001337 return true;
Dan Gohmane450d742010-04-20 00:48:35 +00001338 }
Chad Rosier879c34f2012-07-06 17:44:22 +00001339 // Remove dead code. However, ignore call instructions since we've flushed
Chad Rosier46addb92011-11-29 19:40:47 +00001340 // the local value map and recomputed the insert point.
1341 if (!isa<CallInst>(I)) {
1342 recomputeInsertPt();
1343 if (SavedInsertPt != FuncInfo.InsertPt)
1344 removeDeadCode(FuncInfo.InsertPt, SavedInsertPt);
1345 }
Dan Gohman18f94462009-12-05 01:27:58 +00001346
1347 // Next, try calling the target to attempt to handle the instruction.
Chad Rosier46addb92011-11-29 19:40:47 +00001348 SavedInsertPt = FuncInfo.InsertPt;
Dan Gohmane450d742010-04-20 00:48:35 +00001349 if (TargetSelectInstruction(I)) {
Jan Wen Voung7857a642013-03-08 22:56:31 +00001350 ++NumFastIselSuccessTarget;
Rafael Espindolaea09c592014-02-18 22:05:46 +00001351 DbgLoc = DebugLoc();
Dan Gohman18f94462009-12-05 01:27:58 +00001352 return true;
Dan Gohmane450d742010-04-20 00:48:35 +00001353 }
Chad Rosier46addb92011-11-29 19:40:47 +00001354 // Check for dead code and remove as necessary.
1355 recomputeInsertPt();
1356 if (SavedInsertPt != FuncInfo.InsertPt)
1357 removeDeadCode(FuncInfo.InsertPt, SavedInsertPt);
Dan Gohman18f94462009-12-05 01:27:58 +00001358
Rafael Espindolaea09c592014-02-18 22:05:46 +00001359 DbgLoc = DebugLoc();
Dan Gohman18f94462009-12-05 01:27:58 +00001360 return false;
Dan Gohmanfcf54562008-09-05 18:18:20 +00001361}
1362
Dan Gohman1ab1d312008-10-02 22:15:21 +00001363/// FastEmitBranch - Emit an unconditional branch to the given block,
1364/// unless it is the immediate (fall-through) successor, and update
1365/// the CFG.
1366void
Rafael Espindolaea09c592014-02-18 22:05:46 +00001367FastISel::FastEmitBranch(MachineBasicBlock *MSucc, DebugLoc DbgLoc) {
Evan Cheng615620c2013-02-11 01:27:15 +00001368 if (FuncInfo.MBB->getBasicBlock()->size() > 1 &&
1369 FuncInfo.MBB->isLayoutSuccessor(MSucc)) {
Eric Christophere9abba72012-04-10 18:18:10 +00001370    // For more accurate line information, if this is the only instruction
                                              1371    // in the block then emit it; otherwise we have the unconditional
                                              1372    // fall-through case, which needs no instructions.
Dan Gohman1ab1d312008-10-02 22:15:21 +00001373 } else {
1374 // The unconditional branch case.
Craig Topperc0196b12014-04-14 00:51:57 +00001375 TII.InsertBranch(*FuncInfo.MBB, MSucc, nullptr,
Rafael Espindolaea09c592014-02-18 22:05:46 +00001376 SmallVector<MachineOperand, 0>(), DbgLoc);
Dan Gohman1ab1d312008-10-02 22:15:21 +00001377 }
Juergen Ributzka454d3742014-06-13 00:45:11 +00001378 uint32_t BranchWeight = 0;
1379 if (FuncInfo.BPI)
1380 BranchWeight = FuncInfo.BPI->getEdgeWeight(FuncInfo.MBB->getBasicBlock(),
1381 MSucc->getBasicBlock());
1382 FuncInfo.MBB->addSuccessor(MSucc, BranchWeight);
Dan Gohman1ab1d312008-10-02 22:15:21 +00001383}
1384
Dan Gohmanaa92dc12009-09-03 22:53:57 +00001385/// SelectFNeg - Emit an FNeg operation.
1386///
1387bool
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001388FastISel::SelectFNeg(const User *I) {
Dan Gohmanaa92dc12009-09-03 22:53:57 +00001389 unsigned OpReg = getRegForValue(BinaryOperator::getFNegArgument(I));
1390 if (OpReg == 0) return false;
1391
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001392 bool OpRegIsKill = hasTrivialKill(I);
1393
Dan Gohman9cbef322009-09-11 00:36:43 +00001394 // If the target has ISD::FNEG, use it.
1395 EVT VT = TLI.getValueType(I->getType());
1396 unsigned ResultReg = FastEmit_r(VT.getSimpleVT(), VT.getSimpleVT(),
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001397 ISD::FNEG, OpReg, OpRegIsKill);
Dan Gohman9cbef322009-09-11 00:36:43 +00001398 if (ResultReg != 0) {
1399 UpdateValueMap(I, ResultReg);
1400 return true;
1401 }
1402
Dan Gohman89b090e2009-09-11 00:34:46 +00001403 // Bitcast the value to integer, twiddle the sign bit with xor,
1404 // and then bitcast it back to floating-point.
Dan Gohmanaa92dc12009-09-03 22:53:57 +00001405 if (VT.getSizeInBits() > 64) return false;
Dan Gohman89b090e2009-09-11 00:34:46 +00001406 EVT IntVT = EVT::getIntegerVT(I->getContext(), VT.getSizeInBits());
1407 if (!TLI.isTypeLegal(IntVT))
1408 return false;
1409
1410 unsigned IntReg = FastEmit_r(VT.getSimpleVT(), IntVT.getSimpleVT(),
Wesley Peck527da1b2010-11-23 03:31:01 +00001411 ISD::BITCAST, OpReg, OpRegIsKill);
Dan Gohman89b090e2009-09-11 00:34:46 +00001412 if (IntReg == 0)
1413 return false;
1414
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001415 unsigned IntResultReg = FastEmit_ri_(IntVT.getSimpleVT(), ISD::XOR,
1416 IntReg, /*Kill=*/true,
Dan Gohman89b090e2009-09-11 00:34:46 +00001417 UINT64_C(1) << (VT.getSizeInBits()-1),
1418 IntVT.getSimpleVT());
1419 if (IntResultReg == 0)
1420 return false;
1421
1422 ResultReg = FastEmit_r(IntVT.getSimpleVT(), VT.getSimpleVT(),
Wesley Peck527da1b2010-11-23 03:31:01 +00001423 ISD::BITCAST, IntResultReg, /*Kill=*/true);
Dan Gohmanaa92dc12009-09-03 22:53:57 +00001424 if (ResultReg == 0)
1425 return false;
1426
1427 UpdateValueMap(I, ResultReg);
1428 return true;
1429}
1430
Dan Gohmanfcf54562008-09-05 18:18:20 +00001431bool
Eli Friedman9ac94472011-05-16 20:27:46 +00001432FastISel::SelectExtractValue(const User *U) {
1433 const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(U);
Eli Friedman4c08bb42011-05-16 20:34:53 +00001434 if (!EVI)
Eli Friedman9ac94472011-05-16 20:27:46 +00001435 return false;
1436
Eli Friedmana4d4a012011-05-16 21:06:17 +00001437 // Make sure we only try to handle extracts with a legal result. But also
1438 // allow i1 because it's easy.
Eli Friedman9ac94472011-05-16 20:27:46 +00001439 EVT RealVT = TLI.getValueType(EVI->getType(), /*AllowUnknown=*/true);
1440 if (!RealVT.isSimple())
1441 return false;
1442 MVT VT = RealVT.getSimpleVT();
Eli Friedmana4d4a012011-05-16 21:06:17 +00001443 if (!TLI.isTypeLegal(VT) && VT != MVT::i1)
Eli Friedman9ac94472011-05-16 20:27:46 +00001444 return false;
1445
1446 const Value *Op0 = EVI->getOperand(0);
Chris Lattner229907c2011-07-18 04:54:35 +00001447 Type *AggTy = Op0->getType();
Eli Friedman9ac94472011-05-16 20:27:46 +00001448
1449 // Get the base result register.
1450 unsigned ResultReg;
1451 DenseMap<const Value *, unsigned>::iterator I = FuncInfo.ValueMap.find(Op0);
1452 if (I != FuncInfo.ValueMap.end())
1453 ResultReg = I->second;
Eli Friedmanbd375f12011-06-06 05:46:34 +00001454 else if (isa<Instruction>(Op0))
Eli Friedman9ac94472011-05-16 20:27:46 +00001455 ResultReg = FuncInfo.InitializeRegForValue(Op0);
Eli Friedmanbd375f12011-06-06 05:46:34 +00001456 else
1457 return false; // fast-isel can't handle aggregate constants at the moment
Eli Friedman9ac94472011-05-16 20:27:46 +00001458
1459 // Get the actual result register, which is an offset from the base register.
Jay Foad57aa6362011-07-13 10:26:04 +00001460 unsigned VTIndex = ComputeLinearIndex(AggTy, EVI->getIndices());
Eli Friedman9ac94472011-05-16 20:27:46 +00001461
1462 SmallVector<EVT, 4> AggValueVTs;
1463 ComputeValueVTs(TLI, AggTy, AggValueVTs);
1464
1465 for (unsigned i = 0; i < VTIndex; i++)
1466 ResultReg += TLI.getNumRegisters(FuncInfo.Fn->getContext(), AggValueVTs[i]);
1467
1468 UpdateValueMap(EVI, ResultReg);
1469 return true;
1470}
1471
1472bool
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001473FastISel::SelectOperator(const User *I, unsigned Opcode) {
Dan Gohmanfcf54562008-09-05 18:18:20 +00001474 switch (Opcode) {
Dan Gohmana5b96452009-06-04 22:49:04 +00001475 case Instruction::Add:
1476 return SelectBinaryOp(I, ISD::ADD);
1477 case Instruction::FAdd:
1478 return SelectBinaryOp(I, ISD::FADD);
1479 case Instruction::Sub:
1480 return SelectBinaryOp(I, ISD::SUB);
1481 case Instruction::FSub:
Dan Gohmanaa92dc12009-09-03 22:53:57 +00001482 // FNeg is currently represented in LLVM IR as a special case of FSub.
1483 if (BinaryOperator::isFNeg(I))
1484 return SelectFNeg(I);
Dan Gohmana5b96452009-06-04 22:49:04 +00001485 return SelectBinaryOp(I, ISD::FSUB);
1486 case Instruction::Mul:
1487 return SelectBinaryOp(I, ISD::MUL);
1488 case Instruction::FMul:
1489 return SelectBinaryOp(I, ISD::FMUL);
Dan Gohman7bda51f2008-09-03 23:12:08 +00001490 case Instruction::SDiv:
1491 return SelectBinaryOp(I, ISD::SDIV);
1492 case Instruction::UDiv:
1493 return SelectBinaryOp(I, ISD::UDIV);
1494 case Instruction::FDiv:
1495 return SelectBinaryOp(I, ISD::FDIV);
1496 case Instruction::SRem:
1497 return SelectBinaryOp(I, ISD::SREM);
1498 case Instruction::URem:
1499 return SelectBinaryOp(I, ISD::UREM);
1500 case Instruction::FRem:
1501 return SelectBinaryOp(I, ISD::FREM);
1502 case Instruction::Shl:
1503 return SelectBinaryOp(I, ISD::SHL);
1504 case Instruction::LShr:
1505 return SelectBinaryOp(I, ISD::SRL);
1506 case Instruction::AShr:
1507 return SelectBinaryOp(I, ISD::SRA);
1508 case Instruction::And:
1509 return SelectBinaryOp(I, ISD::AND);
1510 case Instruction::Or:
1511 return SelectBinaryOp(I, ISD::OR);
1512 case Instruction::Xor:
1513 return SelectBinaryOp(I, ISD::XOR);
Dan Gohmanb2226e22008-08-13 20:19:35 +00001514
Dan Gohman7bda51f2008-09-03 23:12:08 +00001515 case Instruction::GetElementPtr:
1516 return SelectGetElementPtr(I);
Dan Gohmana3e4d5a2008-08-20 00:11:48 +00001517
Dan Gohman7bda51f2008-09-03 23:12:08 +00001518 case Instruction::Br: {
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001519 const BranchInst *BI = cast<BranchInst>(I);
Dan Gohmana3e4d5a2008-08-20 00:11:48 +00001520
Dan Gohman7bda51f2008-09-03 23:12:08 +00001521 if (BI->isUnconditional()) {
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001522 const BasicBlock *LLVMSucc = BI->getSuccessor(0);
Dan Gohman87fb4e82010-07-07 16:29:44 +00001523 MachineBasicBlock *MSucc = FuncInfo.MBBMap[LLVMSucc];
Stuart Hastings0125b642010-06-17 22:43:56 +00001524 FastEmitBranch(MSucc, BI->getDebugLoc());
Dan Gohman7bda51f2008-09-03 23:12:08 +00001525 return true;
Owen Anderson14054922008-08-27 00:31:01 +00001526 }
Dan Gohman7bda51f2008-09-03 23:12:08 +00001527
                                              1528    // Conditional branches are not handled yet.
1529 // Halt "fast" selection and bail.
1530 return false;
Dan Gohmanb2226e22008-08-13 20:19:35 +00001531 }
1532
Dan Gohmanea56bdd2008-09-05 01:08:41 +00001533 case Instruction::Unreachable:
Yaron Kerend7ba46b2014-04-19 13:47:43 +00001534 if (TM.Options.TrapUnreachable)
1535 return FastEmit_(MVT::Other, MVT::Other, ISD::TRAP) != 0;
1536 else
1537 return true;
Dan Gohmanea56bdd2008-09-05 01:08:41 +00001538
Dan Gohman39d82f92008-09-10 20:11:02 +00001539 case Instruction::Alloca:
1540 // FunctionLowering has the static-sized case covered.
Dan Gohman87fb4e82010-07-07 16:29:44 +00001541 if (FuncInfo.StaticAllocaMap.count(cast<AllocaInst>(I)))
Dan Gohman39d82f92008-09-10 20:11:02 +00001542 return true;
1543
1544 // Dynamic-sized alloca is not handled yet.
1545 return false;
Wesley Peck527da1b2010-11-23 03:31:01 +00001546
Dan Gohman32a733e2008-09-25 17:05:24 +00001547 case Instruction::Call:
1548 return SelectCall(I);
Wesley Peck527da1b2010-11-23 03:31:01 +00001549
Dan Gohman7bda51f2008-09-03 23:12:08 +00001550 case Instruction::BitCast:
1551 return SelectBitCast(I);
1552
1553 case Instruction::FPToSI:
1554 return SelectCast(I, ISD::FP_TO_SINT);
1555 case Instruction::ZExt:
1556 return SelectCast(I, ISD::ZERO_EXTEND);
1557 case Instruction::SExt:
1558 return SelectCast(I, ISD::SIGN_EXTEND);
1559 case Instruction::Trunc:
1560 return SelectCast(I, ISD::TRUNCATE);
1561 case Instruction::SIToFP:
1562 return SelectCast(I, ISD::SINT_TO_FP);
1563
1564 case Instruction::IntToPtr: // Deliberate fall-through.
1565 case Instruction::PtrToInt: {
Owen Anderson53aa7a92009-08-10 22:56:29 +00001566 EVT SrcVT = TLI.getValueType(I->getOperand(0)->getType());
1567 EVT DstVT = TLI.getValueType(I->getType());
Dan Gohman7bda51f2008-09-03 23:12:08 +00001568 if (DstVT.bitsGT(SrcVT))
1569 return SelectCast(I, ISD::ZERO_EXTEND);
1570 if (DstVT.bitsLT(SrcVT))
1571 return SelectCast(I, ISD::TRUNCATE);
1572 unsigned Reg = getRegForValue(I->getOperand(0));
1573 if (Reg == 0) return false;
1574 UpdateValueMap(I, Reg);
1575 return true;
1576 }
Dan Gohman918fe082008-09-23 21:53:34 +00001577
Eli Friedman9ac94472011-05-16 20:27:46 +00001578 case Instruction::ExtractValue:
1579 return SelectExtractValue(I);
1580
Dan Gohmanf41ad472010-04-20 15:00:41 +00001581 case Instruction::PHI:
1582 llvm_unreachable("FastISel shouldn't visit PHI nodes!");
1583
Dan Gohman7bda51f2008-09-03 23:12:08 +00001584 default:
1585 // Unhandled instruction. Halt "fast" selection and bail.
1586 return false;
1587 }
Dan Gohmanb2226e22008-08-13 20:19:35 +00001588}
1589
Bob Wilson3e6fa462012-08-03 04:06:28 +00001590FastISel::FastISel(FunctionLoweringInfo &funcInfo,
1591 const TargetLibraryInfo *libInfo)
Eric Christopherd9134482014-08-04 21:25:23 +00001592 : FuncInfo(funcInfo), MF(funcInfo.MF), MRI(FuncInfo.MF->getRegInfo()),
1593 MFI(*FuncInfo.MF->getFrameInfo()), MCP(*FuncInfo.MF->getConstantPool()),
1594 TM(FuncInfo.MF->getTarget()), DL(*TM.getSubtargetImpl()->getDataLayout()),
1595 TII(*TM.getSubtargetImpl()->getInstrInfo()),
1596 TLI(*TM.getSubtargetImpl()->getTargetLowering()),
1597 TRI(*TM.getSubtargetImpl()->getRegisterInfo()), LibInfo(libInfo) {}
Dan Gohman02c84b82008-08-20 21:05:57 +00001598
Dan Gohmanc4442382008-08-14 21:51:29 +00001599FastISel::~FastISel() {}
1600
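// The FastLower* hooks below are overridden by targets that can lower
// arguments, calls, or intrinsic calls directly; the default implementations
// simply report failure so that the generic code (or SelectionDAG) takes over.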
Evan Cheng615620c2013-02-11 01:27:15 +00001601bool FastISel::FastLowerArguments() {
1602 return false;
1603}
1604
Juergen Ributzka8179e9e2014-07-11 22:01:42 +00001605bool FastISel::FastLowerCall(CallLoweringInfo &/*CLI*/) {
1606 return false;
1607}
1608
Reid Klecknerfb951982014-07-12 00:06:46 +00001609bool FastISel::FastLowerIntrinsicCall(const IntrinsicInst * /*II*/) {
Juergen Ributzka5dd32132014-07-11 20:42:12 +00001610 return false;
1611}
1612
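// Default implementations of the per-target FastEmit_* hooks. Returning 0
// means "no register produced", i.e. the target did not select the operation,
// and the caller falls back to another strategy.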
Owen Anderson9f944592009-08-11 20:47:22 +00001613unsigned FastISel::FastEmit_(MVT, MVT,
Dan Gohman404a9842010-01-05 22:26:32 +00001614 unsigned) {
Dan Gohmanb2226e22008-08-13 20:19:35 +00001615 return 0;
1616}
1617
Owen Anderson9f944592009-08-11 20:47:22 +00001618unsigned FastISel::FastEmit_r(MVT, MVT,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001619 unsigned,
1620 unsigned /*Op0*/, bool /*Op0IsKill*/) {
Dan Gohmanb2226e22008-08-13 20:19:35 +00001621 return 0;
1622}
1623
Wesley Peck527da1b2010-11-23 03:31:01 +00001624unsigned FastISel::FastEmit_rr(MVT, MVT,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001625 unsigned,
1626 unsigned /*Op0*/, bool /*Op0IsKill*/,
1627 unsigned /*Op1*/, bool /*Op1IsKill*/) {
Dan Gohmanb2226e22008-08-13 20:19:35 +00001628 return 0;
1629}
1630
Dan Gohman404a9842010-01-05 22:26:32 +00001631unsigned FastISel::FastEmit_i(MVT, MVT, unsigned, uint64_t /*Imm*/) {
Evan Cheng864fcc12008-08-20 22:45:34 +00001632 return 0;
1633}
1634
Owen Anderson9f944592009-08-11 20:47:22 +00001635unsigned FastISel::FastEmit_f(MVT, MVT,
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001636 unsigned, const ConstantFP * /*FPImm*/) {
Dan Gohman5ca269e2008-08-27 01:09:54 +00001637 return 0;
1638}
1639
Owen Anderson9f944592009-08-11 20:47:22 +00001640unsigned FastISel::FastEmit_ri(MVT, MVT,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001641 unsigned,
1642 unsigned /*Op0*/, bool /*Op0IsKill*/,
Owen Anderson8dd01cc2008-08-25 23:58:18 +00001643 uint64_t /*Imm*/) {
Dan Gohmanfe905652008-08-21 01:41:07 +00001644 return 0;
1645}
1646
Owen Anderson9f944592009-08-11 20:47:22 +00001647unsigned FastISel::FastEmit_rf(MVT, MVT,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001648 unsigned,
1649 unsigned /*Op0*/, bool /*Op0IsKill*/,
Dan Gohmanbcaf6812010-04-15 01:51:59 +00001650 const ConstantFP * /*FPImm*/) {
Dan Gohman5ca269e2008-08-27 01:09:54 +00001651 return 0;
1652}
1653
Owen Anderson9f944592009-08-11 20:47:22 +00001654unsigned FastISel::FastEmit_rri(MVT, MVT,
Dan Gohman404a9842010-01-05 22:26:32 +00001655 unsigned,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001656 unsigned /*Op0*/, bool /*Op0IsKill*/,
1657 unsigned /*Op1*/, bool /*Op1IsKill*/,
Dan Gohmanfe905652008-08-21 01:41:07 +00001658 uint64_t /*Imm*/) {
Evan Cheng864fcc12008-08-20 22:45:34 +00001659 return 0;
1660}
1661
1662/// FastEmit_ri_ - This method is a wrapper of FastEmit_ri. It first tries
1663/// to emit an instruction with an immediate operand using FastEmit_ri.
                                              1664/// If that fails, it materializes the immediate into a register and tries
1665/// FastEmit_rr instead.
Dan Gohman404a9842010-01-05 22:26:32 +00001666unsigned FastISel::FastEmit_ri_(MVT VT, unsigned Opcode,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001667 unsigned Op0, bool Op0IsKill,
1668 uint64_t Imm, MVT ImmType) {
Chris Lattnerb53ccb82011-04-17 20:23:29 +00001669 // If this is a multiply by a power of two, emit this as a shift left.
1670 if (Opcode == ISD::MUL && isPowerOf2_64(Imm)) {
1671 Opcode = ISD::SHL;
1672 Imm = Log2_64(Imm);
Chris Lattner562d6e82011-04-18 06:55:51 +00001673 } else if (Opcode == ISD::UDIV && isPowerOf2_64(Imm)) {
1674 // div x, 8 -> srl x, 3
1675 Opcode = ISD::SRL;
1676 Imm = Log2_64(Imm);
Chris Lattnerb53ccb82011-04-17 20:23:29 +00001677 }
Owen Andersondd450b82011-04-22 23:38:06 +00001678
Chris Lattnerb53ccb82011-04-17 20:23:29 +00001679  // Horrible hack (to be removed): check to make sure shift amounts are
                                              1680  // in range.
1681 if ((Opcode == ISD::SHL || Opcode == ISD::SRA || Opcode == ISD::SRL) &&
1682 Imm >= VT.getSizeInBits())
1683 return 0;
Owen Andersondd450b82011-04-22 23:38:06 +00001684
Evan Cheng864fcc12008-08-20 22:45:34 +00001685 // First check if immediate type is legal. If not, we can't use the ri form.
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001686 unsigned ResultReg = FastEmit_ri(VT, VT, Opcode, Op0, Op0IsKill, Imm);
Evan Cheng864fcc12008-08-20 22:45:34 +00001687 if (ResultReg != 0)
1688 return ResultReg;
Owen Anderson8dd01cc2008-08-25 23:58:18 +00001689 unsigned MaterialReg = FastEmit_i(ImmType, ImmType, ISD::Constant, Imm);
Eli Friedman4105ed12011-04-29 23:34:52 +00001690 if (MaterialReg == 0) {
1691 // This is a bit ugly/slow, but failing here means falling out of
1692 // fast-isel, which would be very slow.
Chris Lattner229907c2011-07-18 04:54:35 +00001693 IntegerType *ITy = IntegerType::get(FuncInfo.Fn->getContext(),
Eli Friedman4105ed12011-04-29 23:34:52 +00001694 VT.getSizeInBits());
1695 MaterialReg = getRegForValue(ConstantInt::get(ITy, Imm));
Chad Rosierdbac0252013-03-28 23:04:47 +00001696 if (MaterialReg == 0) return 0;
Eli Friedman4105ed12011-04-29 23:34:52 +00001697 }
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001698 return FastEmit_rr(VT, VT, Opcode,
1699 Op0, Op0IsKill,
1700 MaterialReg, /*Kill=*/true);
Dan Gohmanfe905652008-08-21 01:41:07 +00001701}
1702
1703unsigned FastISel::createResultReg(const TargetRegisterClass* RC) {
1704 return MRI.createVirtualRegister(RC);
Evan Cheng864fcc12008-08-20 22:45:34 +00001705}
1706
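/// Try to constrain Op so that it is usable as operand OpNum of the given
/// instruction. If constraining the existing virtual register is impossible,
/// copy the value into a fresh register of the required class instead.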
Tim Northover2f553f32014-04-15 13:59:49 +00001707unsigned FastISel::constrainOperandRegClass(const MCInstrDesc &II,
1708 unsigned Op, unsigned OpNum) {
1709 if (TargetRegisterInfo::isVirtualRegister(Op)) {
1710 const TargetRegisterClass *RegClass =
1711 TII.getRegClass(II, OpNum, &TRI, *FuncInfo.MF);
1712 if (!MRI.constrainRegClass(Op, RegClass)) {
1713 // If it's not legal to COPY between the register classes, something
1714 // has gone very wrong before we got here.
1715 unsigned NewOp = createResultReg(RegClass);
1716 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1717 TII.get(TargetOpcode::COPY), NewOp).addReg(Op);
1718 return NewOp;
1719 }
1720 }
1721 return Op;
1722}
1723
Dan Gohmanb2226e22008-08-13 20:19:35 +00001724unsigned FastISel::FastEmitInst_(unsigned MachineInstOpcode,
Dan Gohman2471f6c2008-08-20 18:09:38 +00001725 const TargetRegisterClass* RC) {
Dan Gohmanfe905652008-08-21 01:41:07 +00001726 unsigned ResultReg = createResultReg(RC);
Evan Cheng6cc775f2011-06-28 19:10:37 +00001727 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohmanb2226e22008-08-13 20:19:35 +00001728
Rafael Espindolaea09c592014-02-18 22:05:46 +00001729 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg);
Dan Gohmanb2226e22008-08-13 20:19:35 +00001730 return ResultReg;
1731}
1732
1733unsigned FastISel::FastEmitInst_r(unsigned MachineInstOpcode,
1734 const TargetRegisterClass *RC,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001735 unsigned Op0, bool Op0IsKill) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001736 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohmanb2226e22008-08-13 20:19:35 +00001737
Tim Northover2f553f32014-04-15 13:59:49 +00001738 unsigned ResultReg = createResultReg(RC);
1739 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1740
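  // If the instruction has an explicit result operand, build it directly into
  // ResultReg. Otherwise the result is only available as an implicit def, so
  // emit the instruction and then copy the implicitly defined physreg into
  // ResultReg. The same pattern is repeated in the FastEmitInst_* overloads
  // below.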
Evan Chenge775d352008-09-08 08:38:20 +00001741 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001742 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Dan Gohmand7b5ce32010-07-10 09:00:22 +00001743 .addReg(Op0, Op0IsKill * RegState::Kill);
Evan Chenge775d352008-09-08 08:38:20 +00001744 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001745 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohmand7b5ce32010-07-10 09:00:22 +00001746 .addReg(Op0, Op0IsKill * RegState::Kill);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001747 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1748 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001749 }
1750
Dan Gohmanb2226e22008-08-13 20:19:35 +00001751 return ResultReg;
1752}
1753
1754unsigned FastISel::FastEmitInst_rr(unsigned MachineInstOpcode,
1755 const TargetRegisterClass *RC,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001756 unsigned Op0, bool Op0IsKill,
1757 unsigned Op1, bool Op1IsKill) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001758 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohmanb2226e22008-08-13 20:19:35 +00001759
Tim Northover2f553f32014-04-15 13:59:49 +00001760 unsigned ResultReg = createResultReg(RC);
1761 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1762 Op1 = constrainOperandRegClass(II, Op1, II.getNumDefs() + 1);
1763
Evan Chenge775d352008-09-08 08:38:20 +00001764 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001765 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001766 .addReg(Op0, Op0IsKill * RegState::Kill)
1767 .addReg(Op1, Op1IsKill * RegState::Kill);
Evan Chenge775d352008-09-08 08:38:20 +00001768 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001769 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001770 .addReg(Op0, Op0IsKill * RegState::Kill)
1771 .addReg(Op1, Op1IsKill * RegState::Kill);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001772 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1773 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001774 }
Dan Gohmanb2226e22008-08-13 20:19:35 +00001775 return ResultReg;
1776}
Dan Gohmanfe905652008-08-21 01:41:07 +00001777
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001778unsigned FastISel::FastEmitInst_rrr(unsigned MachineInstOpcode,
1779 const TargetRegisterClass *RC,
1780 unsigned Op0, bool Op0IsKill,
1781 unsigned Op1, bool Op1IsKill,
1782 unsigned Op2, bool Op2IsKill) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001783 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001784
Tim Northover2f553f32014-04-15 13:59:49 +00001785 unsigned ResultReg = createResultReg(RC);
1786 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1787 Op1 = constrainOperandRegClass(II, Op1, II.getNumDefs() + 1);
1788 Op2 = constrainOperandRegClass(II, Op2, II.getNumDefs() + 2);
1789
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001790 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001791 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001792 .addReg(Op0, Op0IsKill * RegState::Kill)
1793 .addReg(Op1, Op1IsKill * RegState::Kill)
1794 .addReg(Op2, Op2IsKill * RegState::Kill);
1795 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001796 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001797 .addReg(Op0, Op0IsKill * RegState::Kill)
1798 .addReg(Op1, Op1IsKill * RegState::Kill)
1799 .addReg(Op2, Op2IsKill * RegState::Kill);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001800 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1801 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Owen Anderson68b6b0e2011-05-05 17:59:04 +00001802 }
1803 return ResultReg;
1804}
1805
Dan Gohmanfe905652008-08-21 01:41:07 +00001806unsigned FastISel::FastEmitInst_ri(unsigned MachineInstOpcode,
1807 const TargetRegisterClass *RC,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001808 unsigned Op0, bool Op0IsKill,
1809 uint64_t Imm) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001810 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohmanfe905652008-08-21 01:41:07 +00001811
Tim Northover2f553f32014-04-15 13:59:49 +00001812 unsigned ResultReg = createResultReg(RC);
1813 RC = TII.getRegClass(II, II.getNumDefs(), &TRI, *FuncInfo.MF);
1814 MRI.constrainRegClass(Op0, RC);
1815
Evan Chenge775d352008-09-08 08:38:20 +00001816 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001817 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001818 .addReg(Op0, Op0IsKill * RegState::Kill)
1819 .addImm(Imm);
Evan Chenge775d352008-09-08 08:38:20 +00001820 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001821 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001822 .addReg(Op0, Op0IsKill * RegState::Kill)
1823 .addImm(Imm);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001824 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1825 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001826 }
Dan Gohmanfe905652008-08-21 01:41:07 +00001827 return ResultReg;
1828}
1829
Owen Anderson66443c02011-03-11 21:33:55 +00001830unsigned FastISel::FastEmitInst_rii(unsigned MachineInstOpcode,
1831 const TargetRegisterClass *RC,
1832 unsigned Op0, bool Op0IsKill,
1833 uint64_t Imm1, uint64_t Imm2) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001834 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Owen Anderson66443c02011-03-11 21:33:55 +00001835
Tim Northover2f553f32014-04-15 13:59:49 +00001836 unsigned ResultReg = createResultReg(RC);
1837 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1838
Owen Anderson66443c02011-03-11 21:33:55 +00001839 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001840 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Owen Anderson66443c02011-03-11 21:33:55 +00001841 .addReg(Op0, Op0IsKill * RegState::Kill)
1842 .addImm(Imm1)
1843 .addImm(Imm2);
1844 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001845 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Owen Anderson66443c02011-03-11 21:33:55 +00001846 .addReg(Op0, Op0IsKill * RegState::Kill)
1847 .addImm(Imm1)
1848 .addImm(Imm2);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001849 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1850 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Owen Anderson66443c02011-03-11 21:33:55 +00001851 }
1852 return ResultReg;
1853}
1854
Dan Gohman5ca269e2008-08-27 01:09:54 +00001855unsigned FastISel::FastEmitInst_rf(unsigned MachineInstOpcode,
1856 const TargetRegisterClass *RC,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001857 unsigned Op0, bool Op0IsKill,
1858 const ConstantFP *FPImm) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001859 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohman5ca269e2008-08-27 01:09:54 +00001860
Tim Northover2f553f32014-04-15 13:59:49 +00001861 unsigned ResultReg = createResultReg(RC);
1862 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1863
Evan Chenge775d352008-09-08 08:38:20 +00001864 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001865 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001866 .addReg(Op0, Op0IsKill * RegState::Kill)
1867 .addFPImm(FPImm);
Evan Chenge775d352008-09-08 08:38:20 +00001868 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001869 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001870 .addReg(Op0, Op0IsKill * RegState::Kill)
1871 .addFPImm(FPImm);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001872 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1873 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001874 }
Dan Gohman5ca269e2008-08-27 01:09:54 +00001875 return ResultReg;
1876}
1877
Dan Gohmanfe905652008-08-21 01:41:07 +00001878unsigned FastISel::FastEmitInst_rri(unsigned MachineInstOpcode,
1879 const TargetRegisterClass *RC,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001880 unsigned Op0, bool Op0IsKill,
1881 unsigned Op1, bool Op1IsKill,
1882 uint64_t Imm) {
Evan Cheng6cc775f2011-06-28 19:10:37 +00001883 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Dan Gohmanfe905652008-08-21 01:41:07 +00001884
Tim Northover2f553f32014-04-15 13:59:49 +00001885 unsigned ResultReg = createResultReg(RC);
1886 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1887 Op1 = constrainOperandRegClass(II, Op1, II.getNumDefs() + 1);
1888
Evan Chenge775d352008-09-08 08:38:20 +00001889 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001890 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001891 .addReg(Op0, Op0IsKill * RegState::Kill)
1892 .addReg(Op1, Op1IsKill * RegState::Kill)
1893 .addImm(Imm);
Evan Chenge775d352008-09-08 08:38:20 +00001894 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001895 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001896 .addReg(Op0, Op0IsKill * RegState::Kill)
1897 .addReg(Op1, Op1IsKill * RegState::Kill)
1898 .addImm(Imm);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001899 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1900 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001901 }
Dan Gohmanfe905652008-08-21 01:41:07 +00001902 return ResultReg;
1903}
Owen Anderson32635db2008-08-25 20:20:32 +00001904
Manman Rene8735522012-06-01 19:33:18 +00001905unsigned FastISel::FastEmitInst_rrii(unsigned MachineInstOpcode,
1906 const TargetRegisterClass *RC,
1907 unsigned Op0, bool Op0IsKill,
1908 unsigned Op1, bool Op1IsKill,
1909 uint64_t Imm1, uint64_t Imm2) {
Manman Rene8735522012-06-01 19:33:18 +00001910 const MCInstrDesc &II = TII.get(MachineInstOpcode);
1911
Tim Northover2f553f32014-04-15 13:59:49 +00001912 unsigned ResultReg = createResultReg(RC);
1913 Op0 = constrainOperandRegClass(II, Op0, II.getNumDefs());
1914 Op1 = constrainOperandRegClass(II, Op1, II.getNumDefs() + 1);
1915
Manman Rene8735522012-06-01 19:33:18 +00001916 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001917 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Manman Rene8735522012-06-01 19:33:18 +00001918 .addReg(Op0, Op0IsKill * RegState::Kill)
1919 .addReg(Op1, Op1IsKill * RegState::Kill)
1920 .addImm(Imm1).addImm(Imm2);
1921 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001922 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II)
Manman Rene8735522012-06-01 19:33:18 +00001923 .addReg(Op0, Op0IsKill * RegState::Kill)
1924 .addReg(Op1, Op1IsKill * RegState::Kill)
1925 .addImm(Imm1).addImm(Imm2);
Rafael Espindolaea09c592014-02-18 22:05:46 +00001926 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1927 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Manman Rene8735522012-06-01 19:33:18 +00001928 }
1929 return ResultReg;
1930}
1931
Owen Anderson32635db2008-08-25 20:20:32 +00001932unsigned FastISel::FastEmitInst_i(unsigned MachineInstOpcode,
1933 const TargetRegisterClass *RC,
1934 uint64_t Imm) {
1935 unsigned ResultReg = createResultReg(RC);
Evan Cheng6cc775f2011-06-28 19:10:37 +00001936 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Wesley Peck527da1b2010-11-23 03:31:01 +00001937
Evan Chenge775d352008-09-08 08:38:20 +00001938 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001939 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg).addImm(Imm);
Evan Chenge775d352008-09-08 08:38:20 +00001940 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001941 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II).addImm(Imm);
1942 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1943 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Evan Chenge775d352008-09-08 08:38:20 +00001944 }
Owen Anderson32635db2008-08-25 20:20:32 +00001945 return ResultReg;
Evan Cheng2c067322008-08-25 22:20:39 +00001946}
Owen Anderson5f57bc22008-08-27 22:30:02 +00001947
Owen Andersondd450b82011-04-22 23:38:06 +00001948unsigned FastISel::FastEmitInst_ii(unsigned MachineInstOpcode,
1949 const TargetRegisterClass *RC,
1950 uint64_t Imm1, uint64_t Imm2) {
1951 unsigned ResultReg = createResultReg(RC);
Evan Cheng6cc775f2011-06-28 19:10:37 +00001952 const MCInstrDesc &II = TII.get(MachineInstOpcode);
Owen Andersondd450b82011-04-22 23:38:06 +00001953
1954 if (II.getNumDefs() >= 1)
Rafael Espindolaea09c592014-02-18 22:05:46 +00001955 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II, ResultReg)
Owen Andersondd450b82011-04-22 23:38:06 +00001956 .addImm(Imm1).addImm(Imm2);
1957 else {
Rafael Espindolaea09c592014-02-18 22:05:46 +00001958 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II).addImm(Imm1).addImm(Imm2);
1959 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
1960 TII.get(TargetOpcode::COPY), ResultReg).addReg(II.ImplicitDefs[0]);
Owen Andersondd450b82011-04-22 23:38:06 +00001961 }
1962 return ResultReg;
1963}
1964
Owen Anderson9f944592009-08-11 20:47:22 +00001965unsigned FastISel::FastEmitInst_extractsubreg(MVT RetVT,
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001966 unsigned Op0, bool Op0IsKill,
1967 uint32_t Idx) {
Evan Cheng4a0bf662009-01-22 09:10:11 +00001968 unsigned ResultReg = createResultReg(TLI.getRegClassFor(RetVT));
Jakob Stoklund Olesen00264622010-07-08 16:40:22 +00001969 assert(TargetRegisterInfo::isVirtualRegister(Op0) &&
1970 "Cannot yet extract from physregs");
Jakob Stoklund Olesen1f1c6ad2012-05-20 06:38:37 +00001971 const TargetRegisterClass *RC = MRI.getRegClass(Op0);
1972 MRI.constrainRegClass(Op0, TRI.getSubClassWithSubReg(RC, Idx));
Dan Gohmand7b5ce32010-07-10 09:00:22 +00001973 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt,
Rafael Espindolaea09c592014-02-18 22:05:46 +00001974 DbgLoc, TII.get(TargetOpcode::COPY), ResultReg)
Jakob Stoklund Olesen00264622010-07-08 16:40:22 +00001975 .addReg(Op0, getKillRegState(Op0IsKill), Idx);
Owen Anderson5f57bc22008-08-27 22:30:02 +00001976 return ResultReg;
1977}
Dan Gohmanc0bb9592009-03-13 20:42:20 +00001978
1979/// FastEmitZExtFromI1 - Emit MachineInstrs to compute the value of Op
1980/// with all but the least significant bit set to zero.
Dan Gohman1a1b51f2010-05-11 23:54:07 +00001981unsigned FastISel::FastEmitZExtFromI1(MVT VT, unsigned Op0, bool Op0IsKill) {
1982 return FastEmit_ri(VT, VT, ISD::AND, Op0, Op0IsKill, 1);
Dan Gohmanc0bb9592009-03-13 20:42:20 +00001983}
Dan Gohmanc594eab2010-04-22 20:46:50 +00001984
1985/// HandlePHINodesInSuccessorBlocks - Handle PHI nodes in successor blocks.
1986/// Emit code to ensure constants are copied into registers when needed.
1987/// Remember the virtual registers that need to be added to the Machine PHI
1988/// nodes as input. We cannot just directly add them, because expansion
1989/// might result in multiple MBB's for one BB. As such, the start of the
1990/// BB might correspond to a different MBB than the end.
1991bool FastISel::HandlePHINodesInSuccessorBlocks(const BasicBlock *LLVMBB) {
1992 const TerminatorInst *TI = LLVMBB->getTerminator();
1993
1994 SmallPtrSet<MachineBasicBlock *, 4> SuccsHandled;
Dan Gohman87fb4e82010-07-07 16:29:44 +00001995 unsigned OrigNumPHINodesToUpdate = FuncInfo.PHINodesToUpdate.size();
Dan Gohmanc594eab2010-04-22 20:46:50 +00001996
1997 // Check successor nodes' PHI nodes that expect a constant to be available
1998 // from this block.
1999 for (unsigned succ = 0, e = TI->getNumSuccessors(); succ != e; ++succ) {
2000 const BasicBlock *SuccBB = TI->getSuccessor(succ);
2001 if (!isa<PHINode>(SuccBB->begin())) continue;
Dan Gohman87fb4e82010-07-07 16:29:44 +00002002 MachineBasicBlock *SuccMBB = FuncInfo.MBBMap[SuccBB];
Dan Gohmanc594eab2010-04-22 20:46:50 +00002003
2004 // If this terminator has multiple identical successors (common for
2005 // switches), only handle each succ once.
2006 if (!SuccsHandled.insert(SuccMBB)) continue;
2007
2008 MachineBasicBlock::iterator MBBI = SuccMBB->begin();
2009
2010 // At this point we know that there is a 1-1 correspondence between LLVM PHI
2011 // nodes and Machine PHI nodes, but the incoming operands have not been
2012 // emitted yet.
2013 for (BasicBlock::const_iterator I = SuccBB->begin();
2014 const PHINode *PN = dyn_cast<PHINode>(I); ++I) {
Dan Gohmane6d40162010-05-07 01:10:20 +00002015
Dan Gohmanc594eab2010-04-22 20:46:50 +00002016 // Ignore dead phi's.
2017 if (PN->use_empty()) continue;
2018
2019 // Only handle legal types. Two interesting things to note here. First,
2020 // by bailing out early, we may leave behind some dead instructions,
2021 // since SelectionDAG's HandlePHINodesInSuccessorBlocks will insert its
Chris Lattner0ab5e2c2011-04-15 05:18:47 +00002022 // own moves. Second, this check is necessary because FastISel doesn't
Dan Gohman93f59202010-07-02 00:10:16 +00002023 // use CreateRegs to create registers, so it always creates
Dan Gohmanc594eab2010-04-22 20:46:50 +00002024 // exactly one register for each non-void instruction.
2025 EVT VT = TLI.getValueType(PN->getType(), /*AllowUnknown=*/true);
2026 if (VT == MVT::Other || !TLI.isTypeLegal(VT)) {
Chad Rosier6d68c7c2012-02-04 00:39:19 +00002027 // Handle integer promotions, though, because they're common and easy.
2028 if (VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16)
Dan Gohmanc594eab2010-04-22 20:46:50 +00002029 VT = TLI.getTypeToTransformTo(LLVMBB->getContext(), VT);
2030 else {
Dan Gohman87fb4e82010-07-07 16:29:44 +00002031 FuncInfo.PHINodesToUpdate.resize(OrigNumPHINodesToUpdate);
Dan Gohmanc594eab2010-04-22 20:46:50 +00002032 return false;
2033 }
2034 }
2035
2036 const Value *PHIOp = PN->getIncomingValueForBlock(LLVMBB);
2037
Dan Gohmane6d40162010-05-07 01:10:20 +00002038 // Set the DebugLoc for the copy. Prefer the location of the operand
2039 // if there is one; use the location of the PHI otherwise.
Rafael Espindolaea09c592014-02-18 22:05:46 +00002040 DbgLoc = PN->getDebugLoc();
Dan Gohmane6d40162010-05-07 01:10:20 +00002041 if (const Instruction *Inst = dyn_cast<Instruction>(PHIOp))
Rafael Espindolaea09c592014-02-18 22:05:46 +00002042 DbgLoc = Inst->getDebugLoc();
Dan Gohmane6d40162010-05-07 01:10:20 +00002043
Dan Gohmanc594eab2010-04-22 20:46:50 +00002044 unsigned Reg = getRegForValue(PHIOp);
2045 if (Reg == 0) {
Dan Gohman87fb4e82010-07-07 16:29:44 +00002046 FuncInfo.PHINodesToUpdate.resize(OrigNumPHINodesToUpdate);
Dan Gohmanc594eab2010-04-22 20:46:50 +00002047 return false;
2048 }
Dan Gohman87fb4e82010-07-07 16:29:44 +00002049 FuncInfo.PHINodesToUpdate.push_back(std::make_pair(MBBI++, Reg));
Rafael Espindolaea09c592014-02-18 22:05:46 +00002050 DbgLoc = DebugLoc();
Dan Gohmanc594eab2010-04-22 20:46:50 +00002051 }
2052 }
2053
2054 return true;
2055}
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002056
2057bool FastISel::tryToFoldLoad(const LoadInst *LI, const Instruction *FoldInst) {
Eli Benderskye80691d2013-04-19 23:26:18 +00002058 assert(LI->hasOneUse() &&
2059 "tryToFoldLoad expected a LoadInst with a single use");
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002060 // We know that the load has a single use, but don't know what it is. If it
2061 // isn't one of the folded instructions, then we can't succeed here. Handle
2062 // this by scanning the single-use users of the load until we get to FoldInst.
2063 unsigned MaxUsers = 6; // Don't scan down huge single-use chains of instrs.
2064
Chandler Carruthcdf47882014-03-09 03:16:01 +00002065 const Instruction *TheUser = LI->user_back();
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002066 while (TheUser != FoldInst && // Scan up until we find FoldInst.
2067 // Stay in the right block.
2068 TheUser->getParent() == FoldInst->getParent() &&
2069 --MaxUsers) { // Don't scan too far.
2070 // If there are multiple or no uses of this instruction, then bail out.
2071 if (!TheUser->hasOneUse())
2072 return false;
2073
Chandler Carruthcdf47882014-03-09 03:16:01 +00002074 TheUser = TheUser->user_back();
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002075 }
2076
2077 // If we didn't find the fold instruction, then we failed to collapse the
2078 // sequence.
2079 if (TheUser != FoldInst)
2080 return false;
2081
2082 // Don't try to fold volatile loads. Target has to deal with alignment
2083 // constraints.
Eli Benderskye80691d2013-04-19 23:26:18 +00002084 if (LI->isVolatile())
2085 return false;
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002086
2087 // Figure out which vreg this is going into. If there is no assigned vreg yet
2088 // then there actually was no reference to it. Perhaps the load is referenced
2089 // by a dead instruction.
2090 unsigned LoadReg = getRegForValue(LI);
2091 if (LoadReg == 0)
2092 return false;
2093
Eli Benderskye80691d2013-04-19 23:26:18 +00002094 // We can't fold if this vreg has no uses or more than one use. Multiple uses
2095 // may mean that the instruction got lowered to multiple MIs, or the use of
2096 // the loaded value ended up being multiple operands of the result.
2097 if (!MRI.hasOneUse(LoadReg))
2098 return false;
2099
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002100 MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(LoadReg);
Owen Anderson16c6bf42014-03-13 23:12:04 +00002101 MachineInstr *User = RI->getParent();
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002102
2103 // Set the insertion point properly. Folding the load can cause generation of
Eli Benderskye80691d2013-04-19 23:26:18 +00002104 // other random instructions (like sign extends) for addressing modes; make
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002105 // sure they get inserted in a logical place before the new instruction.
2106 FuncInfo.InsertPt = User;
2107 FuncInfo.MBB = User->getParent();
2108
2109 // Ask the target to try folding the load.
2110 return tryToFoldLoadIntoMI(User, RI.getOperandNo(), LI);
2111}
2112
Bob Wilson9f3e6b22013-11-15 19:09:27 +00002113bool FastISel::canFoldAddIntoGEP(const User *GEP, const Value *Add) {
2114 // Must be an add.
2115 if (!isa<AddOperator>(Add))
2116 return false;
2117 // Type size needs to match.
Rafael Espindolaea09c592014-02-18 22:05:46 +00002118 if (DL.getTypeSizeInBits(GEP->getType()) !=
2119 DL.getTypeSizeInBits(Add->getType()))
Bob Wilson9f3e6b22013-11-15 19:09:27 +00002120 return false;
2121 // Must be in the same basic block.
2122 if (isa<Instruction>(Add) &&
2123 FuncInfo.MBBMap[cast<Instruction>(Add)->getParent()] != FuncInfo.MBB)
2124 return false;
2125 // Must have a constant operand.
2126 return isa<ConstantInt>(cast<AddOperator>(Add)->getOperand(1));
2127}
Eli Bendersky90dd3e72013-04-19 22:29:18 +00002128
Juergen Ributzka349777d2014-06-12 23:27:57 +00002129MachineMemOperand *
2130FastISel::createMachineMemOperandFor(const Instruction *I) const {
2131 const Value *Ptr;
2132 Type *ValTy;
2133 unsigned Alignment;
2134 unsigned Flags;
2135 bool IsVolatile;
2136
2137 if (const auto *LI = dyn_cast<LoadInst>(I)) {
2138 Alignment = LI->getAlignment();
2139 IsVolatile = LI->isVolatile();
2140 Flags = MachineMemOperand::MOLoad;
2141 Ptr = LI->getPointerOperand();
2142 ValTy = LI->getType();
2143 } else if (const auto *SI = dyn_cast<StoreInst>(I)) {
2144 Alignment = SI->getAlignment();
2145 IsVolatile = SI->isVolatile();
2146 Flags = MachineMemOperand::MOStore;
2147 Ptr = SI->getPointerOperand();
2148 ValTy = SI->getValueOperand()->getType();
2149 } else {
2150 return nullptr;
2151 }
2152
2153 bool IsNonTemporal = I->getMetadata("nontemporal") != nullptr;
2154 bool IsInvariant = I->getMetadata("invariant.load") != nullptr;
Juergen Ributzka349777d2014-06-12 23:27:57 +00002155 const MDNode *Ranges = I->getMetadata(LLVMContext::MD_range);
2156
Hal Finkelcc39b672014-07-24 12:16:19 +00002157 AAMDNodes AAInfo;
2158 I->getAAMetadata(AAInfo);
2159
Juergen Ributzka349777d2014-06-12 23:27:57 +00002160 if (Alignment == 0) // Ensure that codegen never sees alignment 0.
2161 Alignment = DL.getABITypeAlignment(ValTy);
2162
Eric Christopherd9134482014-08-04 21:25:23 +00002163 unsigned Size =
2164 TM.getSubtargetImpl()->getDataLayout()->getTypeStoreSize(ValTy);
Juergen Ributzka349777d2014-06-12 23:27:57 +00002165
2166 if (IsVolatile)
2167 Flags |= MachineMemOperand::MOVolatile;
2168 if (IsNonTemporal)
2169 Flags |= MachineMemOperand::MONonTemporal;
2170 if (IsInvariant)
2171 Flags |= MachineMemOperand::MOInvariant;
2172
2173 return FuncInfo.MF->getMachineMemOperand(MachinePointerInfo(Ptr), Flags, Size,
Hal Finkelcc39b672014-07-24 12:16:19 +00002174 Alignment, AAInfo, Ranges);
Juergen Ributzka349777d2014-06-12 23:27:57 +00002175}