//===- llvm/CodeGen/GlobalISel/IRTranslator.cpp - IRTranslator ---*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the IRTranslator class.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GlobalISel/IRTranslator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/ScopeExit.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/CodeGen/Analysis.h"
#include "llvm/CodeGen/FunctionLoweringInfo.h"
#include "llvm/CodeGen/GlobalISel/CallLowering.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/StackProtector.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/MC/MCContext.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CodeGen.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/LowLevelTypeImpl.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetIntrinsicInfo.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <string>
#include <utility>
#include <vector>

#define DEBUG_TYPE "irtranslator"

using namespace llvm;

static cl::opt<bool>
    EnableCSEInIRTranslator("enable-cse-in-irtranslator",
                            cl::desc("Should enable CSE in irtranslator"),
                            cl::Optional, cl::init(false));

char IRTranslator::ID = 0;

INITIALIZE_PASS_BEGIN(IRTranslator, DEBUG_TYPE, "IRTranslator LLVM IR -> MI",
                false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(GISelCSEAnalysisWrapperPass)
INITIALIZE_PASS_END(IRTranslator, DEBUG_TYPE, "IRTranslator LLVM IR -> MI",
                false, false)

static void reportTranslationError(MachineFunction &MF,
                                   const TargetPassConfig &TPC,
                                   OptimizationRemarkEmitter &ORE,
                                   OptimizationRemarkMissed &R) {
  MF.getProperties().set(MachineFunctionProperties::Property::FailedISel);

  // Print the function name explicitly if we don't have a debug location
  // (which makes the diagnostic less useful) or if we're going to emit a raw
  // error.
  if (!R.getLocation().isValid() || TPC.isGlobalISelAbortEnabled())
    R << (" (in function: " + MF.getName() + ")").str();

  if (TPC.isGlobalISelAbortEnabled())
    report_fatal_error(R.getMsg());
  else
    ORE.emit(R);
}

IRTranslator::IRTranslator() : MachineFunctionPass(ID) { }

#ifndef NDEBUG
namespace {
/// Verify that every instruction created has the same DILocation as the
/// instruction being translated.
class DILocationVerifier : public GISelChangeObserver {
  const Instruction *CurrInst = nullptr;

public:
  DILocationVerifier() = default;
  ~DILocationVerifier() = default;

  const Instruction *getCurrentInst() const { return CurrInst; }
  void setCurrentInst(const Instruction *Inst) { CurrInst = Inst; }

  void erasingInstr(MachineInstr &MI) override {}
  void changingInstr(MachineInstr &MI) override {}
  void changedInstr(MachineInstr &MI) override {}

  void createdInstr(MachineInstr &MI) override {
    assert(getCurrentInst() && "Inserted instruction without a current MI");

    // Only print the check message if we're actually checking it.
#ifndef NDEBUG
    LLVM_DEBUG(dbgs() << "Checking DILocation from " << *CurrInst
                      << " was copied to " << MI);
#endif
    // We allow insts in the entry block to have a debug loc line of 0 because
    // they could have originated from constants, and we don't want a jumpy
    // debug experience.
    assert((CurrInst->getDebugLoc() == MI.getDebugLoc() ||
            MI.getDebugLoc().getLine() == 0) &&
           "Line info was not transferred to all instructions");
  }
};
} // namespace
#endif // ifndef NDEBUG

void IRTranslator::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<StackProtector>();
  AU.addRequired<TargetPassConfig>();
  AU.addRequired<GISelCSEAnalysisWrapperPass>();
  getSelectionDAGFallbackAnalysisUsage(AU);
  MachineFunctionPass::getAnalysisUsage(AU);
}
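
/// Reserve a placeholder entry (register 0) in the value map for each split
/// part of \p Val's type, recording the byte offsets of the parts; callers
/// are expected to fill in the real registers afterwards (see e.g.
/// translateExtractValue).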
IRTranslator::ValueToVRegInfo::VRegListT &
IRTranslator::allocateVRegs(const Value &Val) {
  assert(!VMap.contains(Val) && "Value already allocated in VMap");
  auto *Regs = VMap.getVRegs(Val);
  auto *Offsets = VMap.getOffsets(Val);
  SmallVector<LLT, 4> SplitTys;
  computeValueLLTs(*DL, *Val.getType(), SplitTys,
                   Offsets->empty() ? Offsets : nullptr);
  for (unsigned i = 0; i < SplitTys.size(); ++i)
    Regs->push_back(0);
  return *Regs;
}
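
/// Return the virtual registers holding \p Val, creating them on first use.
/// Non-constants get one fresh generic vreg per split part of their type;
/// constants are additionally translated into defining instructions.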
ArrayRef<Register> IRTranslator::getOrCreateVRegs(const Value &Val) {
  auto VRegsIt = VMap.findVRegs(Val);
  if (VRegsIt != VMap.vregs_end())
    return *VRegsIt->second;

  if (Val.getType()->isVoidTy())
    return *VMap.getVRegs(Val);

  // Create entry for this type.
  auto *VRegs = VMap.getVRegs(Val);
  auto *Offsets = VMap.getOffsets(Val);

  assert(Val.getType()->isSized() &&
         "Don't know how to create an empty vreg");

  SmallVector<LLT, 4> SplitTys;
  computeValueLLTs(*DL, *Val.getType(), SplitTys,
                   Offsets->empty() ? Offsets : nullptr);

  if (!isa<Constant>(Val)) {
    for (auto Ty : SplitTys)
      VRegs->push_back(MRI->createGenericVirtualRegister(Ty));
    return *VRegs;
  }

  if (Val.getType()->isAggregateType()) {
    // UndefValue, ConstantAggregateZero
    auto &C = cast<Constant>(Val);
    unsigned Idx = 0;
    while (auto Elt = C.getAggregateElement(Idx++)) {
      auto EltRegs = getOrCreateVRegs(*Elt);
      llvm::copy(EltRegs, std::back_inserter(*VRegs));
    }
  } else {
    assert(SplitTys.size() == 1 && "unexpectedly split LLT");
    VRegs->push_back(MRI->createGenericVirtualRegister(SplitTys[0]));
    bool Success = translate(cast<Constant>(Val), VRegs->front());
    if (!Success) {
      OptimizationRemarkMissed R("gisel-irtranslator", "GISelFailure",
                                 MF->getFunction().getSubprogram(),
                                 &MF->getFunction().getEntryBlock());
      R << "unable to translate constant: " << ore::NV("Type", Val.getType());
      reportTranslationError(*MF, *TPC, *ORE, R);
      return *VRegs;
    }
  }

  return *VRegs;
}
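
/// Return the frame index backing \p AI, creating the stack object on first
/// use. Even zero-sized allocas get at least one byte so the index is valid.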
int IRTranslator::getOrCreateFrameIndex(const AllocaInst &AI) {
  if (FrameIndices.find(&AI) != FrameIndices.end())
    return FrameIndices[&AI];

  unsigned ElementSize = DL->getTypeAllocSize(AI.getAllocatedType());
  unsigned Size =
      ElementSize * cast<ConstantInt>(AI.getArraySize())->getZExtValue();

  // Always allocate at least one byte.
  Size = std::max(Size, 1u);

  unsigned Alignment = AI.getAlignment();
  if (!Alignment)
    Alignment = DL->getABITypeAlignment(AI.getAllocatedType());

  int &FI = FrameIndices[&AI];
  FI = MF->getFrameInfo().CreateStackObject(Size, Alignment, false, &AI);
  return FI;
}
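
/// Determine the alignment to use for a memory access, falling back to the
/// ABI type alignment when the instruction does not specify one.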
unsigned IRTranslator::getMemOpAlignment(const Instruction &I) {
  unsigned Alignment = 0;
  Type *ValTy = nullptr;
  if (const StoreInst *SI = dyn_cast<StoreInst>(&I)) {
    Alignment = SI->getAlignment();
    ValTy = SI->getValueOperand()->getType();
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(&I)) {
    Alignment = LI->getAlignment();
    ValTy = LI->getType();
  } else if (const AtomicCmpXchgInst *AI = dyn_cast<AtomicCmpXchgInst>(&I)) {
    // TODO(PR27168): This instruction has no alignment attribute, but unlike
    // the default alignment for load/store, the default here is to assume
    // it has NATURAL alignment, not DataLayout-specified alignment.
    const DataLayout &DL = AI->getModule()->getDataLayout();
    Alignment = DL.getTypeStoreSize(AI->getCompareOperand()->getType());
    ValTy = AI->getCompareOperand()->getType();
  } else if (const AtomicRMWInst *AI = dyn_cast<AtomicRMWInst>(&I)) {
    // TODO(PR27168): This instruction has no alignment attribute, but unlike
    // the default alignment for load/store, the default here is to assume
    // it has NATURAL alignment, not DataLayout-specified alignment.
    const DataLayout &DL = AI->getModule()->getDataLayout();
    Alignment = DL.getTypeStoreSize(AI->getValOperand()->getType());
    ValTy = AI->getType();
  } else {
    OptimizationRemarkMissed R("gisel-irtranslator", "", &I);
    R << "unable to translate memop: " << ore::NV("Opcode", &I);
    reportTranslationError(*MF, *TPC, *ORE, R);
    return 1;
  }

  return Alignment ? Alignment : DL->getABITypeAlignment(ValTy);
}

MachineBasicBlock &IRTranslator::getMBB(const BasicBlock &BB) {
  MachineBasicBlock *&MBB = BBToMBB[&BB];
  assert(MBB && "BasicBlock was not encountered before");
  return *MBB;
}

void IRTranslator::addMachineCFGPred(CFGEdge Edge, MachineBasicBlock *NewPred) {
  assert(NewPred && "new predecessor must be a real MachineBasicBlock");
  MachinePreds[Edge].push_back(NewPred);
}

bool IRTranslator::translateBinaryOp(unsigned Opcode, const User &U,
                                     MachineIRBuilder &MIRBuilder) {
  // Get or create a virtual register for each operand. Constants could
  // alternatively be materialized as immediates or inlined at each use, but
  // for now they also go through virtual registers. Creating a virtual
  // register requires a known size.
  unsigned Op0 = getOrCreateVReg(*U.getOperand(0));
  unsigned Op1 = getOrCreateVReg(*U.getOperand(1));
  unsigned Res = getOrCreateVReg(U);
  uint16_t Flags = 0;
  if (isa<Instruction>(U)) {
    const Instruction &I = cast<Instruction>(U);
    Flags = MachineInstr::copyFlagsFromInstruction(I);
  }

  MIRBuilder.buildInstr(Opcode, {Res}, {Op0, Op1}, Flags);
  return true;
}

bool IRTranslator::translateFSub(const User &U, MachineIRBuilder &MIRBuilder) {
  // -0.0 - X --> G_FNEG
  if (isa<Constant>(U.getOperand(0)) &&
      U.getOperand(0) == ConstantFP::getZeroValueForNegation(U.getType())) {
    unsigned Op1 = getOrCreateVReg(*U.getOperand(1));
    unsigned Res = getOrCreateVReg(U);
    uint16_t Flags = 0;
    if (isa<Instruction>(U)) {
      const Instruction &I = cast<Instruction>(U);
      Flags = MachineInstr::copyFlagsFromInstruction(I);
    }
    // Negate the last operand of the FSUB.
    MIRBuilder.buildInstr(TargetOpcode::G_FNEG, {Res}, {Op1}, Flags);
    return true;
  }
  return translateBinaryOp(TargetOpcode::G_FSUB, U, MIRBuilder);
}

bool IRTranslator::translateFNeg(const User &U, MachineIRBuilder &MIRBuilder) {
  unsigned Op0 = getOrCreateVReg(*U.getOperand(0));
  unsigned Res = getOrCreateVReg(U);
  uint16_t Flags = 0;
  if (isa<Instruction>(U)) {
    const Instruction &I = cast<Instruction>(U);
    Flags = MachineInstr::copyFlagsFromInstruction(I);
  }
  MIRBuilder.buildInstr(TargetOpcode::G_FNEG, {Res}, {Op0}, Flags);
  return true;
}

bool IRTranslator::translateCompare(const User &U,
                                    MachineIRBuilder &MIRBuilder) {
  const CmpInst *CI = dyn_cast<CmpInst>(&U);
  unsigned Op0 = getOrCreateVReg(*U.getOperand(0));
  unsigned Op1 = getOrCreateVReg(*U.getOperand(1));
  unsigned Res = getOrCreateVReg(U);
  CmpInst::Predicate Pred =
      CI ? CI->getPredicate() : static_cast<CmpInst::Predicate>(
                                    cast<ConstantExpr>(U).getPredicate());
  if (CmpInst::isIntPredicate(Pred))
    MIRBuilder.buildICmp(Pred, Res, Op0, Op1);
  else if (Pred == CmpInst::FCMP_FALSE)
    // Use U.getType(), not CI->getType(): CI is null when U is a constant
    // expression compare.
    MIRBuilder.buildCopy(
        Res, getOrCreateVReg(*Constant::getNullValue(U.getType())));
  else if (Pred == CmpInst::FCMP_TRUE)
    MIRBuilder.buildCopy(
        Res, getOrCreateVReg(*Constant::getAllOnesValue(U.getType())));
  else {
    uint16_t Flags = 0;
    if (CI)
      Flags = MachineInstr::copyFlagsFromInstruction(*CI);
    MIRBuilder.buildInstr(TargetOpcode::G_FCMP, {Res}, {Pred, Op0, Op1}, Flags);
  }

  return true;
}
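
/// Lower a `ret`, passing any swifterror vreg along so the target's
/// CallLowering implementation can thread it through the return sequence.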
bool IRTranslator::translateRet(const User &U, MachineIRBuilder &MIRBuilder) {
  const ReturnInst &RI = cast<ReturnInst>(U);
  const Value *Ret = RI.getReturnValue();
  if (Ret && DL->getTypeStoreSize(Ret->getType()) == 0)
    Ret = nullptr;

  ArrayRef<Register> VRegs;
  if (Ret)
    VRegs = getOrCreateVRegs(*Ret);

  Register SwiftErrorVReg = 0;
  if (CLI->supportSwiftError() && SwiftError.getFunctionArg()) {
    SwiftErrorVReg = SwiftError.getOrCreateVRegUseAt(
        &RI, &MIRBuilder.getMBB(), SwiftError.getFunctionArg());
  }

  // The target may move the insertion point, but that is harmless: a return
  // is the last instruction of the block anyway.
  return CLI->lowerReturn(MIRBuilder, Ret, VRegs, SwiftErrorVReg);
}

bool IRTranslator::translateBr(const User &U, MachineIRBuilder &MIRBuilder) {
  const BranchInst &BrInst = cast<BranchInst>(U);
  unsigned Succ = 0;
  if (!BrInst.isUnconditional()) {
    // We want a G_BRCOND to the true BB followed by an unconditional branch.
    unsigned Tst = getOrCreateVReg(*BrInst.getCondition());
    const BasicBlock &TrueTgt = *cast<BasicBlock>(BrInst.getSuccessor(Succ++));
    MachineBasicBlock &TrueBB = getMBB(TrueTgt);
    MIRBuilder.buildBrCond(Tst, TrueBB);
  }

  const BasicBlock &BrTgt = *cast<BasicBlock>(BrInst.getSuccessor(Succ));
  MachineBasicBlock &TgtBB = getMBB(BrTgt);
  MachineBasicBlock &CurBB = MIRBuilder.getMBB();

  // If the unconditional target is the layout successor, fall through.
  if (!CurBB.isLayoutSuccessor(&TgtBB))
    MIRBuilder.buildBr(TgtBB);

  // Link successors.
  for (const BasicBlock *Succ : successors(&BrInst))
    CurBB.addSuccessor(&getMBB(*Succ));
  return true;
}

void IRTranslator::addSuccessorWithProb(MachineBasicBlock *Src,
                                        MachineBasicBlock *Dst,
                                        BranchProbability Prob) {
  if (!FuncInfo.BPI) {
    Src->addSuccessorWithoutProb(Dst);
    return;
  }
  if (Prob.isUnknown())
    Prob = getEdgeProbability(Src, Dst);
  Src->addSuccessor(Dst, Prob);
}

BranchProbability
IRTranslator::getEdgeProbability(const MachineBasicBlock *Src,
                                 const MachineBasicBlock *Dst) const {
  const BasicBlock *SrcBB = Src->getBasicBlock();
  const BasicBlock *DstBB = Dst->getBasicBlock();
  if (!FuncInfo.BPI) {
    // If BPI is not available, set the default probability as 1 / N, where N
    // is the number of successors.
    auto SuccSize = std::max<uint32_t>(succ_size(SrcBB), 1);
    return BranchProbability(1, SuccSize);
  }
  return FuncInfo.BPI->getEdgeProbability(SrcBB, DstBB);
}
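
/// Lower a `switch` by forming case clusters (adjacent cases with the same
/// destination, plus jump tables where profitable) and emitting each work
/// item via the SwitchCG utilities shared with SelectionDAG.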
bool IRTranslator::translateSwitch(const User &U, MachineIRBuilder &MIB) {
  using namespace SwitchCG;
  // Extract cases from the switch.
  const SwitchInst &SI = cast<SwitchInst>(U);
  BranchProbabilityInfo *BPI = FuncInfo.BPI;
  CaseClusterVector Clusters;
  Clusters.reserve(SI.getNumCases());
  for (auto &I : SI.cases()) {
    MachineBasicBlock *Succ = &getMBB(*I.getCaseSuccessor());
    assert(Succ && "Could not find successor mbb in mapping");
    const ConstantInt *CaseVal = I.getCaseValue();
    BranchProbability Prob =
        BPI ? BPI->getEdgeProbability(SI.getParent(), I.getSuccessorIndex())
            : BranchProbability(1, SI.getNumCases() + 1);
    Clusters.push_back(CaseCluster::range(CaseVal, CaseVal, Succ, Prob));
  }

  MachineBasicBlock *DefaultMBB = &getMBB(*SI.getDefaultDest());

  // Cluster adjacent cases with the same destination. We do this at all
  // optimization levels because it's cheap to do and will make codegen faster
  // if there are many clusters.
  sortAndRangeify(Clusters);

  MachineBasicBlock *SwitchMBB = &getMBB(*SI.getParent());

  // If there is only the default destination, jump there directly.
  if (Clusters.empty()) {
    SwitchMBB->addSuccessor(DefaultMBB);
    if (DefaultMBB != SwitchMBB->getNextNode())
      MIB.buildBr(*DefaultMBB);
    return true;
  }

  SL->findJumpTables(Clusters, &SI, DefaultMBB);

  LLVM_DEBUG({
    dbgs() << "Case clusters: ";
    for (const CaseCluster &C : Clusters) {
      if (C.Kind == CC_JumpTable)
        dbgs() << "JT:";
      if (C.Kind == CC_BitTests)
        dbgs() << "BT:";

      C.Low->getValue().print(dbgs(), true);
      if (C.Low != C.High) {
        dbgs() << '-';
        C.High->getValue().print(dbgs(), true);
      }
      dbgs() << ' ';
    }
    dbgs() << '\n';
  });

  assert(!Clusters.empty());
  SwitchWorkList WorkList;
  CaseClusterIt First = Clusters.begin();
  CaseClusterIt Last = Clusters.end() - 1;
  auto DefaultProb = getEdgeProbability(SwitchMBB, DefaultMBB);
  WorkList.push_back({SwitchMBB, First, Last, nullptr, nullptr, DefaultProb});

  // FIXME: At the moment we don't do any splitting optimizations here like
  // SelectionDAG does, so this worklist only has one entry.
  while (!WorkList.empty()) {
    SwitchWorkListItem W = WorkList.back();
    WorkList.pop_back();
    if (!lowerSwitchWorkItem(W, SI.getCondition(), SwitchMBB, DefaultMBB, MIB))
      return false;
  }
  return true;
}

void IRTranslator::emitJumpTable(SwitchCG::JumpTable &JT,
                                 MachineBasicBlock *MBB) {
  // Emit the code for the jump table.
  assert(JT.Reg != -1U && "Should lower JT Header first!");
  MachineIRBuilder MIB(*MBB->getParent());
  MIB.setMBB(*MBB);
  MIB.setDebugLoc(CurBuilder->getDebugLoc());

  Type *PtrIRTy = Type::getInt8PtrTy(MF->getFunction().getContext());
  const LLT PtrTy = getLLTForType(*PtrIRTy, *DL);

  auto Table = MIB.buildJumpTable(PtrTy, JT.JTI);
  MIB.buildBrJT(Table.getReg(0), JT.JTI, JT.Reg);
}
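
/// Emit the guard code for a jump table: rebase the switch value against the
/// lowest case and range-check it against the default block. Schematically,
/// the emitted MIR is roughly:
///   %rebased = G_SUB %value, <first-case>
///   %idx     = G_ZEXT/G_TRUNC %rebased       ; to pointer width
///   %oob     = G_ICMP ugt %rebased, <last-case - first-case>
///   G_BRCOND %oob, %default
///   G_BR %jump_table_block                   ; unless it is the next block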
bool IRTranslator::emitJumpTableHeader(SwitchCG::JumpTable &JT,
                                       SwitchCG::JumpTableHeader &JTH,
                                       MachineBasicBlock *SwitchBB,
                                       MachineIRBuilder &MIB) {
  DebugLoc dl = MIB.getDebugLoc();

  const Value &SValue = *JTH.SValue;
  // Subtract the lowest switch case value from the value being switched on.
  const LLT SwitchTy = getLLTForType(*SValue.getType(), *DL);
  unsigned SwitchOpReg = getOrCreateVReg(SValue);
  auto FirstCst = MIB.buildConstant(SwitchTy, JTH.First);
  auto Sub = MIB.buildSub({SwitchTy}, SwitchOpReg, FirstCst);

  // This value may be smaller or larger than the target's pointer type, and
  // therefore require extension or truncation.
  Type *PtrIRTy = SValue.getType()->getPointerTo();
  const LLT PtrScalarTy = LLT::scalar(DL->getTypeSizeInBits(PtrIRTy));
  Sub = MIB.buildZExtOrTrunc(PtrScalarTy, Sub);

  JT.Reg = Sub.getReg(0);

  if (JTH.OmitRangeCheck) {
    if (JT.MBB != SwitchBB->getNextNode())
      MIB.buildBr(*JT.MBB);
    return true;
  }

  // Emit the range check for the jump table, and branch to the default block
  // for the switch statement if the value being switched on exceeds the
  // largest case in the switch.
  auto Cst = getOrCreateVReg(
      *ConstantInt::get(SValue.getType(), JTH.Last - JTH.First));
  Cst = MIB.buildZExtOrTrunc(PtrScalarTy, Cst).getReg(0);
  auto Cmp = MIB.buildICmp(CmpInst::ICMP_UGT, LLT::scalar(1), Sub, Cst);

  auto BrCond = MIB.buildBrCond(Cmp.getReg(0), *JT.Default);

  // Avoid emitting unnecessary branches to the next block.
  if (JT.MBB != SwitchBB->getNextNode())
    BrCond = MIB.buildBr(*JT.MBB);
  return true;
}

void IRTranslator::emitSwitchCase(SwitchCG::CaseBlock &CB,
                                  MachineBasicBlock *SwitchBB,
                                  MachineIRBuilder &MIB) {
  unsigned CondLHS = getOrCreateVReg(*CB.CmpLHS);
  unsigned Cond = 0;
  DebugLoc OldDbgLoc = MIB.getDebugLoc();
  MIB.setDebugLoc(CB.DbgLoc);
  MIB.setMBB(*CB.ThisBB);

  if (CB.PredInfo.NoCmp) {
    // Branch or fall through to TrueBB.
    addSuccessorWithProb(CB.ThisBB, CB.TrueBB, CB.TrueProb);
    addMachineCFGPred({SwitchBB->getBasicBlock(), CB.TrueBB->getBasicBlock()},
                      CB.ThisBB);
    CB.ThisBB->normalizeSuccProbs();
    if (CB.TrueBB != CB.ThisBB->getNextNode())
      MIB.buildBr(*CB.TrueBB);
    MIB.setDebugLoc(OldDbgLoc);
    return;
  }

  const LLT i1Ty = LLT::scalar(1);
  // Build the compare.
  if (!CB.CmpMHS) {
    unsigned CondRHS = getOrCreateVReg(*CB.CmpRHS);
    Cond = MIB.buildICmp(CB.PredInfo.Pred, i1Ty, CondLHS, CondRHS).getReg(0);
  } else {
    assert(CB.PredInfo.Pred == CmpInst::ICMP_ULE &&
           "Can only handle ULE ranges");

    const APInt &Low = cast<ConstantInt>(CB.CmpLHS)->getValue();
    const APInt &High = cast<ConstantInt>(CB.CmpRHS)->getValue();

    unsigned CmpOpReg = getOrCreateVReg(*CB.CmpMHS);
    if (cast<ConstantInt>(CB.CmpLHS)->isMinValue(true)) {
      unsigned CondRHS = getOrCreateVReg(*CB.CmpRHS);
      Cond =
          MIB.buildICmp(CmpInst::ICMP_ULE, i1Ty, CmpOpReg, CondRHS).getReg(0);
    } else {
      const LLT &CmpTy = MRI->getType(CmpOpReg);
      auto Sub = MIB.buildSub({CmpTy}, CmpOpReg, CondLHS);
      auto Diff = MIB.buildConstant(CmpTy, High - Low);
      Cond = MIB.buildICmp(CmpInst::ICMP_ULE, i1Ty, Sub, Diff).getReg(0);
    }
  }

  // Update successor info.
  addSuccessorWithProb(CB.ThisBB, CB.TrueBB, CB.TrueProb);

  addMachineCFGPred({SwitchBB->getBasicBlock(), CB.TrueBB->getBasicBlock()},
                    CB.ThisBB);

  // TrueBB and FalseBB are always different unless the incoming IR is
  // degenerate. This only happens when running llc on weird IR.
  if (CB.TrueBB != CB.FalseBB)
    addSuccessorWithProb(CB.ThisBB, CB.FalseBB, CB.FalseProb);
  CB.ThisBB->normalizeSuccProbs();

  addMachineCFGPred({SwitchBB->getBasicBlock(), CB.FalseBB->getBasicBlock()},
                    CB.ThisBB);
  // If the lhs block is the next block, invert the condition so that we can
  // fall through to the lhs instead of the rhs block.
  if (CB.TrueBB == CB.ThisBB->getNextNode()) {
    std::swap(CB.TrueBB, CB.FalseBB);
    auto True = MIB.buildConstant(i1Ty, 1);
    Cond = MIB.buildInstr(TargetOpcode::G_XOR, {i1Ty}, {Cond, True}, None)
               .getReg(0);
  }

  MIB.buildBrCond(Cond, *CB.TrueBB);
  MIB.buildBr(*CB.FalseBB);
  MIB.setDebugLoc(OldDbgLoc);
}
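
/// Lower one jump-table cluster: insert the dispatch block, rebalance the
/// edge probabilities when the default block is also a table target, and
/// emit the table header immediately if this work item is the switch block.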
bool IRTranslator::lowerJumpTableWorkItem(SwitchCG::SwitchWorkListItem W,
                                          MachineBasicBlock *SwitchMBB,
                                          MachineBasicBlock *DefaultMBB,
                                          MachineIRBuilder &MIB,
                                          MachineFunction::iterator BBI,
                                          BranchProbability UnhandledProbs,
                                          SwitchCG::CaseClusterIt I,
                                          MachineBasicBlock *Fallthrough,
                                          bool FallthroughUnreachable) {
  using namespace SwitchCG;
  MachineFunction *CurMF = SwitchMBB->getParent();
  // FIXME: Optimize away range check based on pivot comparisons.
  JumpTableHeader *JTH = &SL->JTCases[I->JTCasesIndex].first;
  SwitchCG::JumpTable *JT = &SL->JTCases[I->JTCasesIndex].second;
  BranchProbability DefaultProb = W.DefaultProb;
  MachineBasicBlock *CurMBB = W.MBB;

  // The jump block hasn't been inserted yet; insert it here.
  MachineBasicBlock *JumpMBB = JT->MBB;
  CurMF->insert(BBI, JumpMBB);

  // Since the jump table block is separate from the switch block, we need
  // to keep track of it as a machine predecessor to the default block,
  // otherwise we lose the phi edges.
  addMachineCFGPred({SwitchMBB->getBasicBlock(), DefaultMBB->getBasicBlock()},
                    SwitchMBB);
  addMachineCFGPred({SwitchMBB->getBasicBlock(), DefaultMBB->getBasicBlock()},
                    JumpMBB);

  auto JumpProb = I->Prob;
  auto FallthroughProb = UnhandledProbs;

  // If the default statement is a target of the jump table, we evenly
  // distribute the default probability to successors of CurMBB. Also
  // update the probability on the edge from JumpMBB to Fallthrough.
  for (MachineBasicBlock::succ_iterator SI = JumpMBB->succ_begin(),
                                        SE = JumpMBB->succ_end();
       SI != SE; ++SI) {
    if (*SI == DefaultMBB) {
      JumpProb += DefaultProb / 2;
      FallthroughProb -= DefaultProb / 2;
      JumpMBB->setSuccProbability(SI, DefaultProb / 2);
      JumpMBB->normalizeSuccProbs();
      break;
    }
  }

  // Skip the range check if the fallthrough block is unreachable.
  if (FallthroughUnreachable)
    JTH->OmitRangeCheck = true;

  if (!JTH->OmitRangeCheck)
    addSuccessorWithProb(CurMBB, Fallthrough, FallthroughProb);
  addSuccessorWithProb(CurMBB, JumpMBB, JumpProb);
  CurMBB->normalizeSuccProbs();

  // The jump table header will be inserted in our current block, do the
  // range check, and fall through to our fallthrough block.
  JTH->HeaderBB = CurMBB;
  JT->Default = Fallthrough; // FIXME: Move Default to JumpTableHeader.

  // If we're in the right place, emit the jump table header right now.
  if (CurMBB == SwitchMBB) {
    if (!emitJumpTableHeader(*JT, *JTH, SwitchMBB, MIB))
      return false;
    JTH->Emitted = true;
  }
  return true;
}
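
/// Lower one contiguous range cluster as either an equality check (single
/// case) or an unsigned-range check, producing a CaseBlock for
/// emitSwitchCase.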
bool IRTranslator::lowerSwitchRangeWorkItem(SwitchCG::CaseClusterIt I,
                                            Value *Cond,
                                            MachineBasicBlock *Fallthrough,
                                            bool FallthroughUnreachable,
                                            BranchProbability UnhandledProbs,
                                            MachineBasicBlock *CurMBB,
                                            MachineIRBuilder &MIB,
                                            MachineBasicBlock *SwitchMBB) {
  using namespace SwitchCG;
  const Value *RHS, *LHS, *MHS;
  CmpInst::Predicate Pred;
  if (I->Low == I->High) {
    // Check Cond == I->Low.
    Pred = CmpInst::ICMP_EQ;
    LHS = Cond;
    RHS = I->Low;
    MHS = nullptr;
  } else {
    // Check I->Low <= Cond <= I->High.
    Pred = CmpInst::ICMP_ULE;
    LHS = I->Low;
    MHS = Cond;
    RHS = I->High;
  }

  // If Fallthrough is unreachable, fold away the comparison.
  // The false probability is the sum of all unhandled cases.
  CaseBlock CB(Pred, FallthroughUnreachable, LHS, RHS, MHS, I->MBB, Fallthrough,
               CurMBB, MIB.getDebugLoc(), I->Prob, UnhandledProbs);

  emitSwitchCase(CB, SwitchMBB, MIB);
  return true;
}

bool IRTranslator::lowerSwitchWorkItem(SwitchCG::SwitchWorkListItem W,
                                       Value *Cond,
                                       MachineBasicBlock *SwitchMBB,
                                       MachineBasicBlock *DefaultMBB,
                                       MachineIRBuilder &MIB) {
  using namespace SwitchCG;
  MachineFunction *CurMF = FuncInfo.MF;
  MachineBasicBlock *NextMBB = nullptr;
  MachineFunction::iterator BBI(W.MBB);
  if (++BBI != FuncInfo.MF->end())
    NextMBB = &*BBI;

  if (EnableOpts) {
    // Here, we order cases by probability so the most likely case will be
    // checked first. However, two clusters can have the same probability in
    // which case their relative ordering is non-deterministic. So we use Low
    // as a tie-breaker as clusters are guaranteed to never overlap.
    llvm::sort(W.FirstCluster, W.LastCluster + 1,
               [](const CaseCluster &a, const CaseCluster &b) {
                 return a.Prob != b.Prob
                            ? a.Prob > b.Prob
                            : a.Low->getValue().slt(b.Low->getValue());
               });

    // Rearrange the case blocks so that the last one falls through if possible
    // without changing the order of probabilities.
    for (CaseClusterIt I = W.LastCluster; I > W.FirstCluster;) {
      --I;
      if (I->Prob > W.LastCluster->Prob)
        break;
      if (I->Kind == CC_Range && I->MBB == NextMBB) {
        std::swap(*I, *W.LastCluster);
        break;
      }
    }
  }

  // Compute total probability.
  BranchProbability DefaultProb = W.DefaultProb;
  BranchProbability UnhandledProbs = DefaultProb;
  for (CaseClusterIt I = W.FirstCluster; I <= W.LastCluster; ++I)
    UnhandledProbs += I->Prob;

  MachineBasicBlock *CurMBB = W.MBB;
  for (CaseClusterIt I = W.FirstCluster, E = W.LastCluster; I <= E; ++I) {
    bool FallthroughUnreachable = false;
    MachineBasicBlock *Fallthrough;
    if (I == W.LastCluster) {
      // For the last cluster, fall through to the default destination.
      Fallthrough = DefaultMBB;
      FallthroughUnreachable = isa<UnreachableInst>(
          DefaultMBB->getBasicBlock()->getFirstNonPHIOrDbg());
    } else {
      Fallthrough = CurMF->CreateMachineBasicBlock(CurMBB->getBasicBlock());
      CurMF->insert(BBI, Fallthrough);
    }
    UnhandledProbs -= I->Prob;

    switch (I->Kind) {
    case CC_BitTests: {
      LLVM_DEBUG(dbgs() << "Switch to bit test optimization unimplemented");
      return false; // Bit tests currently unimplemented.
    }
    case CC_JumpTable: {
      if (!lowerJumpTableWorkItem(W, SwitchMBB, DefaultMBB, MIB, BBI,
                                  UnhandledProbs, I, Fallthrough,
                                  FallthroughUnreachable)) {
        LLVM_DEBUG(dbgs() << "Failed to lower jump table");
        return false;
      }
      break;
    }
    case CC_Range: {
      if (!lowerSwitchRangeWorkItem(I, Cond, Fallthrough,
                                    FallthroughUnreachable, UnhandledProbs,
                                    CurMBB, MIB, SwitchMBB)) {
        LLVM_DEBUG(dbgs() << "Failed to lower switch range");
        return false;
      }
      break;
    }
    }
    CurMBB = Fallthrough;
  }

  return true;
}

bool IRTranslator::translateIndirectBr(const User &U,
                                       MachineIRBuilder &MIRBuilder) {
  const IndirectBrInst &BrInst = cast<IndirectBrInst>(U);

  const unsigned Tgt = getOrCreateVReg(*BrInst.getAddress());
  MIRBuilder.buildBrIndirect(Tgt);

  // Link successors.
  MachineBasicBlock &CurBB = MIRBuilder.getMBB();
  for (const BasicBlock *Succ : successors(&BrInst))
    CurBB.addSuccessor(&getMBB(*Succ));

  return true;
}
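
/// Return true if \p V is a swifterror value (argument or alloca); such
/// values are tracked in virtual registers by SwiftErrorValueTracking rather
/// than through memory loads and stores.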
static bool isSwiftError(const Value *V) {
  if (auto Arg = dyn_cast<Argument>(V))
    return Arg->hasSwiftErrorAttr();
  if (auto AI = dyn_cast<AllocaInst>(V))
    return AI->isSwiftError();
  return false;
}

bool IRTranslator::translateLoad(const User &U, MachineIRBuilder &MIRBuilder) {
  const LoadInst &LI = cast<LoadInst>(U);

  auto Flags = LI.isVolatile() ? MachineMemOperand::MOVolatile
                               : MachineMemOperand::MONone;
  Flags |= MachineMemOperand::MOLoad;

  if (DL->getTypeStoreSize(LI.getType()) == 0)
    return true;

  ArrayRef<Register> Regs = getOrCreateVRegs(LI);
  ArrayRef<uint64_t> Offsets = *VMap.getOffsets(LI);
  unsigned Base = getOrCreateVReg(*LI.getPointerOperand());

  Type *OffsetIRTy = DL->getIntPtrType(LI.getPointerOperandType());
  LLT OffsetTy = getLLTForType(*OffsetIRTy, *DL);

  if (CLI->supportSwiftError() && isSwiftError(LI.getPointerOperand())) {
    assert(Regs.size() == 1 && "swifterror should be single pointer");
    unsigned VReg = SwiftError.getOrCreateVRegUseAt(&LI, &MIRBuilder.getMBB(),
                                                    LI.getPointerOperand());
    MIRBuilder.buildCopy(Regs[0], VReg);
    return true;
  }

  for (unsigned i = 0; i < Regs.size(); ++i) {
    Register Addr;
    MIRBuilder.materializeGEP(Addr, Base, OffsetTy, Offsets[i] / 8);

    MachinePointerInfo Ptr(LI.getPointerOperand(), Offsets[i] / 8);
    unsigned BaseAlign = getMemOpAlignment(LI);
    auto MMO = MF->getMachineMemOperand(
        Ptr, Flags, (MRI->getType(Regs[i]).getSizeInBits() + 7) / 8,
        MinAlign(BaseAlign, Offsets[i] / 8), AAMDNodes(), nullptr,
        LI.getSyncScopeID(), LI.getOrdering());
    MIRBuilder.buildLoad(Regs[i], Addr, *MMO);
  }

  return true;
}

bool IRTranslator::translateStore(const User &U, MachineIRBuilder &MIRBuilder) {
  const StoreInst &SI = cast<StoreInst>(U);
  auto Flags = SI.isVolatile() ? MachineMemOperand::MOVolatile
                               : MachineMemOperand::MONone;
  Flags |= MachineMemOperand::MOStore;

  if (DL->getTypeStoreSize(SI.getValueOperand()->getType()) == 0)
    return true;

  ArrayRef<Register> Vals = getOrCreateVRegs(*SI.getValueOperand());
  ArrayRef<uint64_t> Offsets = *VMap.getOffsets(*SI.getValueOperand());
  unsigned Base = getOrCreateVReg(*SI.getPointerOperand());

  Type *OffsetIRTy = DL->getIntPtrType(SI.getPointerOperandType());
  LLT OffsetTy = getLLTForType(*OffsetIRTy, *DL);

  if (CLI->supportSwiftError() && isSwiftError(SI.getPointerOperand())) {
    assert(Vals.size() == 1 && "swifterror should be single pointer");

    unsigned VReg = SwiftError.getOrCreateVRegDefAt(&SI, &MIRBuilder.getMBB(),
                                                    SI.getPointerOperand());
    MIRBuilder.buildCopy(VReg, Vals[0]);
    return true;
  }

  for (unsigned i = 0; i < Vals.size(); ++i) {
    Register Addr;
    MIRBuilder.materializeGEP(Addr, Base, OffsetTy, Offsets[i] / 8);

    MachinePointerInfo Ptr(SI.getPointerOperand(), Offsets[i] / 8);
    unsigned BaseAlign = getMemOpAlignment(SI);
    auto MMO = MF->getMachineMemOperand(
        Ptr, Flags, (MRI->getType(Vals[i]).getSizeInBits() + 7) / 8,
        MinAlign(BaseAlign, Offsets[i] / 8), AAMDNodes(), nullptr,
        SI.getSyncScopeID(), SI.getOrdering());
    MIRBuilder.buildStore(Vals[i], Addr, *MMO);
  }
  return true;
}
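
/// Compute the bit offset selected by the indices of an extractvalue or
/// insertvalue (or the equivalent constant expression) rooted at \p U.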
static uint64_t getOffsetFromIndices(const User &U, const DataLayout &DL) {
  const Value *Src = U.getOperand(0);
  Type *Int32Ty = Type::getInt32Ty(U.getContext());

  // getIndexedOffsetInType is designed for GEPs, so the first index is the
  // usual array element rather than looking into the actual aggregate.
  SmallVector<Value *, 1> Indices;
  Indices.push_back(ConstantInt::get(Int32Ty, 0));

  if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(&U)) {
    for (auto Idx : EVI->indices())
      Indices.push_back(ConstantInt::get(Int32Ty, Idx));
  } else if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(&U)) {
    for (auto Idx : IVI->indices())
      Indices.push_back(ConstantInt::get(Int32Ty, Idx));
  } else {
    for (unsigned i = 1; i < U.getNumOperands(); ++i)
      Indices.push_back(U.getOperand(i));
  }

  return 8 * static_cast<uint64_t>(
                 DL.getIndexedOffsetInType(Src->getType(), Indices));
}

bool IRTranslator::translateExtractValue(const User &U,
                                         MachineIRBuilder &MIRBuilder) {
  const Value *Src = U.getOperand(0);
  uint64_t Offset = getOffsetFromIndices(U, *DL);
  ArrayRef<Register> SrcRegs = getOrCreateVRegs(*Src);
  ArrayRef<uint64_t> Offsets = *VMap.getOffsets(*Src);
  unsigned Idx = llvm::lower_bound(Offsets, Offset) - Offsets.begin();
  auto &DstRegs = allocateVRegs(U);

  for (unsigned i = 0; i < DstRegs.size(); ++i)
    DstRegs[i] = SrcRegs[Idx++];

  return true;
}

bool IRTranslator::translateInsertValue(const User &U,
                                        MachineIRBuilder &MIRBuilder) {
  const Value *Src = U.getOperand(0);
  uint64_t Offset = getOffsetFromIndices(U, *DL);
  auto &DstRegs = allocateVRegs(U);
  ArrayRef<uint64_t> DstOffsets = *VMap.getOffsets(U);
  ArrayRef<Register> SrcRegs = getOrCreateVRegs(*Src);
  ArrayRef<Register> InsertedRegs = getOrCreateVRegs(*U.getOperand(1));
  auto InsertedIt = InsertedRegs.begin();

  for (unsigned i = 0; i < DstRegs.size(); ++i) {
    if (DstOffsets[i] >= Offset && InsertedIt != InsertedRegs.end())
      DstRegs[i] = *InsertedIt++;
    else
      DstRegs[i] = SrcRegs[i];
  }

  return true;
}

bool IRTranslator::translateSelect(const User &U,
                                   MachineIRBuilder &MIRBuilder) {
  unsigned Tst = getOrCreateVReg(*U.getOperand(0));
  ArrayRef<Register> ResRegs = getOrCreateVRegs(U);
  ArrayRef<Register> Op0Regs = getOrCreateVRegs(*U.getOperand(1));
  ArrayRef<Register> Op1Regs = getOrCreateVRegs(*U.getOperand(2));

  const SelectInst &SI = cast<SelectInst>(U);
  uint16_t Flags = 0;
  if (const CmpInst *Cmp = dyn_cast<CmpInst>(SI.getCondition()))
    Flags = MachineInstr::copyFlagsFromInstruction(*Cmp);

  for (unsigned i = 0; i < ResRegs.size(); ++i) {
    MIRBuilder.buildInstr(TargetOpcode::G_SELECT, {ResRegs[i]},
                          {Tst, Op0Regs[i], Op1Regs[i]}, Flags);
  }

  return true;
}

bool IRTranslator::translateBitCast(const User &U,
                                    MachineIRBuilder &MIRBuilder) {
  // If we're bitcasting to the source type, we can reuse the source vreg.
  if (getLLTForType(*U.getOperand(0)->getType(), *DL) ==
      getLLTForType(*U.getType(), *DL)) {
    unsigned SrcReg = getOrCreateVReg(*U.getOperand(0));
    auto &Regs = *VMap.getVRegs(U);
    // If we already assigned a vreg for this bitcast, we can't change that.
    // Emit a copy to satisfy the users we already emitted.
    if (!Regs.empty())
      MIRBuilder.buildCopy(Regs[0], SrcReg);
    else {
      Regs.push_back(SrcReg);
      VMap.getOffsets(U)->push_back(0);
    }
    return true;
  }
  return translateCast(TargetOpcode::G_BITCAST, U, MIRBuilder);
}

bool IRTranslator::translateCast(unsigned Opcode, const User &U,
                                 MachineIRBuilder &MIRBuilder) {
  unsigned Op = getOrCreateVReg(*U.getOperand(0));
  unsigned Res = getOrCreateVReg(U);
  MIRBuilder.buildInstr(Opcode, {Res}, {Op});
  return true;
}
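
/// Lower a GEP by folding constant indices into a running byte offset and
/// emitting G_MUL/G_GEP for the variable ones; any remaining accumulated
/// offset is applied with a final G_GEP. Vector GEPs are not supported yet.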
bool IRTranslator::translateGetElementPtr(const User &U,
                                          MachineIRBuilder &MIRBuilder) {
  // FIXME: support vector GEPs.
  if (U.getType()->isVectorTy())
    return false;

  Value &Op0 = *U.getOperand(0);
  unsigned BaseReg = getOrCreateVReg(Op0);
  Type *PtrIRTy = Op0.getType();
  LLT PtrTy = getLLTForType(*PtrIRTy, *DL);
  Type *OffsetIRTy = DL->getIntPtrType(PtrIRTy);
  LLT OffsetTy = getLLTForType(*OffsetIRTy, *DL);

  int64_t Offset = 0;
  for (gep_type_iterator GTI = gep_type_begin(&U), E = gep_type_end(&U);
       GTI != E; ++GTI) {
    const Value *Idx = GTI.getOperand();
    if (StructType *StTy = GTI.getStructTypeOrNull()) {
      unsigned Field = cast<Constant>(Idx)->getUniqueInteger().getZExtValue();
      Offset += DL->getStructLayout(StTy)->getElementOffset(Field);
      continue;
    } else {
      uint64_t ElementSize = DL->getTypeAllocSize(GTI.getIndexedType());

      // If this is a scalar constant or a splat vector of constants,
      // handle it quickly.
      if (const auto *CI = dyn_cast<ConstantInt>(Idx)) {
        Offset += ElementSize * CI->getSExtValue();
        continue;
      }

      if (Offset != 0) {
        unsigned NewBaseReg = MRI->createGenericVirtualRegister(PtrTy);
        LLT OffsetTy = getLLTForType(*OffsetIRTy, *DL);
        auto OffsetMIB = MIRBuilder.buildConstant({OffsetTy}, Offset);
        MIRBuilder.buildGEP(NewBaseReg, BaseReg, OffsetMIB.getReg(0));

        BaseReg = NewBaseReg;
        Offset = 0;
      }

      unsigned IdxReg = getOrCreateVReg(*Idx);
      if (MRI->getType(IdxReg) != OffsetTy) {
        unsigned NewIdxReg = MRI->createGenericVirtualRegister(OffsetTy);
        MIRBuilder.buildSExtOrTrunc(NewIdxReg, IdxReg);
        IdxReg = NewIdxReg;
      }

      // N = N + Idx * ElementSize;
      // Avoid doing it for ElementSize of 1.
      unsigned GepOffsetReg;
      if (ElementSize != 1) {
        GepOffsetReg = MRI->createGenericVirtualRegister(OffsetTy);
        auto ElementSizeMIB = MIRBuilder.buildConstant(
            getLLTForType(*OffsetIRTy, *DL), ElementSize);
        MIRBuilder.buildMul(GepOffsetReg, ElementSizeMIB.getReg(0), IdxReg);
      } else
        GepOffsetReg = IdxReg;

      unsigned NewBaseReg = MRI->createGenericVirtualRegister(PtrTy);
      MIRBuilder.buildGEP(NewBaseReg, BaseReg, GepOffsetReg);
      BaseReg = NewBaseReg;
    }
  }

  if (Offset != 0) {
    auto OffsetMIB =
        MIRBuilder.buildConstant(getLLTForType(*OffsetIRTy, *DL), Offset);
    MIRBuilder.buildGEP(getOrCreateVReg(U), BaseReg, OffsetMIB.getReg(0));
    return true;
  }

  MIRBuilder.buildCopy(getOrCreateVReg(U), BaseReg);
  return true;
}

bool IRTranslator::translateMemfunc(const CallInst &CI,
                                    MachineIRBuilder &MIRBuilder,
                                    unsigned ID) {
  // If the source is undef, then just emit a nop.
  if (isa<UndefValue>(CI.getArgOperand(1))) {
    switch (ID) {
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::memset:
      return true;
    default:
      break;
    }
  }

  LLT SizeTy = getLLTForType(*CI.getArgOperand(2)->getType(), *DL);
  Type *DstTy = CI.getArgOperand(0)->getType();
  if (cast<PointerType>(DstTy)->getAddressSpace() != 0 ||
      SizeTy.getSizeInBits() != DL->getPointerSizeInBits(0))
    return false;

  SmallVector<CallLowering::ArgInfo, 8> Args;
  for (int i = 0; i < 3; ++i) {
    const auto &Arg = CI.getArgOperand(i);
    Args.emplace_back(getOrCreateVReg(*Arg), Arg->getType());
  }

  const char *Callee;
  switch (ID) {
  case Intrinsic::memmove:
  case Intrinsic::memcpy: {
    Type *SrcTy = CI.getArgOperand(1)->getType();
    if (cast<PointerType>(SrcTy)->getAddressSpace() != 0)
      return false;
    Callee = ID == Intrinsic::memcpy ? "memcpy" : "memmove";
    break;
  }
  case Intrinsic::memset:
    Callee = "memset";
    break;
  default:
    return false;
  }

  return CLI->lowerCall(MIRBuilder, CI.getCallingConv(),
                        MachineOperand::CreateES(Callee),
                        CallLowering::ArgInfo(0, CI.getType()), Args);
}
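
/// Copy the target's stack guard value into \p DstReg using the
/// LOAD_STACK_GUARD pseudo, attaching an invariant, dereferenceable memory
/// operand when the module declares a stack guard global.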
Tim Northoverc53606e2016-12-07 21:29:15 +00001166void IRTranslator::getStackGuard(unsigned DstReg,
1167 MachineIRBuilder &MIRBuilder) {
Tim Northoverd8b85582017-01-27 21:31:24 +00001168 const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
1169 MRI->setRegClass(DstReg, TRI->getPointerRegClass(*MF));
Tim Northovercdf23f12016-10-31 18:30:59 +00001170 auto MIB = MIRBuilder.buildInstr(TargetOpcode::LOAD_STACK_GUARD);
1171 MIB.addDef(DstReg);
1172
Tim Northover50db7f412016-12-07 21:17:47 +00001173 auto &TLI = *MF->getSubtarget().getTargetLowering();
Matthias Braunf1caa282017-12-15 22:22:58 +00001174 Value *Global = TLI.getSDagStackGuard(*MF->getFunction().getParent());
Tim Northovercdf23f12016-10-31 18:30:59 +00001175 if (!Global)
1176 return;
1177
1178 MachinePointerInfo MPInfo(Global);
Tim Northovercdf23f12016-10-31 18:30:59 +00001179 auto Flags = MachineMemOperand::MOLoad | MachineMemOperand::MOInvariant |
1180 MachineMemOperand::MODereferenceable;
Chandler Carruthc73c0302018-08-16 21:30:05 +00001181 MachineMemOperand *MemRef =
Tim Northover50db7f412016-12-07 21:17:47 +00001182 MF->getMachineMemOperand(MPInfo, Flags, DL->getPointerSizeInBits() / 8,
Fangrui Songe73534462017-11-15 06:17:32 +00001183 DL->getPointerABIAlignment(0));
Chandler Carruthc73c0302018-08-16 21:30:05 +00001184 MIB.setMemRefs({MemRef});
Tim Northovercdf23f12016-10-31 18:30:59 +00001185}
1186
Tim Northover1e656ec2016-12-08 22:44:00 +00001187bool IRTranslator::translateOverflowIntrinsic(const CallInst &CI, unsigned Op,
1188 MachineIRBuilder &MIRBuilder) {
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001189 ArrayRef<Register> ResRegs = getOrCreateVRegs(CI);
Aditya Nandakumar6b4d3432018-08-28 18:54:10 +00001190 MIRBuilder.buildInstr(Op)
1191 .addDef(ResRegs[0])
1192 .addDef(ResRegs[1])
1193 .addUse(getOrCreateVReg(*CI.getOperand(0)))
1194 .addUse(getOrCreateVReg(*CI.getOperand(1)));
Tim Northover1e656ec2016-12-08 22:44:00 +00001195
Tim Northover1e656ec2016-12-08 22:44:00 +00001196 return true;
1197}
1198
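// Maps intrinsics that need no special handling to the generic opcode that
// implements them directly. Returns Intrinsic::not_intrinsic when there is
// no such one-to-one mapping.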
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001199unsigned IRTranslator::getSimpleIntrinsicOpcode(Intrinsic::ID ID) {
Jessica Paquettee288c522019-02-06 17:25:54 +00001200 switch (ID) {
1201 default:
1202 break;
Jessica Paquette0e71e732019-02-12 17:28:17 +00001203 case Intrinsic::bswap:
1204 return TargetOpcode::G_BSWAP;
Jessica Paquettee288c522019-02-06 17:25:54 +00001205 case Intrinsic::ceil:
1206 return TargetOpcode::G_FCEIL;
1207 case Intrinsic::cos:
1208 return TargetOpcode::G_FCOS;
1209 case Intrinsic::ctpop:
1210 return TargetOpcode::G_CTPOP;
1211 case Intrinsic::exp:
1212 return TargetOpcode::G_FEXP;
1213 case Intrinsic::exp2:
1214 return TargetOpcode::G_FEXP2;
1215 case Intrinsic::fabs:
1216 return TargetOpcode::G_FABS;
Matt Arsenault55146d32019-05-16 04:08:39 +00001217 case Intrinsic::copysign:
1218 return TargetOpcode::G_FCOPYSIGN;
Matt Arsenault9dba67f2019-02-11 17:05:20 +00001219 case Intrinsic::canonicalize:
1220 return TargetOpcode::G_FCANONICALIZE;
Jessica Paquettef472f312019-02-11 17:16:32 +00001221 case Intrinsic::floor:
1222 return TargetOpcode::G_FFLOOR;
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001223 case Intrinsic::fma:
1224 return TargetOpcode::G_FMA;
Jessica Paquettee288c522019-02-06 17:25:54 +00001225 case Intrinsic::log:
1226 return TargetOpcode::G_FLOG;
1227 case Intrinsic::log2:
1228 return TargetOpcode::G_FLOG2;
1229 case Intrinsic::log10:
1230 return TargetOpcode::G_FLOG10;
Jessica Paquettebd7ac302019-04-25 16:39:28 +00001231 case Intrinsic::nearbyint:
1232 return TargetOpcode::G_FNEARBYINT;
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001233 case Intrinsic::pow:
1234 return TargetOpcode::G_FPOW;
Jessica Paquettead69af32019-04-19 21:46:12 +00001235 case Intrinsic::rint:
1236 return TargetOpcode::G_FRINT;
Jessica Paquettee288c522019-02-06 17:25:54 +00001237 case Intrinsic::round:
1238 return TargetOpcode::G_INTRINSIC_ROUND;
1239 case Intrinsic::sin:
1240 return TargetOpcode::G_FSIN;
1241 case Intrinsic::sqrt:
1242 return TargetOpcode::G_FSQRT;
1243 case Intrinsic::trunc:
1244 return TargetOpcode::G_INTRINSIC_TRUNC;
1245 }
1246 return Intrinsic::not_intrinsic;
1247}
1248
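// Translate an intrinsic whose lowering is purely mechanical: one def for
// the result, one use per argument, and the call's IR flags copied over.
// Illustratively, a call to @llvm.fabs.f32 becomes a G_FABS of the
// argument's vreg.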
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001249bool IRTranslator::translateSimpleIntrinsic(const CallInst &CI,
1250 Intrinsic::ID ID,
1251 MachineIRBuilder &MIRBuilder) {
Jessica Paquettee288c522019-02-06 17:25:54 +00001252
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001253 unsigned Op = getSimpleIntrinsicOpcode(ID);
Jessica Paquettee288c522019-02-06 17:25:54 +00001254
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001255 // Is this a simple intrinsic?
Jessica Paquettee288c522019-02-06 17:25:54 +00001256 if (Op == Intrinsic::not_intrinsic)
1257 return false;
1258
1259 // Yes. Let's translate it.
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001260 SmallVector<llvm::SrcOp, 4> VRegs;
1261 for (auto &Arg : CI.arg_operands())
1262 VRegs.push_back(getOrCreateVReg(*Arg));
1263
1264 MIRBuilder.buildInstr(Op, {getOrCreateVReg(CI)}, VRegs,
Michael Bergf0d81a32019-02-06 19:57:06 +00001265 MachineInstr::copyFlagsFromInstruction(CI));
Jessica Paquettee288c522019-02-06 17:25:54 +00001266 return true;
1267}
1268
Tim Northoverc53606e2016-12-07 21:29:15 +00001269bool IRTranslator::translateKnownIntrinsic(const CallInst &CI, Intrinsic::ID ID,
1270 MachineIRBuilder &MIRBuilder) {
Jessica Paquettee288c522019-02-06 17:25:54 +00001271
Jessica Paquetteacbb7ca2019-02-12 17:38:34 +00001272 // If this is a simple intrinsic (that is, we just need to add a def of
1273 // a vreg and uses for each arg operand), then translate it.
1274 if (translateSimpleIntrinsic(CI, ID, MIRBuilder))
Jessica Paquettee288c522019-02-06 17:25:54 +00001275 return true;
1276
Tim Northover91c81732016-08-19 17:17:06 +00001277 switch (ID) {
Tim Northover1e656ec2016-12-08 22:44:00 +00001278 default:
1279 break;
Tim Northover0e011702017-02-10 19:10:38 +00001280 case Intrinsic::lifetime_start:
Jessica Paquette2e35dc52019-01-28 19:22:29 +00001281 case Intrinsic::lifetime_end: {
1282 // No stack colouring at -O0; discard region information.
1283 if (MF->getTarget().getOptLevel() == CodeGenOpt::None)
1284 return true;
1285
1286 unsigned Op = ID == Intrinsic::lifetime_start ? TargetOpcode::LIFETIME_START
1287 : TargetOpcode::LIFETIME_END;
1288
1289 // Get the underlying objects for the location passed to the lifetime
1290 // marker.
Bjorn Pettersson71e8c6f2019-04-24 06:55:50 +00001291 SmallVector<const Value *, 4> Allocas;
Jessica Paquette2e35dc52019-01-28 19:22:29 +00001292 GetUnderlyingObjects(CI.getArgOperand(1), Allocas, *DL);
1293
1294 // Iterate over each underlying object, creating lifetime markers for each
1295 // static alloca. Quit if we find a non-static alloca.
Bjorn Pettersson71e8c6f2019-04-24 06:55:50 +00001296 for (const Value *V : Allocas) {
1297 const AllocaInst *AI = dyn_cast<AllocaInst>(V);
Jessica Paquette2e35dc52019-01-28 19:22:29 +00001298 if (!AI)
1299 continue;
1300
1301 if (!AI->isStaticAlloca())
1302 return true;
1303
1304 MIRBuilder.buildInstr(Op).addFrameIndex(getOrCreateFrameIndex(*AI));
1305 }
Tim Northover0e011702017-02-10 19:10:38 +00001306 return true;
Jessica Paquette2e35dc52019-01-28 19:22:29 +00001307 }
Tim Northover09aac4a2017-01-26 23:39:14 +00001308 case Intrinsic::dbg_declare: {
1309 const DbgDeclareInst &DI = cast<DbgDeclareInst>(CI);
1310 assert(DI.getVariable() && "Missing variable");
1311
1312 const Value *Address = DI.getAddress();
1313 if (!Address || isa<UndefValue>(Address)) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +00001314 LLVM_DEBUG(dbgs() << "Dropping debug info for " << DI << "\n");
Tim Northover09aac4a2017-01-26 23:39:14 +00001315 return true;
1316 }
1317
Tim Northover09aac4a2017-01-26 23:39:14 +00001318 assert(DI.getVariable()->isValidLocationForIntrinsic(
1319 MIRBuilder.getDebugLoc()) &&
1320 "Expected inlined-at fields to agree");
Tim Northover7a9ea8f2017-03-09 21:12:06 +00001321 auto AI = dyn_cast<AllocaInst>(Address);
1322 if (AI && AI->isStaticAlloca()) {
1323 // Static allocas are tracked at the MF level, no need for DBG_VALUE
1324 // instructions (in fact, they get ignored if they *do* exist).
1325 MF->setVariableDbgInfo(DI.getVariable(), DI.getExpression(),
1326 getOrCreateFrameIndex(*AI), DI.getDebugLoc());
Josh Stonef446fac2018-09-11 17:52:01 +00001327 } else {
1328 // A dbg.declare describes the address of a source variable, so lower it
1329 // into an indirect DBG_VALUE.
1330 MIRBuilder.buildIndirectDbgValue(getOrCreateVReg(*Address),
1331 DI.getVariable(), DI.getExpression());
1332 }
Tim Northoverb58346f2016-12-08 22:44:13 +00001333 return true;
Tim Northover09aac4a2017-01-26 23:39:14 +00001334 }
Hsiangkai Wang2532ac82018-08-17 15:22:04 +00001335 case Intrinsic::dbg_label: {
1336 const DbgLabelInst &DI = cast<DbgLabelInst>(CI);
1337 assert(DI.getLabel() && "Missing label");
1338
1339 assert(DI.getLabel()->isValidLocationForIntrinsic(
1340 MIRBuilder.getDebugLoc()) &&
1341 "Expected inlined-at fields to agree");
1342
1343 MIRBuilder.buildDbgLabel(DI.getLabel());
1344 return true;
1345 }
Tim Northoverd0d025a2017-02-07 20:08:59 +00001346 case Intrinsic::vaend:
1347 // No target I know of cares about va_end. Certainly no in-tree target
1348 // does. Simplest intrinsic ever!
1349 return true;
Tim Northoverf19d4672017-02-08 17:57:20 +00001350 case Intrinsic::vastart: {
1351 auto &TLI = *MF->getSubtarget().getTargetLowering();
1352 Value *Ptr = CI.getArgOperand(0);
1353 unsigned ListSize = TLI.getVaListSizeInBits(*DL) / 8;
1354
Matt Arsenault2a645982019-01-31 01:38:47 +00001355 // FIXME: Get alignment
Tim Northoverf19d4672017-02-08 17:57:20 +00001356 MIRBuilder.buildInstr(TargetOpcode::G_VASTART)
1357 .addUse(getOrCreateVReg(*Ptr))
1358 .addMemOperand(MF->getMachineMemOperand(
Matt Arsenault2a645982019-01-31 01:38:47 +00001359 MachinePointerInfo(Ptr), MachineMemOperand::MOStore, ListSize, 1));
Tim Northoverf19d4672017-02-08 17:57:20 +00001360 return true;
1361 }
Tim Northover09aac4a2017-01-26 23:39:14 +00001362 case Intrinsic::dbg_value: {
1363 // This form of DBG_VALUE is target-independent.
1364 const DbgValueInst &DI = cast<DbgValueInst>(CI);
1365 const Value *V = DI.getValue();
1366 assert(DI.getVariable()->isValidLocationForIntrinsic(
1367 MIRBuilder.getDebugLoc()) &&
1368 "Expected inlined-at fields to agree");
1369 if (!V) {
1370 // Currently the optimizer can produce this; insert an undef to
1371 // help debugging. Probably the optimizer should not do this.
Adrian Prantld92ac5a2017-07-28 22:46:20 +00001372 MIRBuilder.buildIndirectDbgValue(0, DI.getVariable(), DI.getExpression());
Tim Northover09aac4a2017-01-26 23:39:14 +00001373 } else if (const auto *CI = dyn_cast<Constant>(V)) {
Adrian Prantld92ac5a2017-07-28 22:46:20 +00001374 MIRBuilder.buildConstDbgValue(*CI, DI.getVariable(), DI.getExpression());
Tim Northover09aac4a2017-01-26 23:39:14 +00001375 } else {
1376 unsigned Reg = getOrCreateVReg(*V);
1377 // FIXME: This does not handle register-indirect values at offset 0. The
1378 // direct/indirect thing shouldn't really be handled by something as
1379 // implicit as reg+noreg vs reg+imm in the first place, but it seems
1380 // pretty baked in right now.
Adrian Prantlabe04752017-07-28 20:21:02 +00001381 MIRBuilder.buildDirectDbgValue(Reg, DI.getVariable(), DI.getExpression());
Tim Northover09aac4a2017-01-26 23:39:14 +00001382 }
1383 return true;
1384 }
Tim Northover1e656ec2016-12-08 22:44:00 +00001385 case Intrinsic::uadd_with_overflow:
Aditya Nandakumar6b4d3432018-08-28 18:54:10 +00001386 return translateOverflowIntrinsic(CI, TargetOpcode::G_UADDO, MIRBuilder);
Tim Northover1e656ec2016-12-08 22:44:00 +00001387 case Intrinsic::sadd_with_overflow:
1388 return translateOverflowIntrinsic(CI, TargetOpcode::G_SADDO, MIRBuilder);
1389 case Intrinsic::usub_with_overflow:
Aditya Nandakumar6b4d3432018-08-28 18:54:10 +00001390 return translateOverflowIntrinsic(CI, TargetOpcode::G_USUBO, MIRBuilder);
Tim Northover1e656ec2016-12-08 22:44:00 +00001391 case Intrinsic::ssub_with_overflow:
1392 return translateOverflowIntrinsic(CI, TargetOpcode::G_SSUBO, MIRBuilder);
1393 case Intrinsic::umul_with_overflow:
1394 return translateOverflowIntrinsic(CI, TargetOpcode::G_UMULO, MIRBuilder);
1395 case Intrinsic::smul_with_overflow:
1396 return translateOverflowIntrinsic(CI, TargetOpcode::G_SMULO, MIRBuilder);
Volkan Keles92837632018-02-13 00:47:46 +00001397 case Intrinsic::fmuladd: {
1398 const TargetMachine &TM = MF->getTarget();
1399 const TargetLowering &TLI = *MF->getSubtarget().getTargetLowering();
1400 unsigned Dst = getOrCreateVReg(CI);
1401 unsigned Op0 = getOrCreateVReg(*CI.getArgOperand(0));
1402 unsigned Op1 = getOrCreateVReg(*CI.getArgOperand(1));
1403 unsigned Op2 = getOrCreateVReg(*CI.getArgOperand(2));
1404 if (TM.Options.AllowFPOpFusion != FPOpFusion::Strict &&
1405 TLI.isFMAFasterThanFMulAndFAdd(TLI.getValueType(*DL, CI.getType()))) {
1406 // TODO: Revisit this to see if we should move this part of the
1407 // lowering to the combiner.
Michael Bergf0d81a32019-02-06 19:57:06 +00001408 MIRBuilder.buildInstr(TargetOpcode::G_FMA, {Dst}, {Op0, Op1, Op2},
1409 MachineInstr::copyFlagsFromInstruction(CI));
Volkan Keles92837632018-02-13 00:47:46 +00001410 } else {
1411 LLT Ty = getLLTForType(*CI.getType(), *DL);
Michael Bergf0d81a32019-02-06 19:57:06 +00001412 auto FMul = MIRBuilder.buildInstr(TargetOpcode::G_FMUL, {Ty}, {Op0, Op1},
1413 MachineInstr::copyFlagsFromInstruction(CI));
1414 MIRBuilder.buildInstr(TargetOpcode::G_FADD, {Dst}, {FMul, Op2},
1415 MachineInstr::copyFlagsFromInstruction(CI));
Volkan Keles92837632018-02-13 00:47:46 +00001416 }
1417 return true;
1418 }
Tim Northover3f186032016-10-18 20:03:45 +00001419 case Intrinsic::memcpy:
Tim Northover79f43f12017-01-30 19:33:07 +00001420 case Intrinsic::memmove:
1421 case Intrinsic::memset:
1422 return translateMemfunc(CI, MIRBuilder, ID);
Tim Northovera9105be2016-11-09 22:39:54 +00001423 case Intrinsic::eh_typeid_for: {
1424 GlobalValue *GV = ExtractTypeInfo(CI.getArgOperand(0));
1425 unsigned Reg = getOrCreateVReg(CI);
Tim Northover50db7f412016-12-07 21:17:47 +00001426 unsigned TypeID = MF->getTypeIDFor(GV);
Tim Northovera9105be2016-11-09 22:39:54 +00001427 MIRBuilder.buildConstant(Reg, TypeID);
1428 return true;
1429 }
Tim Northover6e904302016-10-18 20:03:51 +00001430 case Intrinsic::objectsize: {
1431 // If we don't know by now, we're never going to know.
1432 const ConstantInt *Min = cast<ConstantInt>(CI.getArgOperand(1));
1433
1434 MIRBuilder.buildConstant(getOrCreateVReg(CI), Min->isZero() ? -1ULL : 0);
1435 return true;
1436 }
James Y Knight72f76bf2018-11-07 15:24:12 +00001437 case Intrinsic::is_constant:
1438 // If this wasn't constant-folded away by now, then it's not a
1439 // constant.
1440 MIRBuilder.buildConstant(getOrCreateVReg(CI), 0);
1441 return true;
Tim Northovercdf23f12016-10-31 18:30:59 +00001442 case Intrinsic::stackguard:
Tim Northoverc53606e2016-12-07 21:29:15 +00001443 getStackGuard(getOrCreateVReg(CI), MIRBuilder);
Tim Northovercdf23f12016-10-31 18:30:59 +00001444 return true;
1445 case Intrinsic::stackprotector: {
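    // Load the stack guard value and store it into the frame slot reserved
    // for it. The store is volatile, presumably so that it is never
    // eliminated as dead.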
Daniel Sanders52b4ce72017-03-07 23:20:35 +00001446 LLT PtrTy = getLLTForType(*CI.getArgOperand(0)->getType(), *DL);
Tim Northovercdf23f12016-10-31 18:30:59 +00001447 unsigned GuardVal = MRI->createGenericVirtualRegister(PtrTy);
Tim Northoverc53606e2016-12-07 21:29:15 +00001448 getStackGuard(GuardVal, MIRBuilder);
Tim Northovercdf23f12016-10-31 18:30:59 +00001449
1450 AllocaInst *Slot = cast<AllocaInst>(CI.getArgOperand(1));
Petr Pavlu84e89ff2018-12-10 15:15:05 +00001451 int FI = getOrCreateFrameIndex(*Slot);
1452 MF->getFrameInfo().setStackProtectorIndex(FI);
1453
Tim Northovercdf23f12016-10-31 18:30:59 +00001454 MIRBuilder.buildStore(
1455 GuardVal, getOrCreateVReg(*Slot),
Petr Pavlu84e89ff2018-12-10 15:15:05 +00001456 *MF->getMachineMemOperand(MachinePointerInfo::getFixedStack(*MF, FI),
1457 MachineMemOperand::MOStore |
1458 MachineMemOperand::MOVolatile,
1459 PtrTy.getSizeInBits() / 8, 8));
Tim Northovercdf23f12016-10-31 18:30:59 +00001460 return true;
1461 }
Jessica Paquetteed233522019-04-02 22:46:31 +00001462 case Intrinsic::stacksave: {
1463 // Save the stack pointer to the location provided by the intrinsic.
1464 unsigned Reg = getOrCreateVReg(CI);
1465 unsigned StackPtr = MF->getSubtarget()
1466 .getTargetLowering()
1467 ->getStackPointerRegisterToSaveRestore();
1468
1469 // If the target doesn't specify a stack pointer, then fall back.
1470 if (!StackPtr)
1471 return false;
1472
1473 MIRBuilder.buildCopy(Reg, StackPtr);
1474 return true;
1475 }
1476 case Intrinsic::stackrestore: {
1477 // Restore the stack pointer from the location provided by the intrinsic.
1478 unsigned Reg = getOrCreateVReg(*CI.getArgOperand(0));
1479 unsigned StackPtr = MF->getSubtarget()
1480 .getTargetLowering()
1481 ->getStackPointerRegisterToSaveRestore();
1482
1483 // If the target doesn't specify a stack pointer, then fall back.
1484 if (!StackPtr)
1485 return false;
1486
1487 MIRBuilder.buildCopy(StackPtr, Reg);
1488 return true;
1489 }
Aditya Nandakumare07b3b72018-08-04 01:22:12 +00001490 case Intrinsic::cttz:
1491 case Intrinsic::ctlz: {
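    // The second operand of cttz/ctlz is a flag that is true when a zero
    // input gives an undefined result, so it selects between the plain and
    // the *_ZERO_UNDEF generic opcodes.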
1492 ConstantInt *Cst = cast<ConstantInt>(CI.getArgOperand(1));
1493 bool isTrailing = ID == Intrinsic::cttz;
1494 unsigned Opcode = isTrailing
1495 ? Cst->isZero() ? TargetOpcode::G_CTTZ
1496 : TargetOpcode::G_CTTZ_ZERO_UNDEF
1497 : Cst->isZero() ? TargetOpcode::G_CTLZ
1498 : TargetOpcode::G_CTLZ_ZERO_UNDEF;
1499 MIRBuilder.buildInstr(Opcode)
1500 .addDef(getOrCreateVReg(CI))
1501 .addUse(getOrCreateVReg(*CI.getArgOperand(0)));
1502 return true;
1503 }
Jessica Paquetteb328d952018-10-05 21:02:46 +00001504 case Intrinsic::invariant_start: {
1505 LLT PtrTy = getLLTForType(*CI.getArgOperand(0)->getType(), *DL);
1506 unsigned Undef = MRI->createGenericVirtualRegister(PtrTy);
1507 MIRBuilder.buildUndef(Undef);
1508 return true;
1509 }
1510 case Intrinsic::invariant_end:
1511 return true;
Volkan Keles97204a62019-06-07 20:19:27 +00001512 case Intrinsic::assume:
1513 case Intrinsic::var_annotation:
1514 case Intrinsic::sideeffect:
1515 // Discard annotate attributes, assumptions, and artificial side-effects.
1516 return true;
Tim Northover91c81732016-08-19 17:17:06 +00001517 }
Tim Northover1e656ec2016-12-08 22:44:00 +00001518 return false;
Tim Northover91c81732016-08-19 17:17:06 +00001519}
1520
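// Translate inline asm. Only asm with an empty constraint string is handled
// here; anything with real constraints fails the translation.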
Tim Northoveraa995c92017-03-09 23:36:26 +00001521bool IRTranslator::translateInlineAsm(const CallInst &CI,
1522 MachineIRBuilder &MIRBuilder) {
1523 const InlineAsm &IA = cast<InlineAsm>(*CI.getCalledValue());
1524 if (!IA.getConstraintString().empty())
1525 return false;
1526
1527 unsigned ExtraInfo = 0;
1528 if (IA.hasSideEffects())
1529 ExtraInfo |= InlineAsm::Extra_HasSideEffects;
1530 if (IA.getDialect() == InlineAsm::AD_Intel)
1531 ExtraInfo |= InlineAsm::Extra_AsmDialect;
1532
1533 MIRBuilder.buildInstr(TargetOpcode::INLINEASM)
1534 .addExternalSymbol(IA.getAsmString().c_str())
1535 .addImm(ExtraInfo);
1536
1537 return true;
1538}
1539
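// Merge the component vregs of V's value into a single wide vreg covering
// V's whole type, chaining one G_INSERT per piece at the offsets recorded
// in the value map. For a value that already lives in one vreg this is a
// no-op and that vreg is returned directly.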
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001540unsigned IRTranslator::packRegs(const Value &V,
1541 MachineIRBuilder &MIRBuilder) {
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001542 ArrayRef<Register> Regs = getOrCreateVRegs(V);
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001543 ArrayRef<uint64_t> Offsets = *VMap.getOffsets(V);
1544 LLT BigTy = getLLTForType(*V.getType(), *DL);
1545
1546 if (Regs.size() == 1)
1547 return Regs[0];
1548
1549 unsigned Dst = MRI->createGenericVirtualRegister(BigTy);
1550 MIRBuilder.buildUndef(Dst);
1551 for (unsigned i = 0; i < Regs.size(); ++i) {
1552 unsigned NewDst = MRI->createGenericVirtualRegister(BigTy);
1553 MIRBuilder.buildInsert(NewDst, Dst, Regs[i], Offsets[i]);
1554 Dst = NewDst;
1555 }
1556 return Dst;
1557}
1558
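// The inverse of packRegs: extract each component of V's value out of the
// wide vreg Src with one G_EXTRACT per piece.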
1559void IRTranslator::unpackRegs(const Value &V, unsigned Src,
1560 MachineIRBuilder &MIRBuilder) {
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001561 ArrayRef<Register> Regs = getOrCreateVRegs(V);
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001562 ArrayRef<uint64_t> Offsets = *VMap.getOffsets(V);
1563
1564 for (unsigned i = 0; i < Regs.size(); ++i)
1565 MIRBuilder.buildExtract(Regs[i], Src, Offsets[i]);
1566}
1567
Tim Northoverc53606e2016-12-07 21:29:15 +00001568bool IRTranslator::translateCall(const User &U, MachineIRBuilder &MIRBuilder) {
Tim Northover357f1be2016-08-10 23:02:41 +00001569 const CallInst &CI = cast<CallInst>(U);
Tim Northover50db7f412016-12-07 21:17:47 +00001570 auto TII = MF->getTarget().getIntrinsicInfo();
Tim Northover406024a2016-08-10 21:44:01 +00001571 const Function *F = CI.getCalledFunction();
Tim Northover5fb414d2016-07-29 22:32:36 +00001572
Martin Storsjocc981d22018-01-30 19:50:58 +00001573 // FIXME: support Windows dllimport function calls.
1574 if (F && F->hasDLLImportStorageClass())
1575 return false;
1576
Tim Northover3babfef2017-01-19 23:59:35 +00001577 if (CI.isInlineAsm())
Tim Northoveraa995c92017-03-09 23:36:26 +00001578 return translateInlineAsm(CI, MIRBuilder);
Tim Northover3babfef2017-01-19 23:59:35 +00001579
Amara Emerson913918c2018-01-02 18:56:39 +00001580 Intrinsic::ID ID = Intrinsic::not_intrinsic;
1581 if (F && F->isIntrinsic()) {
1582 ID = F->getIntrinsicID();
1583 if (TII && ID == Intrinsic::not_intrinsic)
1584 ID = static_cast<Intrinsic::ID>(TII->getIntrinsicID(F));
1585 }
1586
1587 if (!F || !F->isIntrinsic() || ID == Intrinsic::not_intrinsic) {
Matt Arsenault13371692019-03-14 14:18:56 +00001588 bool IsSplitType = valueIsSplit(CI);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001589 Register Res = IsSplitType ? MRI->createGenericVirtualRegister(
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001590 getLLTForType(*CI.getType(), *DL))
1591 : getOrCreateVReg(CI);
1592
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001593 SmallVector<Register, 8> Args;
1594 Register SwiftErrorVReg;
Tim Northover3b2157a2019-05-24 08:40:13 +00001595 for (auto &Arg: CI.arg_operands()) {
1596 if (CLI->supportSwiftError() && isSwiftError(Arg)) {
1597 LLT Ty = getLLTForType(*Arg->getType(), *DL);
1598 unsigned InVReg = MRI->createGenericVirtualRegister(Ty);
1599 MIRBuilder.buildCopy(InVReg, SwiftError.getOrCreateVRegUseAt(
1600 &CI, &MIRBuilder.getMBB(), Arg));
1601 Args.push_back(InVReg);
1602 SwiftErrorVReg =
1603 SwiftError.getOrCreateVRegDefAt(&CI, &MIRBuilder.getMBB(), Arg);
1604 continue;
1605 }
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001606 Args.push_back(packRegs(*Arg, MIRBuilder));
Tim Northover3b2157a2019-05-24 08:40:13 +00001607 }
Tim Northover406024a2016-08-10 21:44:01 +00001608
Tim Northoverd1e951e2017-03-09 22:00:39 +00001609 MF->getFrameInfo().setHasCalls(true);
Tim Northover3b2157a2019-05-24 08:40:13 +00001610 bool Success =
1611 CLI->lowerCall(MIRBuilder, &CI, Res, Args, SwiftErrorVReg,
1612 [&]() { return getOrCreateVReg(*CI.getCalledValue()); });
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001613
1614 if (IsSplitType)
1615 unpackRegs(CI, Res, MIRBuilder);
Tim Northover3b2157a2019-05-24 08:40:13 +00001616
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001617 return Success;
Tim Northover406024a2016-08-10 21:44:01 +00001618 }
1619
Tim Northover406024a2016-08-10 21:44:01 +00001620 assert(ID != Intrinsic::not_intrinsic && "unknown intrinsic");
Tim Northover5fb414d2016-07-29 22:32:36 +00001621
Tim Northoverc53606e2016-12-07 21:29:15 +00001622 if (translateKnownIntrinsic(CI, ID, MIRBuilder))
Tim Northover91c81732016-08-19 17:17:06 +00001623 return true;
1624
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001625 ArrayRef<Register> ResultRegs;
Matt Arsenault13371692019-03-14 14:18:56 +00001626 if (!CI.getType()->isVoidTy())
1627 ResultRegs = getOrCreateVRegs(CI);
1628
Matt Arsenault3e140062019-06-17 17:01:35 +00001629 // Ignore the callsite attributes. Backend code is most likely not expecting
1630 // an intrinsic to sometimes have side effects and sometimes not.
Tim Northover5fb414d2016-07-29 22:32:36 +00001631 MachineInstrBuilder MIB =
Matt Arsenault3e140062019-06-17 17:01:35 +00001632 MIRBuilder.buildIntrinsic(ID, ResultRegs, !F->doesNotAccessMemory());
Michael Bergd573aa02019-04-18 18:48:57 +00001633 if (isa<FPMathOperator>(CI))
1634 MIB->copyIRFlags(CI);
Tim Northover5fb414d2016-07-29 22:32:36 +00001635
1636 for (auto &Arg : CI.arg_operands()) {
Ahmed Bougacha55d10422017-03-07 20:53:09 +00001637 // Some intrinsics take metadata parameters. Reject them.
1638 if (isa<MetadataAsValue>(Arg))
1639 return false;
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001640 MIB.addUse(packRegs(*Arg, MIRBuilder));
Tim Northover5fb414d2016-07-29 22:32:36 +00001641 }
Volkan Kelesebe6bb92017-06-05 22:17:17 +00001642
1643 // Add a MachineMemOperand if it is a target mem intrinsic.
1644 const TargetLowering &TLI = *MF->getSubtarget().getTargetLowering();
1645 TargetLowering::IntrinsicInfo Info;
1646 // TODO: Add a GlobalISel version of getTgtMemIntrinsic.
Matt Arsenault7d7adf42017-12-14 22:34:10 +00001647 if (TLI.getTgtMemIntrinsic(Info, CI, *MF, ID)) {
Matt Arsenault50d65792019-01-31 23:41:23 +00001648 unsigned Align = Info.align;
1649 if (Align == 0)
1650 Align = DL->getABITypeAlignment(Info.memVT.getTypeForEVT(F->getContext()));
Matt Arsenault2a645982019-01-31 01:38:47 +00001651
Matt Arsenault50d65792019-01-31 23:41:23 +00001652 uint64_t Size = Info.memVT.getStoreSize();
Volkan Kelesebe6bb92017-06-05 22:17:17 +00001653 MIB.addMemOperand(MF->getMachineMemOperand(MachinePointerInfo(Info.ptrVal),
Matt Arsenault50d65792019-01-31 23:41:23 +00001654 Info.flags, Size, Align));
Volkan Kelesebe6bb92017-06-05 22:17:17 +00001655 }
1656
Tim Northover5fb414d2016-07-29 22:32:36 +00001657 return true;
1658}
1659
Tim Northoverc53606e2016-12-07 21:29:15 +00001660bool IRTranslator::translateInvoke(const User &U,
1661 MachineIRBuilder &MIRBuilder) {
Tim Northovera9105be2016-11-09 22:39:54 +00001662 const InvokeInst &I = cast<InvokeInst>(U);
Tim Northover50db7f412016-12-07 21:17:47 +00001663 MCContext &Context = MF->getContext();
Tim Northovera9105be2016-11-09 22:39:54 +00001664
1665 const BasicBlock *ReturnBB = I.getSuccessor(0);
1666 const BasicBlock *EHPadBB = I.getSuccessor(1);
1667
Ahmed Bougacha4ec6d5a2017-03-10 00:25:35 +00001668 const Value *Callee = I.getCalledValue();
Tim Northovera9105be2016-11-09 22:39:54 +00001669 const Function *Fn = dyn_cast<Function>(Callee);
1670 if (isa<InlineAsm>(Callee))
1671 return false;
1672
1673 // FIXME: support invoking patchpoint and statepoint intrinsics.
1674 if (Fn && Fn->isIntrinsic())
1675 return false;
1676
1677 // FIXME: support whatever these are.
1678 if (I.countOperandBundlesOfType(LLVMContext::OB_deopt))
1679 return false;
1680
1681 // FIXME: support Windows exception handling.
1682 if (!isa<LandingPadInst>(EHPadBB->front()))
1683 return false;
1684
Matthias Braund0ee66c2016-12-01 19:32:15 +00001685 // Emit the actual call, bracketed by EH_LABELs so that the MF knows about
Tim Northovera9105be2016-11-09 22:39:54 +00001686 // the region covered by the try.
Matthias Braund0ee66c2016-12-01 19:32:15 +00001687 MCSymbol *BeginSymbol = Context.createTempSymbol();
Tim Northovera9105be2016-11-09 22:39:54 +00001688 MIRBuilder.buildInstr(TargetOpcode::EH_LABEL).addSym(BeginSymbol);
1689
Matt Arsenault0aab9992019-04-10 17:27:55 +00001690 unsigned Res = 0;
1691 if (!I.getType()->isVoidTy())
1692 Res = MRI->createGenericVirtualRegister(getLLTForType(*I.getType(), *DL));
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001693 SmallVector<Register, 8> Args;
1694 Register SwiftErrorVReg;
Tim Northover3b2157a2019-05-24 08:40:13 +00001695 for (auto &Arg : I.arg_operands()) {
1696 if (CLI->supportSwiftError() && isSwiftError(Arg)) {
1697 LLT Ty = getLLTForType(*Arg->getType(), *DL);
1698 unsigned InVReg = MRI->createGenericVirtualRegister(Ty);
1699 MIRBuilder.buildCopy(InVReg, SwiftError.getOrCreateVRegUseAt(
1700 &I, &MIRBuilder.getMBB(), Arg));
1701 Args.push_back(InVReg);
1702 SwiftErrorVReg =
1703 SwiftError.getOrCreateVRegDefAt(&I, &MIRBuilder.getMBB(), Arg);
1704 continue;
1705 }
Tim Northovera9105be2016-11-09 22:39:54 +00001706
Tim Northover3b2157a2019-05-24 08:40:13 +00001707 Args.push_back(packRegs(*Arg, MIRBuilder));
1708 }
1709
1710 if (!CLI->lowerCall(MIRBuilder, &I, Res, Args, SwiftErrorVReg,
Ahmed Bougacha4ec6d5a2017-03-10 00:25:35 +00001711 [&]() { return getOrCreateVReg(*I.getCalledValue()); }))
1712 return false;
Tim Northovera9105be2016-11-09 22:39:54 +00001713
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001714 unpackRegs(I, Res, MIRBuilder);
1715
Matthias Braund0ee66c2016-12-01 19:32:15 +00001716 MCSymbol *EndSymbol = Context.createTempSymbol();
Tim Northovera9105be2016-11-09 22:39:54 +00001717 MIRBuilder.buildInstr(TargetOpcode::EH_LABEL).addSym(EndSymbol);
1718
1719 // FIXME: track probabilities.
Ahmed Bougachaa61c2142017-03-15 18:22:33 +00001720 MachineBasicBlock &EHPadMBB = getMBB(*EHPadBB),
1721 &ReturnMBB = getMBB(*ReturnBB);
Tim Northover50db7f412016-12-07 21:17:47 +00001722 MF->addInvoke(&EHPadMBB, BeginSymbol, EndSymbol);
Tim Northovera9105be2016-11-09 22:39:54 +00001723 MIRBuilder.getMBB().addSuccessor(&ReturnMBB);
1724 MIRBuilder.getMBB().addSuccessor(&EHPadMBB);
Tim Northoverc6bfa482017-01-31 20:12:18 +00001725 MIRBuilder.buildBr(ReturnMBB);
Tim Northovera9105be2016-11-09 22:39:54 +00001726
1727 return true;
1728}
1729
Craig Topper784929d2019-02-08 20:48:56 +00001730bool IRTranslator::translateCallBr(const User &U,
1731 MachineIRBuilder &MIRBuilder) {
1732 // FIXME: Implement this.
1733 return false;
1734}
1735
Tim Northoverc53606e2016-12-07 21:29:15 +00001736bool IRTranslator::translateLandingPad(const User &U,
1737 MachineIRBuilder &MIRBuilder) {
Tim Northovera9105be2016-11-09 22:39:54 +00001738 const LandingPadInst &LP = cast<LandingPadInst>(U);
1739
1740 MachineBasicBlock &MBB = MIRBuilder.getMBB();
Tim Northovera9105be2016-11-09 22:39:54 +00001741
1742 MBB.setIsEHPad();
1743
1744 // If there aren't registers to copy the values into (e.g., during SjLj
1745 // exceptions), then don't bother.
Tim Northover50db7f412016-12-07 21:17:47 +00001746 auto &TLI = *MF->getSubtarget().getTargetLowering();
Matthias Braunf1caa282017-12-15 22:22:58 +00001747 const Constant *PersonalityFn = MF->getFunction().getPersonalityFn();
Tim Northovera9105be2016-11-09 22:39:54 +00001748 if (TLI.getExceptionPointerRegister(PersonalityFn) == 0 &&
1749 TLI.getExceptionSelectorRegister(PersonalityFn) == 0)
1750 return true;
1751
1752 // If landingpad's return type is token type, we don't create DAG nodes
1753 // for its exception pointer and selector value. The extraction of exception
1754 // pointer or selector value from token type landingpads is not currently
1755 // supported.
1756 if (LP.getType()->isTokenTy())
1757 return true;
1758
1759 // Add a label to mark the beginning of the landing pad. Deletion of the
1760 // landing pad can thus be detected via the MachineModuleInfo.
1761 MIRBuilder.buildInstr(TargetOpcode::EH_LABEL)
Tim Northover50db7f412016-12-07 21:17:47 +00001762 .addSym(MF->addLandingPad(&MBB));
Tim Northovera9105be2016-11-09 22:39:54 +00001763
Daniel Sanders1351db42017-03-07 23:32:10 +00001764 LLT Ty = getLLTForType(*LP.getType(), *DL);
Tim Northover542d1c12017-03-07 23:04:06 +00001765 unsigned Undef = MRI->createGenericVirtualRegister(Ty);
1766 MIRBuilder.buildUndef(Undef);
1767
Justin Bognera0295312017-01-25 00:16:53 +00001768 SmallVector<LLT, 2> Tys;
1769 for (Type *Ty : cast<StructType>(LP.getType())->elements())
Daniel Sanders52b4ce72017-03-07 23:20:35 +00001770 Tys.push_back(getLLTForType(*Ty, *DL));
Justin Bognera0295312017-01-25 00:16:53 +00001771 assert(Tys.size() == 2 && "Only two-valued landingpads are supported");
1772
Tim Northovera9105be2016-11-09 22:39:54 +00001773 // Mark exception register as live in.
Tim Northover542d1c12017-03-07 23:04:06 +00001774 unsigned ExceptionReg = TLI.getExceptionPointerRegister(PersonalityFn);
1775 if (!ExceptionReg)
1776 return false;
Tim Northovera9105be2016-11-09 22:39:54 +00001777
Tim Northover542d1c12017-03-07 23:04:06 +00001778 MBB.addLiveIn(ExceptionReg);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00001779 ArrayRef<Register> ResRegs = getOrCreateVRegs(LP);
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001780 MIRBuilder.buildCopy(ResRegs[0], ExceptionReg);
Tim Northoverc9449702017-01-30 20:52:42 +00001781
Tim Northover542d1c12017-03-07 23:04:06 +00001782 unsigned SelectorReg = TLI.getExceptionSelectorRegister(PersonalityFn);
1783 if (!SelectorReg)
1784 return false;
Tim Northoverc9449702017-01-30 20:52:42 +00001785
Tim Northover542d1c12017-03-07 23:04:06 +00001786 MBB.addLiveIn(SelectorReg);
Tim Northover542d1c12017-03-07 23:04:06 +00001787 unsigned PtrVReg = MRI->createGenericVirtualRegister(Tys[0]);
1788 MIRBuilder.buildCopy(PtrVReg, SelectorReg);
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001789 MIRBuilder.buildCast(ResRegs[1], PtrVReg);
Tim Northover542d1c12017-03-07 23:04:06 +00001790
Tim Northovera9105be2016-11-09 22:39:54 +00001791 return true;
1792}
1793
Tim Northoverc3e3f592017-02-03 18:22:45 +00001794bool IRTranslator::translateAlloca(const User &U,
1795 MachineIRBuilder &MIRBuilder) {
1796 auto &AI = cast<AllocaInst>(U);
Quentin Colombet3bb32cc2016-08-26 23:49:05 +00001797
Amara Emersonfdd089a2018-07-26 01:25:58 +00001798 if (AI.isSwiftError())
Tim Northover3b2157a2019-05-24 08:40:13 +00001799 return true;
Amara Emersonfdd089a2018-07-26 01:25:58 +00001800
Tim Northoverc3e3f592017-02-03 18:22:45 +00001801 if (AI.isStaticAlloca()) {
1802 unsigned Res = getOrCreateVReg(AI);
1803 int FI = getOrCreateFrameIndex(AI);
1804 MIRBuilder.buildFrameIndex(Res, FI);
1805 return true;
1806 }
1807
Martin Storsjoa63a5b92018-02-17 14:26:32 +00001808 // FIXME: support stack probing for Windows.
1809 if (MF->getTarget().getTargetTriple().isOSWindows())
1810 return false;
1811
Tim Northoverc3e3f592017-02-03 18:22:45 +00001812 // Now we're in the harder dynamic case.
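  // The expansion below is the usual "bump the stack pointer" scheme: copy
  // the stack pointer, offset it by the negated allocation size via G_GEP,
  // re-align the result if required, and copy it back into the stack
  // pointer register.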
1813 Type *Ty = AI.getAllocatedType();
1814 unsigned Align =
1815 std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI.getAlignment());
1816
1817 unsigned NumElts = getOrCreateVReg(*AI.getArraySize());
1818
Ahmed Bougacha2fb80302017-03-15 19:21:11 +00001819 Type *IntPtrIRTy = DL->getIntPtrType(AI.getType());
1820 LLT IntPtrTy = getLLTForType(*IntPtrIRTy, *DL);
Tim Northoverc3e3f592017-02-03 18:22:45 +00001821 if (MRI->getType(NumElts) != IntPtrTy) {
1822 unsigned ExtElts = MRI->createGenericVirtualRegister(IntPtrTy);
1823 MIRBuilder.buildZExtOrTrunc(ExtElts, NumElts);
1824 NumElts = ExtElts;
1825 }
1826
1827 unsigned AllocSize = MRI->createGenericVirtualRegister(IntPtrTy);
Ahmed Bougacha2fb80302017-03-15 19:21:11 +00001828 unsigned TySize =
1829 getOrCreateVReg(*ConstantInt::get(IntPtrIRTy, -DL->getTypeAllocSize(Ty)));
Tim Northoverc3e3f592017-02-03 18:22:45 +00001830 MIRBuilder.buildMul(AllocSize, NumElts, TySize);
1831
Daniel Sanders52b4ce72017-03-07 23:20:35 +00001832 LLT PtrTy = getLLTForType(*AI.getType(), *DL);
Tim Northoverc3e3f592017-02-03 18:22:45 +00001833 auto &TLI = *MF->getSubtarget().getTargetLowering();
1834 unsigned SPReg = TLI.getStackPointerRegisterToSaveRestore();
1835
1836 unsigned SPTmp = MRI->createGenericVirtualRegister(PtrTy);
1837 MIRBuilder.buildCopy(SPTmp, SPReg);
1838
Tim Northoverc2f89562017-02-14 20:56:18 +00001839 unsigned AllocTmp = MRI->createGenericVirtualRegister(PtrTy);
1840 MIRBuilder.buildGEP(AllocTmp, SPTmp, AllocSize);
Tim Northoverc3e3f592017-02-03 18:22:45 +00001841
1842 // Handle alignment. We have to realign if the allocation granule was smaller
1843 // than stack alignment, or the specific alloca requires more than stack
1844 // alignment.
1845 unsigned StackAlign =
1846 MF->getSubtarget().getFrameLowering()->getStackAlignment();
1847 Align = std::max(Align, StackAlign);
1848 if (Align > StackAlign || DL->getTypeAllocSize(Ty) % StackAlign != 0) {
1849 // Round the size of the allocation up to the stack alignment size
1850 // by adding SA-1 to the size. This doesn't overflow because we're computing
1851 // an address inside an alloca.
Tim Northoverc2f89562017-02-14 20:56:18 +00001852 unsigned AlignedAlloc = MRI->createGenericVirtualRegister(PtrTy);
1853 MIRBuilder.buildPtrMask(AlignedAlloc, AllocTmp, Log2_32(Align));
1854 AllocTmp = AlignedAlloc;
Tim Northoverc3e3f592017-02-03 18:22:45 +00001855 }
1856
Tim Northoverc2f89562017-02-14 20:56:18 +00001857 MIRBuilder.buildCopy(SPReg, AllocTmp);
1858 MIRBuilder.buildCopy(getOrCreateVReg(AI), AllocTmp);
Tim Northoverc3e3f592017-02-03 18:22:45 +00001859
1860 MF->getFrameInfo().CreateVariableSizedObject(Align ? Align : 1, &AI);
1861 assert(MF->getFrameInfo().hasVarSizedObjects());
Tim Northoverbd505462016-07-22 16:59:52 +00001862 return true;
1863}
1864
Tim Northover4a652222017-02-15 23:22:33 +00001865bool IRTranslator::translateVAArg(const User &U, MachineIRBuilder &MIRBuilder) {
1866 // FIXME: We may need more info about the type. Because of how LLT works,
1867 // we're completely discarding the i64/double distinction here (amongst
1868 // others). Fortunately the ABIs I know of where that matters don't use va_arg
1869 // anyway, but that's not guaranteed.
1870 MIRBuilder.buildInstr(TargetOpcode::G_VAARG)
1871 .addDef(getOrCreateVReg(U))
1872 .addUse(getOrCreateVReg(*U.getOperand(0)))
1873 .addImm(DL->getABITypeAlignment(U.getType()));
1874 return true;
1875}
1876
Volkan Keles04cb08c2017-03-10 19:08:28 +00001877bool IRTranslator::translateInsertElement(const User &U,
1878 MachineIRBuilder &MIRBuilder) {
1879 // If it is a <1 x Ty> vector, use the scalar directly, since
1880 // <1 x Ty> is not a legal vector type in LLT.
1881 if (U.getType()->getVectorNumElements() == 1) {
1882 unsigned Elt = getOrCreateVReg(*U.getOperand(1));
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001883 auto &Regs = *VMap.getVRegs(U);
1884 if (Regs.empty()) {
1885 Regs.push_back(Elt);
1886 VMap.getOffsets(U)->push_back(0);
1887 } else {
1888 MIRBuilder.buildCopy(Regs[0], Elt);
1889 }
Volkan Keles04cb08c2017-03-10 19:08:28 +00001890 return true;
1891 }
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001892
Kristof Beyls7a713502017-04-19 06:38:37 +00001893 unsigned Res = getOrCreateVReg(U);
1894 unsigned Val = getOrCreateVReg(*U.getOperand(0));
1895 unsigned Elt = getOrCreateVReg(*U.getOperand(1));
1896 unsigned Idx = getOrCreateVReg(*U.getOperand(2));
1897 MIRBuilder.buildInsertVectorElement(Res, Val, Elt, Idx);
Volkan Keles04cb08c2017-03-10 19:08:28 +00001898 return true;
1899}
1900
1901bool IRTranslator::translateExtractElement(const User &U,
1902 MachineIRBuilder &MIRBuilder) {
1903 // If it is a <1 x Ty> vector, use the scalar directly, since
1904 // <1 x Ty> is not a legal vector type in LLT.
1905 if (U.getOperand(0)->getType()->getVectorNumElements() == 1) {
1906 unsigned Elt = getOrCreateVReg(*U.getOperand(0));
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001907 auto &Regs = *VMap.getVRegs(U);
1908 if (Regs.empty()) {
1909 Regs.push_back(Elt);
1910 VMap.getOffsets(U)->push_back(0);
1911 } else {
1912 MIRBuilder.buildCopy(Regs[0], Elt);
1913 }
Volkan Keles04cb08c2017-03-10 19:08:28 +00001914 return true;
1915 }
Kristof Beyls7a713502017-04-19 06:38:37 +00001916 unsigned Res = getOrCreateVReg(U);
1917 unsigned Val = getOrCreateVReg(*U.getOperand(0));
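  // Canonicalize the index to the target's preferred vector-index width:
  // constant indices are rebuilt at that width, and anything else is
  // sign-extended or truncated, so G_EXTRACT_VECTOR_ELT always sees a
  // uniform index type.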
Amara Emersoncbd86d82018-10-25 14:04:54 +00001918 const auto &TLI = *MF->getSubtarget().getTargetLowering();
1919 unsigned PreferredVecIdxWidth = TLI.getVectorIdxTy(*DL).getSizeInBits();
1920 unsigned Idx = 0;
1921 if (auto *CI = dyn_cast<ConstantInt>(U.getOperand(1))) {
1922 if (CI->getBitWidth() != PreferredVecIdxWidth) {
1923 APInt NewIdx = CI->getValue().sextOrTrunc(PreferredVecIdxWidth);
1924 auto *NewIdxCI = ConstantInt::get(CI->getContext(), NewIdx);
1925 Idx = getOrCreateVReg(*NewIdxCI);
1926 }
1927 }
1928 if (!Idx)
1929 Idx = getOrCreateVReg(*U.getOperand(1));
1930 if (MRI->getType(Idx).getSizeInBits() != PreferredVecIdxWidth) {
1931 const LLT &VecIdxTy = LLT::scalar(PreferredVecIdxWidth);
1932 Idx = MIRBuilder.buildSExtOrTrunc(VecIdxTy, Idx)->getOperand(0).getReg();
1933 }
Kristof Beyls7a713502017-04-19 06:38:37 +00001934 MIRBuilder.buildExtractVectorElement(Res, Val, Idx);
Volkan Keles04cb08c2017-03-10 19:08:28 +00001935 return true;
1936}
1937
Volkan Keles75bdc762017-03-21 08:44:13 +00001938bool IRTranslator::translateShuffleVector(const User &U,
1939 MachineIRBuilder &MIRBuilder) {
1940 MIRBuilder.buildInstr(TargetOpcode::G_SHUFFLE_VECTOR)
1941 .addDef(getOrCreateVReg(U))
1942 .addUse(getOrCreateVReg(*U.getOperand(0)))
1943 .addUse(getOrCreateVReg(*U.getOperand(1)))
1944 .addUse(getOrCreateVReg(*U.getOperand(2)));
1945 return true;
1946}
1947
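// Emit one empty G_PHI per component vreg of the PHI's value and queue the
// IR PHI; the incoming (value, block) operands are only filled in by
// finishPendingPhis once all predecessor blocks have been translated.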
Tim Northoverc53606e2016-12-07 21:29:15 +00001948bool IRTranslator::translatePHI(const User &U, MachineIRBuilder &MIRBuilder) {
Tim Northover357f1be2016-08-10 23:02:41 +00001949 const PHINode &PI = cast<PHINode>(U);
Tim Northover97d0cb32016-08-05 17:16:40 +00001950
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001951 SmallVector<MachineInstr *, 4> Insts;
1952 for (auto Reg : getOrCreateVRegs(PI)) {
Aditya Nandakumarcef44a22018-12-11 00:48:50 +00001953 auto MIB = MIRBuilder.buildInstr(TargetOpcode::G_PHI, {Reg}, {});
Amara Emerson0d6a26d2018-05-16 10:32:02 +00001954 Insts.push_back(MIB.getInstr());
1955 }
1956
1957 PendingPHIs.emplace_back(&PI, std::move(Insts));
Tim Northover97d0cb32016-08-05 17:16:40 +00001958 return true;
1959}
1960
Daniel Sanders94813992018-07-09 19:33:40 +00001961bool IRTranslator::translateAtomicCmpXchg(const User &U,
1962 MachineIRBuilder &MIRBuilder) {
1963 const AtomicCmpXchgInst &I = cast<AtomicCmpXchgInst>(U);
1964
1965 if (I.isWeak())
1966 return false;
1967
1968 auto Flags = I.isVolatile() ? MachineMemOperand::MOVolatile
1969 : MachineMemOperand::MONone;
1970 Flags |= MachineMemOperand::MOLoad | MachineMemOperand::MOStore;
1971
1972 Type *ResType = I.getType();
1973 Type *ValType = ResType->getStructElementType(0);
1974
1975 auto Res = getOrCreateVRegs(I);
1976 unsigned OldValRes = Res[0];
1977 unsigned SuccessRes = Res[1];
1978 unsigned Addr = getOrCreateVReg(*I.getPointerOperand());
1979 unsigned Cmp = getOrCreateVReg(*I.getCompareOperand());
1980 unsigned NewVal = getOrCreateVReg(*I.getNewValOperand());
1981
1982 MIRBuilder.buildAtomicCmpXchgWithSuccess(
1983 OldValRes, SuccessRes, Addr, Cmp, NewVal,
1984 *MF->getMachineMemOperand(MachinePointerInfo(I.getPointerOperand()),
1985 Flags, DL->getTypeStoreSize(ValType),
1986 getMemOpAlignment(I), AAMDNodes(), nullptr,
1987 I.getSyncScopeID(), I.getSuccessOrdering(),
1988 I.getFailureOrdering()));
1989 return true;
1990}
1991
1992bool IRTranslator::translateAtomicRMW(const User &U,
1993 MachineIRBuilder &MIRBuilder) {
1994 const AtomicRMWInst &I = cast<AtomicRMWInst>(U);
1995
1996 auto Flags = I.isVolatile() ? MachineMemOperand::MOVolatile
1997 : MachineMemOperand::MONone;
1998 Flags |= MachineMemOperand::MOLoad | MachineMemOperand::MOStore;
1999
2000 Type *ResType = I.getType();
2001
2002 unsigned Res = getOrCreateVReg(I);
2003 unsigned Addr = getOrCreateVReg(*I.getPointerOperand());
2004 unsigned Val = getOrCreateVReg(*I.getValOperand());
2005
2006 unsigned Opcode = 0;
2007 switch (I.getOperation()) {
2008 default:
2009 llvm_unreachable("Unknown atomicrmw op");
2010 return false;
2011 case AtomicRMWInst::Xchg:
2012 Opcode = TargetOpcode::G_ATOMICRMW_XCHG;
2013 break;
2014 case AtomicRMWInst::Add:
2015 Opcode = TargetOpcode::G_ATOMICRMW_ADD;
2016 break;
2017 case AtomicRMWInst::Sub:
2018 Opcode = TargetOpcode::G_ATOMICRMW_SUB;
2019 break;
2020 case AtomicRMWInst::And:
2021 Opcode = TargetOpcode::G_ATOMICRMW_AND;
2022 break;
2023 case AtomicRMWInst::Nand:
2024 Opcode = TargetOpcode::G_ATOMICRMW_NAND;
2025 break;
2026 case AtomicRMWInst::Or:
2027 Opcode = TargetOpcode::G_ATOMICRMW_OR;
2028 break;
2029 case AtomicRMWInst::Xor:
2030 Opcode = TargetOpcode::G_ATOMICRMW_XOR;
2031 break;
2032 case AtomicRMWInst::Max:
2033 Opcode = TargetOpcode::G_ATOMICRMW_MAX;
2034 break;
2035 case AtomicRMWInst::Min:
2036 Opcode = TargetOpcode::G_ATOMICRMW_MIN;
2037 break;
2038 case AtomicRMWInst::UMax:
2039 Opcode = TargetOpcode::G_ATOMICRMW_UMAX;
2040 break;
2041 case AtomicRMWInst::UMin:
2042 Opcode = TargetOpcode::G_ATOMICRMW_UMIN;
2043 break;
2044 }
2045
2046 MIRBuilder.buildAtomicRMW(
2047 Opcode, Res, Addr, Val,
2048 *MF->getMachineMemOperand(MachinePointerInfo(I.getPointerOperand()),
2049 Flags, DL->getTypeStoreSize(ResType),
2050 getMemOpAlignment(I), AAMDNodes(), nullptr,
2051 I.getSyncScopeID(), I.getOrdering()));
2052 return true;
2053}
2054
Tim Northover97d0cb32016-08-05 17:16:40 +00002055void IRTranslator::finishPendingPhis() {
Daniel Sanders3b390402018-10-31 17:31:23 +00002056#ifndef NDEBUG
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002057 DILocationVerifier Verifier;
2058 GISelObserverWrapper WrapperObserver(&Verifier);
2059 RAIIDelegateInstaller DelInstall(*MF, &WrapperObserver);
Daniel Sanders3b390402018-10-31 17:31:23 +00002060#endif // ifndef NDEBUG
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002061 for (auto &Phi : PendingPHIs) {
Tim Northover97d0cb32016-08-05 17:16:40 +00002062 const PHINode *PI = Phi.first;
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002063 ArrayRef<MachineInstr *> ComponentPHIs = Phi.second;
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002064 EntryBuilder->setDebugLoc(PI->getDebugLoc());
Daniel Sanders3b390402018-10-31 17:31:23 +00002065#ifndef NDEBUG
2066 Verifier.setCurrentInst(PI);
2067#endif // ifndef NDEBUG
Tim Northover97d0cb32016-08-05 17:16:40 +00002068
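    // Several IR edges can map onto the same machine predecessor, so track
    // the predecessors already handled to add each (value, block) pair to
    // the G_PHIs exactly once.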
Amara Emersonfe4625f2019-06-21 18:10:38 +00002069 SmallSet<const MachineBasicBlock *, 16> SeenPreds;
Tim Northover97d0cb32016-08-05 17:16:40 +00002070 for (unsigned i = 0; i < PI->getNumIncomingValues(); ++i) {
Tim Northoverb6636fd2017-01-17 22:13:50 +00002071 auto IRPred = PI->getIncomingBlock(i);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00002072 ArrayRef<Register> ValRegs = getOrCreateVRegs(*PI->getIncomingValue(i));
Tim Northoverb6636fd2017-01-17 22:13:50 +00002073 for (auto Pred : getMachinePredBBs({IRPred, PI->getParent()})) {
Amara Emersonfe4625f2019-06-21 18:10:38 +00002074 if (SeenPreds.count(Pred))
2075 continue;
2076 SeenPreds.insert(Pred);
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002077 for (unsigned j = 0; j < ValRegs.size(); ++j) {
2078 MachineInstrBuilder MIB(*MF, ComponentPHIs[j]);
2079 MIB.addUse(ValRegs[j]);
2080 MIB.addMBB(Pred);
2081 }
Tim Northoverb6636fd2017-01-17 22:13:50 +00002082 }
Tim Northover97d0cb32016-08-05 17:16:40 +00002083 }
2084 }
2085}
2086
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002087bool IRTranslator::valueIsSplit(const Value &V,
2088 SmallVectorImpl<uint64_t> *Offsets) {
2089 SmallVector<LLT, 4> SplitTys;
Amara Emerson30e61402018-08-14 12:04:25 +00002090 if (Offsets && !Offsets->empty())
2091 Offsets->clear();
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002092 computeValueLLTs(*DL, *V.getType(), SplitTys, Offsets);
2093 return SplitTys.size() > 1;
2094}
2095
Quentin Colombet2ecff3b2016-02-10 22:59:27 +00002096bool IRTranslator::translate(const Instruction &Inst) {
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002097 CurBuilder->setDebugLoc(Inst.getDebugLoc());
Amara Emersonfb0a40f2019-06-13 22:15:35 +00002098 // We only emit constants into the entry block from here. To prevent jumpy
2099 // debug behaviour, set the line to 0.
2100 if (const DebugLoc &DL = Inst.getDebugLoc())
2101 EntryBuilder->setDebugLoc(
2102 DebugLoc::get(0, 0, DL.getScope(), DL.getInlinedAt()));
2103 else
2104 EntryBuilder->setDebugLoc(DebugLoc());
2105
2106 switch (Inst.getOpcode()) {
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002107#define HANDLE_INST(NUM, OPCODE, CLASS) \
2108 case Instruction::OPCODE: \
2109 return translate##OPCODE(Inst, *CurBuilder.get());
Tim Northover357f1be2016-08-10 23:02:41 +00002110#include "llvm/IR/Instruction.def"
Quentin Colombet74d7d2f2016-02-11 18:53:28 +00002111 default:
Quentin Colombetee8a4f52017-03-11 00:28:33 +00002112 return false;
Quentin Colombet2ecff3b2016-02-10 22:59:27 +00002113 }
Quentin Colombet105cf2b2016-01-20 20:58:56 +00002114}
2115
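// Materialize the constant C into Reg. The defining instructions are built
// in the entry block so they dominate all uses; returns false for constant
// kinds that are not handled yet.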
Tim Northover5ed648e2016-08-09 21:28:04 +00002116bool IRTranslator::translate(const Constant &C, unsigned Reg) {
Tim Northoverd403a3d2016-08-09 23:01:30 +00002117 if (auto CI = dyn_cast<ConstantInt>(&C))
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002118 EntryBuilder->buildConstant(Reg, *CI);
Tim Northoverb16734f2016-08-19 20:09:15 +00002119 else if (auto CF = dyn_cast<ConstantFP>(&C))
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002120 EntryBuilder->buildFConstant(Reg, *CF);
Tim Northoverd403a3d2016-08-09 23:01:30 +00002121 else if (isa<UndefValue>(C))
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002122 EntryBuilder->buildUndef(Reg);
Aditya Nandakumarb3297ef2018-03-22 17:31:38 +00002123 else if (isa<ConstantPointerNull>(C)) {
2124 // A null pointer is materialized as an integer constant 0 of the
2125 // pointer's width, so insert a cast to give it the correct pointer type.
2126 unsigned NullSize = DL->getTypeSizeInBits(C.getType());
2127 auto *ZeroTy = Type::getIntNTy(C.getContext(), NullSize);
2128 auto *ZeroVal = ConstantInt::get(ZeroTy, 0);
2129 unsigned ZeroReg = getOrCreateVReg(*ZeroVal);
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002130 EntryBuilder->buildCast(Reg, ZeroReg);
Aditya Nandakumarb3297ef2018-03-22 17:31:38 +00002131 } else if (auto GV = dyn_cast<GlobalValue>(&C))
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002132 EntryBuilder->buildGlobalValue(Reg, GV);
Volkan Keles970fee42017-03-10 21:23:13 +00002133 else if (auto CAZ = dyn_cast<ConstantAggregateZero>(&C)) {
2134 if (!CAZ->getType()->isVectorTy())
2135 return false;
Volkan Keles4862c632017-03-14 23:45:06 +00002136 // Return the scalar if it is a <1 x Ty> vector.
2137 if (CAZ->getNumElements() == 1)
2138 return translate(*CAZ->getElementValue(0u), Reg);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00002139 SmallVector<Register, 4> Ops;
Volkan Keles970fee42017-03-10 21:23:13 +00002140 for (unsigned i = 0; i < CAZ->getNumElements(); ++i) {
2141 Constant &Elt = *CAZ->getElementValue(i);
2142 Ops.push_back(getOrCreateVReg(Elt));
2143 }
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002144 EntryBuilder->buildBuildVector(Reg, Ops);
Volkan Keles38a91a02017-03-13 21:36:19 +00002145 } else if (auto CV = dyn_cast<ConstantDataVector>(&C)) {
Volkan Keles4862c632017-03-14 23:45:06 +00002146 // Return the scalar if it is a <1 x Ty> vector.
2147 if (CV->getNumElements() == 1)
2148 return translate(*CV->getElementAsConstant(0), Reg);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00002149 SmallVector<Register, 4> Ops;
Volkan Keles38a91a02017-03-13 21:36:19 +00002150 for (unsigned i = 0; i < CV->getNumElements(); ++i) {
2151 Constant &Elt = *CV->getElementAsConstant(i);
2152 Ops.push_back(getOrCreateVReg(Elt));
2153 }
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002154 EntryBuilder->buildBuildVector(Reg, Ops);
Volkan Keles970fee42017-03-10 21:23:13 +00002155 } else if (auto CE = dyn_cast<ConstantExpr>(&C)) {
Tim Northover357f1be2016-08-10 23:02:41 +00002156 switch(CE->getOpcode()) {
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002157#define HANDLE_INST(NUM, OPCODE, CLASS) \
2158 case Instruction::OPCODE: \
2159 return translate##OPCODE(*CE, *EntryBuilder.get());
Tim Northover357f1be2016-08-10 23:02:41 +00002160#include "llvm/IR/Instruction.def"
2161 default:
Quentin Colombetee8a4f52017-03-11 00:28:33 +00002162 return false;
Tim Northover357f1be2016-08-10 23:02:41 +00002163 }
Aditya Nandakumar117b6672017-05-04 21:43:12 +00002164 } else if (auto CV = dyn_cast<ConstantVector>(&C)) {
2165 if (CV->getNumOperands() == 1)
2166 return translate(*CV->getOperand(0), Reg);
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00002167 SmallVector<Register, 4> Ops;
Aditya Nandakumar117b6672017-05-04 21:43:12 +00002168 for (unsigned i = 0; i < CV->getNumOperands(); ++i) {
2169 Ops.push_back(getOrCreateVReg(*CV->getOperand(i)));
2170 }
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002171 EntryBuilder->buildBuildVector(Reg, Ops);
Amara Emerson6aff5a72018-07-31 00:08:50 +00002172 } else if (auto *BA = dyn_cast<BlockAddress>(&C)) {
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002173 EntryBuilder->buildBlockAddress(Reg, BA);
Quentin Colombetee8a4f52017-03-11 00:28:33 +00002174 } else
Quentin Colombet3bb32cc2016-08-26 23:49:05 +00002175 return false;
Tim Northover5ed648e2016-08-09 21:28:04 +00002176
Tim Northoverd403a3d2016-08-09 23:01:30 +00002177 return true;
Tim Northover5ed648e2016-08-09 21:28:04 +00002178}
2179
Amara Emersonfe4625f2019-06-21 18:10:38 +00002180void IRTranslator::finalizeBasicBlock() {
2181 for (auto &JTCase : SL->JTCases)
2182 emitJumpTable(JTCase.second, JTCase.second.MBB);
2183 SL->JTCases.clear();
2184}
2185
Tim Northover0d510442016-08-11 16:21:29 +00002186void IRTranslator::finalizeFunction() {
Quentin Colombet2ecff3b2016-02-10 22:59:27 +00002187 // Release the memory used by the different maps we
2188 // needed during the translation.
Tim Northover800638f2016-12-05 23:10:19 +00002189 PendingPHIs.clear();
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002190 VMap.reset();
Tim Northovercdf23f12016-10-31 18:30:59 +00002191 FrameIndices.clear();
Tim Northoverb6636fd2017-01-17 22:13:50 +00002192 MachinePreds.clear();
Aditya Nandakumarbe929932017-05-17 17:41:55 +00002193 // MachineIRBuilder::DebugLoc can outlive the DILocation it holds. Clear it
2194 // to avoid accessing freed memory (in runOnMachineFunction) and to avoid
2195 // destroying it twice (in ~IRTranslator() and ~LLVMContext()).
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002196 EntryBuilder.reset();
2197 CurBuilder.reset();
Amara Emersonfe4625f2019-06-21 18:10:38 +00002198 FuncInfo.clear();
Quentin Colombet105cf2b2016-01-20 20:58:56 +00002199}
2200
Tim Northover50db7f412016-12-07 21:17:47 +00002201bool IRTranslator::runOnMachineFunction(MachineFunction &CurMF) {
2202 MF = &CurMF;
Matthias Braunf1caa282017-12-15 22:22:58 +00002203 const Function &F = MF->getFunction();
Quentin Colombetfd9d0a02016-02-11 19:59:41 +00002204 if (F.empty())
2205 return false;
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002206 GISelCSEAnalysisWrapper &Wrapper =
2207 getAnalysis<GISelCSEAnalysisWrapperPass>().getCSEWrapper();
2208 // Set the CSEConfig and run the analysis.
2209 GISelCSEInfo *CSEInfo = nullptr;
2210 TPC = &getAnalysis<TargetPassConfig>();
Aditya Nandakumar3ba0d942019-01-24 23:11:25 +00002211 bool EnableCSE = EnableCSEInIRTranslator.getNumOccurrences()
2212 ? EnableCSEInIRTranslator
2213 : TPC->isGISelCSEEnabled();
2214
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002215 if (EnableCSE) {
2216 EntryBuilder = make_unique<CSEMIRBuilder>(CurMF);
Amara Emersond1896802019-04-15 04:53:46 +00002217 CSEInfo = &Wrapper.get(TPC->getCSEConfig());
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002218 EntryBuilder->setCSEInfo(CSEInfo);
2219 CurBuilder = make_unique<CSEMIRBuilder>(CurMF);
2220 CurBuilder->setCSEInfo(CSEInfo);
2221 } else {
2222 EntryBuilder = make_unique<MachineIRBuilder>();
2223 CurBuilder = make_unique<MachineIRBuilder>();
2224 }
Tim Northover50db7f412016-12-07 21:17:47 +00002225 CLI = MF->getSubtarget().getCallLowering();
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002226 CurBuilder->setMF(*MF);
2227 EntryBuilder->setMF(*MF);
Tim Northover50db7f412016-12-07 21:17:47 +00002228 MRI = &MF->getRegInfo();
Tim Northoverbd505462016-07-22 16:59:52 +00002229 DL = &F.getParent()->getDataLayout();
Eugene Zelenko76bf48d2017-06-26 22:44:03 +00002230 ORE = llvm::make_unique<OptimizationRemarkEmitter>(&F);
Amara Emersonfe4625f2019-06-21 18:10:38 +00002231 FuncInfo.MF = MF;
2232 FuncInfo.BPI = nullptr;
2233 const auto &TLI = *MF->getSubtarget().getTargetLowering();
2234 const TargetMachine &TM = MF->getTarget();
2235 SL = make_unique<GISelSwitchLowering>(this, FuncInfo);
2236 SL->init(TLI, TM, *DL);
2237
2238 EnableOpts = TM.getOptLevel() != CodeGenOpt::None && !skipFunction(F);
Tim Northoverbd505462016-07-22 16:59:52 +00002239
Tim Northover14e7f732016-08-05 17:50:36 +00002240 assert(PendingPHIs.empty() && "stale PHIs");
2241
Amara Emersondf9b5292017-12-11 16:58:29 +00002242 if (!DL->isLittleEndian()) {
2243 // Currently we don't properly handle big endian code.
2244 OptimizationRemarkMissed R("gisel-irtranslator", "GISelFailure",
Matthias Braunf1caa282017-12-15 22:22:58 +00002245 F.getSubprogram(), &F.getEntryBlock());
Amara Emersondf9b5292017-12-11 16:58:29 +00002246 R << "unable to translate in big endian mode";
2247 reportTranslationError(*MF, *TPC, *ORE, R);
2248 }
2249
Ahmed Bougachaeceabdd2017-02-23 23:57:28 +00002250 // Release the per-function state when we return, whether we succeeded or not.
2251 auto FinalizeOnReturn = make_scope_exit([this]() { finalizeFunction(); });
2252
Ahmed Bougachaa61c2142017-03-15 18:22:33 +00002253 // Set up a separate basic block for the arguments and constants
Tim Northover50db7f412016-12-07 21:17:47 +00002254 MachineBasicBlock *EntryBB = MF->CreateMachineBasicBlock();
2255 MF->push_back(EntryBB);
Aditya Nandakumar500e3ea2019-01-16 00:40:37 +00002256 EntryBuilder->setMBB(*EntryBB);
Tim Northover05cc4852016-12-07 21:05:38 +00002257
Tim Northover3b2157a2019-05-24 08:40:13 +00002258 DebugLoc DbgLoc = F.getEntryBlock().getFirstNonPHI()->getDebugLoc();
2259 SwiftError.setFunction(CurMF);
2260 SwiftError.createEntriesInEntryBlock(DbgLoc);
2261
Ahmed Bougachaa61c2142017-03-15 18:22:33 +00002262 // Create all blocks, in IR order, to preserve the layout.
2263 for (const BasicBlock &BB: F) {
2264 auto *&MBB = BBToMBB[&BB];
2265
2266 MBB = MF->CreateMachineBasicBlock(&BB);
2267 MF->push_back(MBB);
2268
2269 if (BB.hasAddressTaken())
2270 MBB->setHasAddressTaken();
2271 }
2272
2273 // Make our arguments/constants entry block fall through to the IR entry block.
2274 EntryBB->addSuccessor(&getMBB(F.front()));
2275
Tim Northover05cc4852016-12-07 21:05:38 +00002276 // Lower the actual args into this basic block.
Matt Arsenaulte3a676e2019-06-24 15:50:29 +00002277 SmallVector<Register, 8> VRegArgs;
Amara Emersond78d65c2017-11-30 20:06:02 +00002278 for (const Argument &Arg: F.args()) {
2279 if (DL->getTypeStoreSize(Arg.getType()) == 0)
2280 continue; // Don't handle zero sized types.
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002281 VRegArgs.push_back(
2282 MRI->createGenericVirtualRegister(getLLTForType(*Arg.getType(), *DL)));
Tim Northover3b2157a2019-05-24 08:40:13 +00002283
2284 if (Arg.hasSwiftErrorAttr())
2285 SwiftError.setCurrentVReg(EntryBB, SwiftError.getFunctionArg(),
2286 VRegArgs.back());
Amara Emersond78d65c2017-11-30 20:06:02 +00002287 }
Amara Emerson0d6a26d2018-05-16 10:32:02 +00002288
Amara Emersonfdd089a2018-07-26 01:25:58 +00002289 // We don't currently support translating functions with swiftself arguments.
2290 for (auto &Arg : F.args()) {
Tim Northover3b2157a2019-05-24 08:40:13 +00002291 if (Arg.hasSwiftSelfAttr()) {
Amara Emersonfdd089a2018-07-26 01:25:58 +00002292 OptimizationRemarkMissed R("gisel-irtranslator", "GISelFailure",
2293 F.getSubprogram(), &F.getEntryBlock());
Tim Northover3b2157a2019-05-24 08:40:13 +00002294 R << "unable to lower arguments due to swiftself: "
Amara Emersonfdd089a2018-07-26 01:25:58 +00002295 << ore::NV("Prototype", F.getType());
2296 reportTranslationError(*MF, *TPC, *ORE, R);
2297 return false;
2298 }
2299 }

  if (!CLI->lowerFormalArguments(*EntryBuilder.get(), F, VRegArgs)) {
    OptimizationRemarkMissed R("gisel-irtranslator", "GISelFailure",
                               F.getSubprogram(), &F.getEntryBlock());
    R << "unable to lower arguments: " << ore::NV("Prototype", F.getType());
    reportTranslationError(*MF, *TPC, *ORE, R);
    return false;
  }

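  // Wire the argument vregs into the value map. Call lowering wrote each
  // argument into the single big vreg created above; arguments that map to
  // more than one vreg (e.g. aggregates) are split apart here.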
  auto ArgIt = F.arg_begin();
  for (auto &VArg : VRegArgs) {
    // If the argument is an unsplit scalar then don't use unpackRegs to avoid
    // creating redundant copies.
    if (!valueIsSplit(*ArgIt, VMap.getOffsets(*ArgIt))) {
      auto &VRegs = *VMap.getVRegs(cast<Value>(*ArgIt));
      assert(VRegs.empty() && "VRegs already populated?");
      VRegs.push_back(VArg);
    } else {
      unpackRegs(*ArgIt, VArg, *EntryBuilder.get());
    }
    ++ArgIt;
  }

  GISelObserverWrapper WrapperObserver;
  if (EnableCSE && CSEInfo)
    WrapperObserver.addObserver(CSEInfo);
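  // Within the scope below, WrapperObserver is installed as the machine
  // function's delegate, so the CSE info (and, in debug builds, the debug
  // location verifier) get notified about every instruction we build.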
  {
    // Visit defs before uses when translating instructions: a reverse
    // post-order traversal of the CFG ensures each non-PHI use is visited
    // after its def (PHIs are fixed up separately below).
    ReversePostOrderTraversal<const Function *> RPOT(&F);
#ifndef NDEBUG
    DILocationVerifier Verifier;
    WrapperObserver.addObserver(&Verifier);
#endif // ifndef NDEBUG
    RAIIDelegateInstaller DelInstall(*MF, &WrapperObserver);
    for (const BasicBlock *BB : RPOT) {
      MachineBasicBlock &MBB = getMBB(*BB);
      // Set the insertion point of all the following translations to
      // the end of this basic block.
      CurBuilder->setMBB(MBB);

      for (const Instruction &Inst : *BB) {
#ifndef NDEBUG
        Verifier.setCurrentInst(&Inst);
#endif // ifndef NDEBUG
        if (translate(Inst))
          continue;

        OptimizationRemarkMissed R("gisel-irtranslator", "GISelFailure",
                                   Inst.getDebugLoc(), BB);
        R << "unable to translate instruction: " << ore::NV("Opcode", &Inst);

        if (ORE->allowExtraAnalysis("gisel-irtranslator")) {
          std::string InstStrStorage;
          raw_string_ostream InstStr(InstStrStorage);
          InstStr << Inst;

          R << ": '" << InstStr.str() << "'";
        }

        reportTranslationError(*MF, *TPC, *ORE, R);
        return false;
      }

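      // Emit any extra blocks the switch lowering recorded for this basic
      // block (jump-table and bit-test blocks).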
      finalizeBasicBlock();
    }
#ifndef NDEBUG
    WrapperObserver.removeObserver(&Verifier);
#endif
  }

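  // Every block has been translated, so the operands of the PHIs deferred
  // during the walk above can now be filled in with their incoming vregs.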
  finishPendingPhis();

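  // Likewise, swifterror values can only be threaded through the CFG once
  // all of their definitions have been created.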
  SwiftError.propagateVRegs();

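  // A rough sketch of the merge below, with invented block names and AArch64
  // used purely for illustration. Before:
  //
  //   bb.0:                     ; synthetic args/constants block (EntryBB)
  //     %0:_(s32) = COPY $w0
  //   bb.1 (%ir-block.0):       ; the IR entry block
  //     ...
  //
  // After: bb.0's instructions are spliced to the front of bb.1, bb.0 is
  // deleted, and bb.1 becomes the machine function's entry block.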
  // Merge the argument lowering and constants block with its single
  // successor, the LLVM-IR entry block. We want the basic block to
  // be maximal.
  assert(EntryBB->succ_size() == 1 &&
         "Custom BB used for lowering should have only one successor");
  // Get the successor of the current entry block.
  MachineBasicBlock &NewEntryBB = **EntryBB->succ_begin();
  assert(NewEntryBB.pred_size() == 1 &&
         "LLVM-IR entry block has a predecessor!?");
  // Move all the instructions from the current entry block to the
  // new entry block.
  NewEntryBB.splice(NewEntryBB.begin(), EntryBB, EntryBB->begin(),
                    EntryBB->end());

  // Update the live-in information for the new entry block.
  for (const MachineBasicBlock::RegisterMaskPair &LiveIn : EntryBB->liveins())
    NewEntryBB.addLiveIn(LiveIn);
  NewEntryBB.sortUniqueLiveIns();

  // Get rid of the now empty basic block.
  EntryBB->removeSuccessor(&NewEntryBB);
  MF->remove(EntryBB);
  MF->DeleteMachineBasicBlock(EntryBB);

  assert(&MF->front() == &NewEntryBB &&
         "New entry wasn't next in the list of basic blocks!");

  // Initialize stack protector information.
  StackProtector &SP = getAnalysis<StackProtector>();
  SP.copyToMachineFrameInfo(MF->getFrameInfo());

  return false;
}