//===- AArch64InstructionSelector.cpp ----------------------------*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the targeting of the InstructionSelector class for
/// AArch64.
/// \todo This should be generated by TableGen.
//===----------------------------------------------------------------------===//

#include "AArch64InstructionSelector.h"
#include "AArch64InstrInfo.h"
#include "AArch64RegisterBankInfo.h"
#include "AArch64RegisterInfo.h"
#include "AArch64Subtarget.h"
#include "AArch64TargetMachine.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "aarch64-isel"

using namespace llvm;

#ifndef LLVM_BUILD_GLOBAL_ISEL
#error "You shouldn't build this"
#endif

AArch64InstructionSelector::AArch64InstructionSelector(
    const AArch64TargetMachine &TM, const AArch64Subtarget &STI,
    const AArch64RegisterBankInfo &RBI)
    : InstructionSelector(), TM(TM), STI(STI), TII(*STI.getInstrInfo()),
      TRI(*STI.getRegisterInfo()), RBI(RBI) {}

/// Check whether \p I is a currently unsupported binary operation:
/// - it has an unsized type
/// - an operand is not a vreg
/// - not all operands are in the same register bank
/// These are checks that should someday live in the verifier, but right now,
/// these are mostly limitations of the AArch64 selector.
static bool unsupportedBinOp(const MachineInstr &I,
                             const AArch64RegisterBankInfo &RBI,
                             const MachineRegisterInfo &MRI,
                             const AArch64RegisterInfo &TRI) {
  LLT Ty = MRI.getType(I.getOperand(0).getReg());
  if (!Ty.isValid()) {
    DEBUG(dbgs() << "Generic binop register should be typed\n");
    return true;
  }

  const RegisterBank *PrevOpBank = nullptr;
  for (auto &MO : I.operands()) {
    // FIXME: Support non-register operands.
    if (!MO.isReg()) {
      DEBUG(dbgs() << "Generic inst non-reg operands are unsupported\n");
      return true;
    }

    // FIXME: Can generic operations have physical register operands? If
    // so, this will need to be taught about that, and we'll need to get the
    // bank out of the minimal class for the register.
    // Either way, this needs to be documented (and possibly verified).
    if (!TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
      DEBUG(dbgs() << "Generic inst has physical register operand\n");
      return true;
    }

    const RegisterBank *OpBank = RBI.getRegBank(MO.getReg(), MRI, TRI);
    if (!OpBank) {
      DEBUG(dbgs() << "Generic register has no bank or class\n");
      return true;
    }

    if (PrevOpBank && OpBank != PrevOpBank) {
      DEBUG(dbgs() << "Generic inst operands have different banks\n");
      return true;
    }
    PrevOpBank = OpBank;
  }
  return false;
}

/// Select the AArch64 opcode for the basic binary operation \p GenericOpc
/// (such as G_OR or G_ADD), appropriate for the register bank \p RegBankID
/// and of size \p OpSize.
/// \returns \p GenericOpc if the combination is unsupported.
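/// For example, selectBinaryOp(TargetOpcode::G_ADD, AArch64::GPRRegBankID,
/// 32) yields AArch64::ADDWrr, while an unhandled combination (say, a 16-bit
/// operation) falls through and returns \p GenericOpc unchanged.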
static unsigned selectBinaryOp(unsigned GenericOpc, unsigned RegBankID,
                               unsigned OpSize) {
  switch (RegBankID) {
  case AArch64::GPRRegBankID:
    switch (OpSize) {
    case 32:
      switch (GenericOpc) {
      case TargetOpcode::G_OR:
        return AArch64::ORRWrr;
      case TargetOpcode::G_XOR:
        return AArch64::EORWrr;
      case TargetOpcode::G_AND:
        return AArch64::ANDWrr;
      case TargetOpcode::G_ADD:
        return AArch64::ADDWrr;
      case TargetOpcode::G_SUB:
        return AArch64::SUBWrr;
      case TargetOpcode::G_SHL:
        return AArch64::LSLVWr;
      case TargetOpcode::G_LSHR:
        return AArch64::LSRVWr;
      case TargetOpcode::G_ASHR:
        return AArch64::ASRVWr;
      case TargetOpcode::G_SDIV:
        return AArch64::SDIVWr;
      case TargetOpcode::G_UDIV:
        return AArch64::UDIVWr;
      default:
        return GenericOpc;
      }
    case 64:
      switch (GenericOpc) {
      case TargetOpcode::G_OR:
        return AArch64::ORRXrr;
      case TargetOpcode::G_XOR:
        return AArch64::EORXrr;
      case TargetOpcode::G_AND:
        return AArch64::ANDXrr;
      case TargetOpcode::G_ADD:
      case TargetOpcode::G_GEP:
        return AArch64::ADDXrr;
      case TargetOpcode::G_SUB:
        return AArch64::SUBXrr;
      case TargetOpcode::G_SHL:
        return AArch64::LSLVXr;
      case TargetOpcode::G_LSHR:
        return AArch64::LSRVXr;
      case TargetOpcode::G_ASHR:
        return AArch64::ASRVXr;
      case TargetOpcode::G_SDIV:
        return AArch64::SDIVXr;
      case TargetOpcode::G_UDIV:
        return AArch64::UDIVXr;
      default:
        return GenericOpc;
      }
    }
  case AArch64::FPRRegBankID:
    switch (OpSize) {
    case 32:
      switch (GenericOpc) {
      case TargetOpcode::G_FADD:
        return AArch64::FADDSrr;
      case TargetOpcode::G_FSUB:
        return AArch64::FSUBSrr;
      case TargetOpcode::G_FMUL:
        return AArch64::FMULSrr;
      case TargetOpcode::G_FDIV:
        return AArch64::FDIVSrr;
      default:
        return GenericOpc;
      }
    case 64:
      switch (GenericOpc) {
      case TargetOpcode::G_FADD:
        return AArch64::FADDDrr;
      case TargetOpcode::G_FSUB:
        return AArch64::FSUBDrr;
      case TargetOpcode::G_FMUL:
        return AArch64::FMULDrr;
      case TargetOpcode::G_FDIV:
        return AArch64::FDIVDrr;
      case TargetOpcode::G_OR:
        return AArch64::ORRv8i8;
      default:
        return GenericOpc;
      }
    }
  };
  return GenericOpc;
}

/// Select the AArch64 opcode for the G_LOAD or G_STORE operation \p GenericOpc,
/// appropriate for the (value) register bank \p RegBankID and of memory access
/// size \p OpSize. This returns the variant with the base+unsigned-immediate
/// addressing mode (e.g., LDRXui).
/// \returns \p GenericOpc if the combination is unsupported.
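/// For example, selectLoadStoreUIOp(TargetOpcode::G_LOAD,
/// AArch64::GPRRegBankID, 64) yields AArch64::LDRXui, the 64-bit GPR load
/// with an unsigned scaled immediate offset.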
static unsigned selectLoadStoreUIOp(unsigned GenericOpc, unsigned RegBankID,
                                    unsigned OpSize) {
  const bool isStore = GenericOpc == TargetOpcode::G_STORE;
  switch (RegBankID) {
  case AArch64::GPRRegBankID:
    switch (OpSize) {
    case 32:
      return isStore ? AArch64::STRWui : AArch64::LDRWui;
    case 64:
      return isStore ? AArch64::STRXui : AArch64::LDRXui;
    }
  case AArch64::FPRRegBankID:
    switch (OpSize) {
    case 32:
      return isStore ? AArch64::STRSui : AArch64::LDRSui;
    case 64:
      return isStore ? AArch64::STRDui : AArch64::LDRDui;
    }
  };
  return GenericOpc;
}

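/// Constrain the virtual destination register of a copy-like instruction
/// (COPY, or a generic instruction such as G_BITCAST that selects to a plain
/// copy) to a register class matching its register bank and size, then
/// rewrite the instruction to AArch64::COPY. Copies to a physical register
/// are accepted as-is, since the physical register already implies a class.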
static bool selectCopy(MachineInstr &I, const TargetInstrInfo &TII,
                       MachineRegisterInfo &MRI, const TargetRegisterInfo &TRI,
                       const RegisterBankInfo &RBI) {

  unsigned DstReg = I.getOperand(0).getReg();
  if (TargetRegisterInfo::isPhysicalRegister(DstReg)) {
    assert(I.isCopy() && "Generic operators do not allow physical registers");
    return true;
  }

  const RegisterBank &RegBank = *RBI.getRegBank(DstReg, MRI, TRI);
  const unsigned DstSize = MRI.getType(DstReg).getSizeInBits();
  unsigned SrcReg = I.getOperand(1).getReg();
  const unsigned SrcSize = RBI.getSizeInBits(SrcReg, MRI, TRI);
  (void)SrcSize;
  assert((!TargetRegisterInfo::isPhysicalRegister(SrcReg) || I.isCopy()) &&
         "No phys reg on generic operators");
  assert(
      (DstSize == SrcSize ||
       // Copies are a means to set up initial types, so the number of
       // bits may not exactly match.
       (TargetRegisterInfo::isPhysicalRegister(SrcReg) &&
        DstSize <= RBI.getSizeInBits(SrcReg, MRI, TRI)) ||
       // Copies are a means to move bits around: as long as we stay on the
       // same register class, that's fine. Otherwise we need some
       // SUBREG_TO_REG or AND & co.
       (((DstSize + 31) / 32 == (SrcSize + 31) / 32) && DstSize > SrcSize)) &&
      "Copy with different width?!");
  assert((DstSize <= 64 || RegBank.getID() == AArch64::FPRRegBankID) &&
         "GPRs cannot get more than 64-bit width values");
  const TargetRegisterClass *RC = nullptr;

  if (RegBank.getID() == AArch64::FPRRegBankID) {
    if (DstSize <= 32)
      RC = &AArch64::FPR32RegClass;
    else if (DstSize <= 64)
      RC = &AArch64::FPR64RegClass;
    else if (DstSize <= 128)
      RC = &AArch64::FPR128RegClass;
    else {
      DEBUG(dbgs() << "Unexpected bitcast size " << DstSize << '\n');
      return false;
    }
  } else {
    assert(RegBank.getID() == AArch64::GPRRegBankID &&
           "Bitcast for the flags?");
    RC = DstSize <= 32 ? &AArch64::GPR32allRegClass
                       : &AArch64::GPR64allRegClass;
  }

  // No need to constrain SrcReg. It will get constrained when
  // we hit one of its other uses or defs.
  // Copies do not have constraints.
  if (!RBI.constrainGenericRegister(DstReg, *RC, MRI)) {
    DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
                 << " operand\n");
    return false;
  }
  I.setDesc(TII.get(AArch64::COPY));
  return true;
}

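/// Select the AArch64 opcode for the scalar FP<->integer conversion
/// \p GenericOpc (G_SITOFP, G_UITOFP, G_FPTOSI or G_FPTOUI), given the
/// destination type \p DstTy and the source type \p SrcTy; only 32- and
/// 64-bit scalars are handled.
/// \returns \p GenericOpc if the combination is unsupported.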
static unsigned selectFPConvOpc(unsigned GenericOpc, LLT DstTy, LLT SrcTy) {
  if (!DstTy.isScalar() || !SrcTy.isScalar())
    return GenericOpc;

  const unsigned DstSize = DstTy.getSizeInBits();
  const unsigned SrcSize = SrcTy.getSizeInBits();

  switch (DstSize) {
  case 32:
    switch (SrcSize) {
    case 32:
      switch (GenericOpc) {
      case TargetOpcode::G_SITOFP:
        return AArch64::SCVTFUWSri;
      case TargetOpcode::G_UITOFP:
        return AArch64::UCVTFUWSri;
      case TargetOpcode::G_FPTOSI:
        return AArch64::FCVTZSUWSr;
      case TargetOpcode::G_FPTOUI:
        return AArch64::FCVTZUUWSr;
      default:
        return GenericOpc;
      }
    case 64:
      switch (GenericOpc) {
      case TargetOpcode::G_SITOFP:
        return AArch64::SCVTFUXSri;
      case TargetOpcode::G_UITOFP:
        return AArch64::UCVTFUXSri;
      case TargetOpcode::G_FPTOSI:
        return AArch64::FCVTZSUWDr;
      case TargetOpcode::G_FPTOUI:
        return AArch64::FCVTZUUWDr;
      default:
        return GenericOpc;
      }
    default:
      return GenericOpc;
    }
  case 64:
    switch (SrcSize) {
    case 32:
      switch (GenericOpc) {
      case TargetOpcode::G_SITOFP:
        return AArch64::SCVTFUWDri;
      case TargetOpcode::G_UITOFP:
        return AArch64::UCVTFUWDri;
      case TargetOpcode::G_FPTOSI:
        return AArch64::FCVTZSUXSr;
      case TargetOpcode::G_FPTOUI:
        return AArch64::FCVTZUUXSr;
      default:
        return GenericOpc;
      }
    case 64:
      switch (GenericOpc) {
      case TargetOpcode::G_SITOFP:
        return AArch64::SCVTFUXDri;
      case TargetOpcode::G_UITOFP:
        return AArch64::UCVTFUXDri;
      case TargetOpcode::G_FPTOSI:
        return AArch64::FCVTZSUXDr;
      case TargetOpcode::G_FPTOUI:
        return AArch64::FCVTZUUXDr;
      default:
        return GenericOpc;
      }
    default:
      return GenericOpc;
    }
  default:
    return GenericOpc;
  };
  return GenericOpc;
}

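/// Translate an integer-compare predicate from CmpInst into the equivalent
/// AArch64 condition code (e.g. ICMP_ULT becomes AArch64CC::LO).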
static AArch64CC::CondCode changeICMPPredToAArch64CC(CmpInst::Predicate P) {
  switch (P) {
  default:
    llvm_unreachable("Unknown condition code!");
  case CmpInst::ICMP_NE:
    return AArch64CC::NE;
  case CmpInst::ICMP_EQ:
    return AArch64CC::EQ;
  case CmpInst::ICMP_SGT:
    return AArch64CC::GT;
  case CmpInst::ICMP_SGE:
    return AArch64CC::GE;
  case CmpInst::ICMP_SLT:
    return AArch64CC::LT;
  case CmpInst::ICMP_SLE:
    return AArch64CC::LE;
  case CmpInst::ICMP_UGT:
    return AArch64CC::HI;
  case CmpInst::ICMP_UGE:
    return AArch64CC::HS;
  case CmpInst::ICMP_ULT:
    return AArch64CC::LO;
  case CmpInst::ICMP_ULE:
    return AArch64CC::LS;
  }
}

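/// Translate a floating-point-compare predicate from CmpInst into one or two
/// AArch64 condition codes. When a single condition suffices, \p CondCode2 is
/// set to AArch64CC::AL; otherwise the predicate holds if either condition
/// does (e.g. FCMP_UEQ becomes EQ or VS).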
static void changeFCMPPredToAArch64CC(CmpInst::Predicate P,
                                      AArch64CC::CondCode &CondCode,
                                      AArch64CC::CondCode &CondCode2) {
  CondCode2 = AArch64CC::AL;
  switch (P) {
  default:
    llvm_unreachable("Unknown FP condition!");
  case CmpInst::FCMP_OEQ:
    CondCode = AArch64CC::EQ;
    break;
  case CmpInst::FCMP_OGT:
    CondCode = AArch64CC::GT;
    break;
  case CmpInst::FCMP_OGE:
    CondCode = AArch64CC::GE;
    break;
  case CmpInst::FCMP_OLT:
    CondCode = AArch64CC::MI;
    break;
  case CmpInst::FCMP_OLE:
    CondCode = AArch64CC::LS;
    break;
  case CmpInst::FCMP_ONE:
    CondCode = AArch64CC::MI;
    CondCode2 = AArch64CC::GT;
    break;
  case CmpInst::FCMP_ORD:
    CondCode = AArch64CC::VC;
    break;
  case CmpInst::FCMP_UNO:
    CondCode = AArch64CC::VS;
    break;
  case CmpInst::FCMP_UEQ:
    CondCode = AArch64CC::EQ;
    CondCode2 = AArch64CC::VS;
    break;
  case CmpInst::FCMP_UGT:
    CondCode = AArch64CC::HI;
    break;
  case CmpInst::FCMP_UGE:
    CondCode = AArch64CC::PL;
    break;
  case CmpInst::FCMP_ULT:
    CondCode = AArch64CC::LT;
    break;
  case CmpInst::FCMP_ULE:
    CondCode = AArch64CC::LE;
    break;
  case CmpInst::FCMP_UNE:
    CondCode = AArch64CC::NE;
    break;
  }
}

bool AArch64InstructionSelector::select(MachineInstr &I) const {
  assert(I.getParent() && "Instruction should be in a basic block!");
  assert(I.getParent()->getParent() && "Instruction should be in a function!");

  MachineBasicBlock &MBB = *I.getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  if (!isPreISelGenericOpcode(I.getOpcode()))
    return !I.isCopy() || selectCopy(I, TII, MRI, TRI, RBI);

  if (I.getNumOperands() != I.getNumExplicitOperands()) {
    DEBUG(dbgs() << "Generic instruction has unexpected implicit operands\n");
    return false;
  }

  unsigned Opcode = I.getOpcode();
  LLT Ty =
      I.getOperand(0).isReg() ? MRI.getType(I.getOperand(0).getReg()) : LLT{};

  switch (Opcode) {
  case TargetOpcode::G_BR: {
    I.setDesc(TII.get(AArch64::B));
    return true;
  }

  case TargetOpcode::G_BRCOND: {
    if (Ty.getSizeInBits() > 32) {
      // We shouldn't need this on AArch64, but it would be implemented as an
      // EXTRACT_SUBREG followed by a TBNZW because TBNZX has no encoding if
      // the bit being tested is < 32.
      DEBUG(dbgs() << "G_BRCOND has type: " << Ty
                   << ", expected at most 32-bits\n");
      return false;
    }

    const unsigned CondReg = I.getOperand(0).getReg();
    MachineBasicBlock *DestMBB = I.getOperand(1).getMBB();

    auto MIB = BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::TBNZW))
                   .addUse(CondReg)
                   .addImm(/*bit offset=*/0)
                   .addMBB(DestMBB);

    I.eraseFromParent();
    return constrainSelectedInstRegOperands(*MIB.getInstr(), TII, TRI, RBI);
  }

  case TargetOpcode::G_CONSTANT: {
    if (Ty.getSizeInBits() <= 32)
      I.setDesc(TII.get(AArch64::MOVi32imm));
    else if (Ty.getSizeInBits() <= 64)
      I.setDesc(TII.get(AArch64::MOVi64imm));
    else
      return false;
    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_FRAME_INDEX: {
    // allocas and G_FRAME_INDEX are only supported in addrspace(0).
    if (Ty != LLT::pointer(0, 64)) {
      DEBUG(dbgs() << "G_FRAME_INDEX pointer has type: " << Ty
                   << ", expected: " << LLT::pointer(0, 64) << '\n');
      return false;
    }

    I.setDesc(TII.get(AArch64::ADDXri));

    // MOs for a #0 shifted immediate.
    I.addOperand(MachineOperand::CreateImm(0));
    I.addOperand(MachineOperand::CreateImm(0));

    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_GLOBAL_VALUE: {
    auto GV = I.getOperand(1).getGlobal();
    if (GV->isThreadLocal()) {
      // FIXME: we don't support TLS yet.
      return false;
    }
    unsigned char OpFlags = STI.ClassifyGlobalReference(GV, TM);
    if (OpFlags & AArch64II::MO_GOT)
      I.setDesc(TII.get(AArch64::LOADgot));
    else {
      I.setDesc(TII.get(AArch64::MOVaddr));
      I.getOperand(1).setTargetFlags(OpFlags | AArch64II::MO_PAGE);
      MachineInstrBuilder MIB(MF, I);
      MIB.addGlobalAddress(GV, I.getOperand(1).getOffset(),
                           OpFlags | AArch64II::MO_PAGEOFF | AArch64II::MO_NC);
    }
    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_LOAD:
  case TargetOpcode::G_STORE: {
    LLT MemTy = Ty;
    LLT PtrTy = MRI.getType(I.getOperand(1).getReg());

    if (PtrTy != LLT::pointer(0, 64)) {
      DEBUG(dbgs() << "Load/Store pointer has type: " << PtrTy
                   << ", expected: " << LLT::pointer(0, 64) << '\n');
      return false;
    }

#ifndef NDEBUG
    // Sanity-check the pointer register.
    const unsigned PtrReg = I.getOperand(1).getReg();
    const RegisterBank &PtrRB = *RBI.getRegBank(PtrReg, MRI, TRI);
    assert(PtrRB.getID() == AArch64::GPRRegBankID &&
           "Load/Store pointer operand isn't a GPR");
    assert(MRI.getType(PtrReg).isPointer() &&
           "Load/Store pointer operand isn't a pointer");
#endif

    const unsigned ValReg = I.getOperand(0).getReg();
    const RegisterBank &RB = *RBI.getRegBank(ValReg, MRI, TRI);

    const unsigned NewOpc =
        selectLoadStoreUIOp(I.getOpcode(), RB.getID(), MemTy.getSizeInBits());
    if (NewOpc == I.getOpcode())
      return false;

    I.setDesc(TII.get(NewOpc));

    I.addOperand(MachineOperand::CreateImm(0));
    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_MUL: {
    // Reject the various things we don't support yet.
    if (unsupportedBinOp(I, RBI, MRI, TRI))
      return false;

    const unsigned DefReg = I.getOperand(0).getReg();
    const RegisterBank &RB = *RBI.getRegBank(DefReg, MRI, TRI);

    if (RB.getID() != AArch64::GPRRegBankID) {
      DEBUG(dbgs() << "G_MUL on bank: " << RB << ", expected: GPR\n");
      return false;
    }

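    // There is no bare scalar multiply instruction to select to: MUL is an
    // alias of MADD, so emit a multiply-add with the zero register as the
    // accumulator.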
    unsigned ZeroReg;
    unsigned NewOpc;
    if (Ty == LLT::scalar(32)) {
      NewOpc = AArch64::MADDWrrr;
      ZeroReg = AArch64::WZR;
    } else if (Ty == LLT::scalar(64)) {
      NewOpc = AArch64::MADDXrrr;
      ZeroReg = AArch64::XZR;
    } else {
      DEBUG(dbgs() << "G_MUL has type: " << Ty << ", expected: "
                   << LLT::scalar(32) << " or " << LLT::scalar(64) << '\n');
      return false;
    }

    I.setDesc(TII.get(NewOpc));

    I.addOperand(MachineOperand::CreateReg(ZeroReg, /*isDef=*/false));

    // Now that we selected an opcode, we need to constrain the register
    // operands to use appropriate classes.
    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_FADD:
  case TargetOpcode::G_FSUB:
  case TargetOpcode::G_FMUL:
  case TargetOpcode::G_FDIV:

  case TargetOpcode::G_OR:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_GEP: {
    // Reject the various things we don't support yet.
    if (unsupportedBinOp(I, RBI, MRI, TRI))
      return false;

    const unsigned OpSize = Ty.getSizeInBits();

    const unsigned DefReg = I.getOperand(0).getReg();
    const RegisterBank &RB = *RBI.getRegBank(DefReg, MRI, TRI);

    const unsigned NewOpc = selectBinaryOp(I.getOpcode(), RB.getID(), OpSize);
    if (NewOpc == I.getOpcode())
      return false;

    I.setDesc(TII.get(NewOpc));
    // FIXME: Should the type be always reset in setDesc?

    // Now that we selected an opcode, we need to constrain the register
    // operands to use appropriate classes.
    return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
  }

  case TargetOpcode::G_ANYEXT: {
    const unsigned DstReg = I.getOperand(0).getReg();
    const unsigned SrcReg = I.getOperand(1).getReg();

    const RegisterBank &RBDst = *RBI.getRegBank(DstReg, MRI, TRI);
    if (RBDst.getID() != AArch64::GPRRegBankID) {
      DEBUG(dbgs() << "G_ANYEXT on bank: " << RBDst << ", expected: GPR\n");
      return false;
    }

    const RegisterBank &RBSrc = *RBI.getRegBank(SrcReg, MRI, TRI);
    if (RBSrc.getID() != AArch64::GPRRegBankID) {
      DEBUG(dbgs() << "G_ANYEXT on bank: " << RBSrc << ", expected: GPR\n");
      return false;
    }

    const unsigned DstSize = MRI.getType(DstReg).getSizeInBits();

    if (DstSize == 0) {
      DEBUG(dbgs() << "G_ANYEXT operand has no size, not a gvreg?\n");
      return false;
    }

    if (DstSize != 64 && DstSize > 32) {
      DEBUG(dbgs() << "G_ANYEXT to size: " << DstSize
                   << ", expected: 32 or 64\n");
      return false;
    }
    // At this point G_ANYEXT is just like a plain COPY, but we need
    // to explicitly form the 64-bit value if any.
    if (DstSize > 32) {
      unsigned ExtSrc = MRI.createVirtualRegister(&AArch64::GPR64allRegClass);
      BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::SUBREG_TO_REG))
          .addDef(ExtSrc)
          .addImm(0)
          .addUse(SrcReg)
          .addImm(AArch64::sub_32);
      I.getOperand(1).setReg(ExtSrc);
    }
    return selectCopy(I, TII, MRI, TRI, RBI);
  }

  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_SEXT: {
    unsigned Opcode = I.getOpcode();
    const LLT DstTy = MRI.getType(I.getOperand(0).getReg()),
              SrcTy = MRI.getType(I.getOperand(1).getReg());
    const bool isSigned = Opcode == TargetOpcode::G_SEXT;
    const unsigned DefReg = I.getOperand(0).getReg();
    const unsigned SrcReg = I.getOperand(1).getReg();
    const RegisterBank &RB = *RBI.getRegBank(DefReg, MRI, TRI);

    if (RB.getID() != AArch64::GPRRegBankID) {
      DEBUG(dbgs() << TII.getName(I.getOpcode()) << " on bank: " << RB
                   << ", expected: GPR\n");
      return false;
    }

    MachineInstr *ExtI;
    if (DstTy == LLT::scalar(64)) {
      // FIXME: Can we avoid manually doing this?
      if (!RBI.constrainGenericRegister(SrcReg, AArch64::GPR32RegClass, MRI)) {
        DEBUG(dbgs() << "Failed to constrain " << TII.getName(Opcode)
                     << " operand\n");
        return false;
      }

      const unsigned SrcXReg =
          MRI.createVirtualRegister(&AArch64::GPR64RegClass);
      BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::SUBREG_TO_REG))
          .addDef(SrcXReg)
          .addImm(0)
          .addUse(SrcReg)
          .addImm(AArch64::sub_32);

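      // SBFM/UBFM with immr = 0 and imms = SrcSize - 1 copies the low SrcSize
      // bits into the destination and sign- or zero-extends them, i.e. the
      // SXT*/UXT* aliases.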
      const unsigned NewOpc = isSigned ? AArch64::SBFMXri : AArch64::UBFMXri;
      ExtI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(NewOpc))
                 .addDef(DefReg)
                 .addUse(SrcXReg)
                 .addImm(0)
                 .addImm(SrcTy.getSizeInBits() - 1);
    } else if (DstTy == LLT::scalar(32)) {
      const unsigned NewOpc = isSigned ? AArch64::SBFMWri : AArch64::UBFMWri;
      ExtI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(NewOpc))
                 .addDef(DefReg)
                 .addUse(SrcReg)
                 .addImm(0)
                 .addImm(SrcTy.getSizeInBits() - 1);
    } else {
      return false;
    }

    constrainSelectedInstRegOperands(*ExtI, TII, TRI, RBI);

    I.eraseFromParent();
    return true;
  }

  case TargetOpcode::G_SITOFP:
  case TargetOpcode::G_UITOFP:
  case TargetOpcode::G_FPTOSI:
  case TargetOpcode::G_FPTOUI: {
    const LLT DstTy = MRI.getType(I.getOperand(0).getReg()),
              SrcTy = MRI.getType(I.getOperand(1).getReg());
    const unsigned NewOpc = selectFPConvOpc(Opcode, DstTy, SrcTy);
    if (NewOpc == Opcode)
      return false;

    I.setDesc(TII.get(NewOpc));
    constrainSelectedInstRegOperands(I, TII, TRI, RBI);

    return true;
  }

  case TargetOpcode::G_INTTOPTR:
  case TargetOpcode::G_PTRTOINT:
  case TargetOpcode::G_BITCAST:
    return selectCopy(I, TII, MRI, TRI, RBI);

  case TargetOpcode::G_ICMP: {
    if (Ty != LLT::scalar(1)) {
      DEBUG(dbgs() << "G_ICMP result has type: " << Ty
                   << ", expected: " << LLT::scalar(1) << '\n');
      return false;
    }

    unsigned CmpOpc = 0;
    unsigned ZReg = 0;

    LLT CmpTy = MRI.getType(I.getOperand(2).getReg());
    if (CmpTy == LLT::scalar(32)) {
      CmpOpc = AArch64::SUBSWrr;
      ZReg = AArch64::WZR;
    } else if (CmpTy == LLT::scalar(64) || CmpTy.isPointer()) {
      CmpOpc = AArch64::SUBSXrr;
      ZReg = AArch64::XZR;
    } else {
      return false;
    }

    const AArch64CC::CondCode CC = changeICMPPredToAArch64CC(
        (CmpInst::Predicate)I.getOperand(1).getPredicate());

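    // Compare by subtracting into the zero register so that only NZCV is
    // kept, then materialize the boolean result into the destination with a
    // CSINC of WZR against WZR (the CSET idiom).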
    MachineInstr &CmpMI = *BuildMI(MBB, I, I.getDebugLoc(), TII.get(CmpOpc))
                               .addDef(ZReg)
                               .addUse(I.getOperand(2).getReg())
                               .addUse(I.getOperand(3).getReg());

    MachineInstr &CSetMI =
        *BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::CSINCWr))
             .addDef(I.getOperand(0).getReg())
             .addUse(AArch64::WZR)
             .addUse(AArch64::WZR)
             .addImm(CC);

    constrainSelectedInstRegOperands(CmpMI, TII, TRI, RBI);
    constrainSelectedInstRegOperands(CSetMI, TII, TRI, RBI);

    I.eraseFromParent();
    return true;
  }

  case TargetOpcode::G_FCMP: {
    if (Ty != LLT::scalar(1)) {
      DEBUG(dbgs() << "G_FCMP result has type: " << Ty
                   << ", expected: " << LLT::scalar(1) << '\n');
      return false;
    }

    unsigned CmpOpc = 0;
    LLT CmpTy = MRI.getType(I.getOperand(2).getReg());
    if (CmpTy == LLT::scalar(32)) {
      CmpOpc = AArch64::FCMPSrr;
    } else if (CmpTy == LLT::scalar(64)) {
      CmpOpc = AArch64::FCMPDrr;
    } else {
      return false;
    }

    // FIXME: regbank

    AArch64CC::CondCode CC1, CC2;
    changeFCMPPredToAArch64CC(
        (CmpInst::Predicate)I.getOperand(1).getPredicate(), CC1, CC2);

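    // FCMP only sets the flags; materialize the i1 result with a CSINC. Some
    // predicates map to two condition codes (CC2 != AL), in which case a
    // second CSINC is emitted and the two partial results are ORed together.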
    MachineInstr &CmpMI = *BuildMI(MBB, I, I.getDebugLoc(), TII.get(CmpOpc))
                               .addUse(I.getOperand(2).getReg())
                               .addUse(I.getOperand(3).getReg());

    const unsigned DefReg = I.getOperand(0).getReg();
    unsigned Def1Reg = DefReg;
    if (CC2 != AArch64CC::AL)
      Def1Reg = MRI.createVirtualRegister(&AArch64::GPR32RegClass);

    MachineInstr &CSetMI =
        *BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::CSINCWr))
             .addDef(Def1Reg)
             .addUse(AArch64::WZR)
             .addUse(AArch64::WZR)
             .addImm(CC1);

    if (CC2 != AArch64CC::AL) {
      unsigned Def2Reg = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
      MachineInstr &CSet2MI =
          *BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::CSINCWr))
               .addDef(Def2Reg)
               .addUse(AArch64::WZR)
               .addUse(AArch64::WZR)
               .addImm(CC2);
      MachineInstr &OrMI =
          *BuildMI(MBB, I, I.getDebugLoc(), TII.get(AArch64::ORRWrr))
               .addDef(DefReg)
               .addUse(Def1Reg)
               .addUse(Def2Reg);
      constrainSelectedInstRegOperands(OrMI, TII, TRI, RBI);
      constrainSelectedInstRegOperands(CSet2MI, TII, TRI, RBI);
    }

    constrainSelectedInstRegOperands(CmpMI, TII, TRI, RBI);
    constrainSelectedInstRegOperands(CSetMI, TII, TRI, RBI);

    I.eraseFromParent();
    return true;
  }
  }

  return false;
}