//===- MipsInstructionSelector.cpp ------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the targeting of the InstructionSelector class for
/// Mips.
/// \todo This should be generated by TableGen.
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/MipsInstPrinter.h"
#include "MipsMachineFunction.h"
#include "MipsRegisterBankInfo.h"
#include "MipsTargetMachine.h"
#include "llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h"
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

#define DEBUG_TYPE "mips-isel"

using namespace llvm;

namespace {

#define GET_GLOBALISEL_PREDICATE_BITSET
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATE_BITSET

class MipsInstructionSelector : public InstructionSelector {
public:
  MipsInstructionSelector(const MipsTargetMachine &TM, const MipsSubtarget &STI,
                          const MipsRegisterBankInfo &RBI);

  bool select(MachineInstr &I, CodeGenCoverage &CoverageInfo) const override;
  static const char *getName() { return DEBUG_TYPE; }

private:
  bool selectImpl(MachineInstr &I, CodeGenCoverage &CoverageInfo) const;
  bool materialize32BitImm(unsigned DestReg, APInt Imm,
                           MachineIRBuilder &B) const;
  bool selectCopy(MachineInstr &I, MachineRegisterInfo &MRI) const;

  const MipsTargetMachine &TM;
  const MipsSubtarget &STI;
  const MipsInstrInfo &TII;
  const MipsRegisterInfo &TRI;
  const MipsRegisterBankInfo &RBI;

#define GET_GLOBALISEL_PREDICATES_DECL
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATES_DECL

#define GET_GLOBALISEL_TEMPORARIES_DECL
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_TEMPORARIES_DECL
};

} // end anonymous namespace

#define GET_GLOBALISEL_IMPL
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_IMPL

MipsInstructionSelector::MipsInstructionSelector(
    const MipsTargetMachine &TM, const MipsSubtarget &STI,
    const MipsRegisterBankInfo &RBI)
    : InstructionSelector(), TM(TM), STI(STI), TII(*STI.getInstrInfo()),
      TRI(*STI.getRegisterInfo()), RBI(RBI),

#define GET_GLOBALISEL_PREDICATES_INIT
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATES_INIT
#define GET_GLOBALISEL_TEMPORARIES_INIT
#include "MipsGenGlobalISel.inc"
#undef GET_GLOBALISEL_TEMPORARIES_INIT
{
}

bool MipsInstructionSelector::selectCopy(MachineInstr &I,
                                         MachineRegisterInfo &MRI) const {
  unsigned DstReg = I.getOperand(0).getReg();
  if (TargetRegisterInfo::isPhysicalRegister(DstReg))
    return true;

  const RegisterBank *RegBank = RBI.getRegBank(DstReg, MRI, TRI);
  const unsigned DstSize = MRI.getType(DstReg).getSizeInBits();

  const TargetRegisterClass *RC = &Mips::GPR32RegClass;
  if (RegBank->getID() == Mips::FPRBRegBankID) {
    if (DstSize == 32)
      RC = &Mips::FGR32RegClass;
    else if (DstSize == 64)
      RC = STI.isFP64bit() ? &Mips::FGR64RegClass : &Mips::AFGR64RegClass;
    else
      llvm_unreachable("Unsupported destination size");
  }
  if (!RBI.constrainGenericRegister(DstReg, *RC, MRI)) {
    LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
                      << " operand\n");
    return false;
  }
  return true;
}

bool MipsInstructionSelector::materialize32BitImm(unsigned DestReg, APInt Imm,
                                                  MachineIRBuilder &B) const {
  assert(Imm.getBitWidth() == 32 && "Unsupported immediate size.");
  // ORi zero-extends its immediate; use it for values whose high 16 bits are
  // zero.
  if (Imm.getHiBits(16).isNullValue()) {
    MachineInstr *Inst = B.buildInstr(Mips::ORi, {DestReg}, {Mips::ZERO})
                             .addImm(Imm.getLoBits(16).getLimitedValue());
    return constrainSelectedInstRegOperands(*Inst, TII, TRI, RBI);
  }
  // LUi places the immediate in the high 16 bits and sets the low 16 bits to
  // zero.
  if (Imm.getLoBits(16).isNullValue()) {
    MachineInstr *Inst = B.buildInstr(Mips::LUi, {DestReg}, {})
                             .addImm(Imm.getHiBits(16).getLimitedValue());
    return constrainSelectedInstRegOperands(*Inst, TII, TRI, RBI);
  }
  // ADDiu sign-extends its immediate; use it for values whose high 17 bits are
  // all ones.
  if (Imm.isSignedIntN(16)) {
    MachineInstr *Inst = B.buildInstr(Mips::ADDiu, {DestReg}, {Mips::ZERO})
                             .addImm(Imm.getLoBits(16).getLimitedValue());
    return constrainSelectedInstRegOperands(*Inst, TII, TRI, RBI);
  }
  // Values that cannot be materialized with a single immediate instruction
  // need an LUi/ORi pair.
  unsigned LUiReg = B.getMRI()->createVirtualRegister(&Mips::GPR32RegClass);
  MachineInstr *LUi = B.buildInstr(Mips::LUi, {LUiReg}, {})
                          .addImm(Imm.getHiBits(16).getLimitedValue());
  MachineInstr *ORi = B.buildInstr(Mips::ORi, {DestReg}, {LUiReg})
                          .addImm(Imm.getLoBits(16).getLimitedValue());
  if (!constrainSelectedInstRegOperands(*LUi, TII, TRI, RBI))
    return false;
  if (!constrainSelectedInstRegOperands(*ORi, TII, TRI, RBI))
    return false;
  return true;
}

/// Returning the original Opc indicates that we failed to select a MIPS
/// instruction opcode for this memory access size.
static unsigned selectLoadStoreOpCode(unsigned Opc, unsigned MemSizeInBytes) {
  if (Opc == TargetOpcode::G_STORE)
    switch (MemSizeInBytes) {
    case 4:
      return Mips::SW;
    case 2:
      return Mips::SH;
    case 1:
      return Mips::SB;
    default:
      return Opc;
    }
  else
    // An unspecified extending load (G_LOAD) is selected as a zero-extending
    // load.
    switch (MemSizeInBytes) {
    case 4:
      return Mips::LW;
    case 2:
      return Opc == TargetOpcode::G_SEXTLOAD ? Mips::LH : Mips::LHu;
    case 1:
      return Opc == TargetOpcode::G_SEXTLOAD ? Mips::LB : Mips::LBu;
    default:
      return Opc;
    }
}

bool MipsInstructionSelector::select(MachineInstr &I,
                                     CodeGenCoverage &CoverageInfo) const {

  MachineBasicBlock &MBB = *I.getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  if (!isPreISelGenericOpcode(I.getOpcode())) {
    if (I.isCopy())
      return selectCopy(I, MRI);

    return true;
  }

  if (I.getOpcode() == Mips::G_MUL) {
    MachineInstr *Mul = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::MUL))
                            .add(I.getOperand(0))
                            .add(I.getOperand(1))
                            .add(I.getOperand(2));
    if (!constrainSelectedInstRegOperands(*Mul, TII, TRI, RBI))
      return false;
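    // Mark the implicit operands appended from MUL's instruction description
    // (the HI/LO clobbers) as dead; nothing reads them here.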
    Mul->getOperand(3).setIsDead(true);
    Mul->getOperand(4).setIsDead(true);

    I.eraseFromParent();
    return true;
  }

  if (selectImpl(I, CoverageInfo))
    return true;

  MachineInstr *MI = nullptr;
  using namespace TargetOpcode;

  switch (I.getOpcode()) {
  case G_UMULH: {
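    // Multiply into the 64-bit HI/LO accumulator with PseudoMULTu, then copy
    // the high 32 bits of the product into the result with PseudoMFHI.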
    unsigned PseudoMULTuReg = MRI.createVirtualRegister(&Mips::ACC64RegClass);
    MachineInstr *PseudoMULTu, *PseudoMove;

    PseudoMULTu = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::PseudoMULTu))
                      .addDef(PseudoMULTuReg)
                      .add(I.getOperand(1))
                      .add(I.getOperand(2));
    if (!constrainSelectedInstRegOperands(*PseudoMULTu, TII, TRI, RBI))
      return false;

    PseudoMove = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::PseudoMFHI))
                     .addDef(I.getOperand(0).getReg())
                     .addUse(PseudoMULTuReg);
    if (!constrainSelectedInstRegOperands(*PseudoMove, TII, TRI, RBI))
      return false;

    I.eraseFromParent();
    return true;
  }
  case G_GEP: {
    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::ADDu))
             .add(I.getOperand(0))
             .add(I.getOperand(1))
             .add(I.getOperand(2));
    break;
  }
  case G_FRAME_INDEX: {
    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::ADDiu))
             .add(I.getOperand(0))
             .add(I.getOperand(1))
             .addImm(0);
    break;
  }
  case G_BRCOND: {
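    // Branch when the condition register is non-zero: BNE against $zero.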
    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::BNE))
             .add(I.getOperand(0))
             .addUse(Mips::ZERO)
             .add(I.getOperand(1));
    break;
  }
  case G_PHI: {
    const unsigned DestReg = I.getOperand(0).getReg();
    const unsigned DestRegBank = RBI.getRegBank(DestReg, MRI, TRI)->getID();
    const unsigned OpSize = MRI.getType(DestReg).getSizeInBits();

    if (DestRegBank != Mips::GPRBRegBankID || OpSize != 32)
      return false;

    const TargetRegisterClass *DefRC = &Mips::GPR32RegClass;
    I.setDesc(TII.get(TargetOpcode::PHI));
    return RBI.constrainGenericRegister(DestReg, *DefRC, MRI);
  }
  case G_STORE:
  case G_LOAD:
  case G_ZEXTLOAD:
  case G_SEXTLOAD: {
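    // Only 32-bit values on the GPR bank are handled here; the access size
    // picks the exact load/store opcode below.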
    const unsigned DestReg = I.getOperand(0).getReg();
    const unsigned DestRegBank = RBI.getRegBank(DestReg, MRI, TRI)->getID();
    const unsigned OpSize = MRI.getType(DestReg).getSizeInBits();
    const unsigned OpMemSizeInBytes = (*I.memoperands_begin())->getSize();

    if (DestRegBank != Mips::GPRBRegBankID || OpSize != 32)
      return false;

    const unsigned NewOpc =
        selectLoadStoreOpCode(I.getOpcode(), OpMemSizeInBytes);
    if (NewOpc == I.getOpcode())
      return false;

    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(NewOpc))
             .add(I.getOperand(0))
             .add(I.getOperand(1))
             .addImm(0)
             .addMemOperand(*I.memoperands_begin());
    break;
  }
  case G_UDIV:
  case G_UREM:
  case G_SDIV:
  case G_SREM: {
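    // PseudoSDIV/PseudoUDIV write the quotient to LO and the remainder to HI
    // of the 64-bit accumulator; PseudoMFLO/PseudoMFHI read back the half we
    // need.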
    unsigned HILOReg = MRI.createVirtualRegister(&Mips::ACC64RegClass);
    bool IsSigned = I.getOpcode() == G_SREM || I.getOpcode() == G_SDIV;
    bool IsDiv = I.getOpcode() == G_UDIV || I.getOpcode() == G_SDIV;

    MachineInstr *PseudoDIV, *PseudoMove;
    PseudoDIV = BuildMI(MBB, I, I.getDebugLoc(),
                        TII.get(IsSigned ? Mips::PseudoSDIV : Mips::PseudoUDIV))
                    .addDef(HILOReg)
                    .add(I.getOperand(1))
                    .add(I.getOperand(2));
    if (!constrainSelectedInstRegOperands(*PseudoDIV, TII, TRI, RBI))
      return false;

    PseudoMove = BuildMI(MBB, I, I.getDebugLoc(),
                         TII.get(IsDiv ? Mips::PseudoMFLO : Mips::PseudoMFHI))
                     .addDef(I.getOperand(0).getReg())
                     .addUse(HILOReg);
    if (!constrainSelectedInstRegOperands(*PseudoMove, TII, TRI, RBI))
      return false;

    I.eraseFromParent();
    return true;
  }
  case G_SELECT: {
    // Handle operands with pointer type.
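    // The conditional-move pseudo gives the result the true value when the
    // condition register is non-zero and the false value otherwise.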
    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::MOVN_I_I))
             .add(I.getOperand(0))
             .add(I.getOperand(2))
             .add(I.getOperand(1))
             .add(I.getOperand(3));
    break;
  }
  case G_CONSTANT: {
    MachineIRBuilder B(I);
    if (!materialize32BitImm(I.getOperand(0).getReg(),
                             I.getOperand(1).getCImm()->getValue(), B))
      return false;

    I.eraseFromParent();
    return true;
  }
  case G_FCONSTANT: {
    const APFloat &FPimm = I.getOperand(1).getFPImm()->getValueAPF();
    APInt APImm = FPimm.bitcastToAPInt();
    unsigned Size = MRI.getType(I.getOperand(0).getReg()).getSizeInBits();

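    // Materialize the bit pattern in general purpose registers, then transfer
    // it to the FPU: MTC1 for a single float, BuildPairF64 for a double.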
    if (Size == 32) {
      unsigned GPRReg = MRI.createVirtualRegister(&Mips::GPR32RegClass);
      MachineIRBuilder B(I);
      if (!materialize32BitImm(GPRReg, APImm, B))
        return false;

      MachineInstrBuilder MTC1 =
          B.buildInstr(Mips::MTC1, {I.getOperand(0).getReg()}, {GPRReg});
      if (!MTC1.constrainAllUses(TII, TRI, RBI))
        return false;
    }
    if (Size == 64) {
      unsigned GPRRegHigh = MRI.createVirtualRegister(&Mips::GPR32RegClass);
      unsigned GPRRegLow = MRI.createVirtualRegister(&Mips::GPR32RegClass);
      MachineIRBuilder B(I);
      if (!materialize32BitImm(GPRRegHigh, APImm.getHiBits(32).trunc(32), B))
        return false;
      if (!materialize32BitImm(GPRRegLow, APImm.getLoBits(32).trunc(32), B))
        return false;

      MachineInstrBuilder PairF64 = B.buildInstr(
          STI.isFP64bit() ? Mips::BuildPairF64_64 : Mips::BuildPairF64,
          {I.getOperand(0).getReg()}, {GPRRegLow, GPRRegHigh});
      if (!PairF64.constrainAllUses(TII, TRI, RBI))
        return false;
    }

    I.eraseFromParent();
    return true;
  }
  case G_FABS: {
    unsigned Size = MRI.getType(I.getOperand(0).getReg()).getSizeInBits();
    unsigned FABSOpcode =
        Size == 32 ? Mips::FABS_S
                   : STI.isFP64bit() ? Mips::FABS_D64 : Mips::FABS_D32;
    MI = BuildMI(MBB, I, I.getDebugLoc(), TII.get(FABSOpcode))
             .add(I.getOperand(0))
             .add(I.getOperand(1));
    break;
  }
  case G_FPTOSI: {
    unsigned FromSize = MRI.getType(I.getOperand(1).getReg()).getSizeInBits();
    unsigned ToSize = MRI.getType(I.getOperand(0).getReg()).getSizeInBits();
    assert((ToSize == 32) && "Unsupported integer size for G_FPTOSI");
    assert((FromSize == 32 || FromSize == 64) &&
           "Unsupported floating point size for G_FPTOSI");

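    // Convert in the FPU with a TRUNC_W_* instruction, then move the 32-bit
    // integer result from the FPR into the destination GPR with MFC1.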
    unsigned Opcode;
    if (FromSize == 32)
      Opcode = Mips::TRUNC_W_S;
    else
      Opcode = STI.isFP64bit() ? Mips::TRUNC_W_D64 : Mips::TRUNC_W_D32;
    unsigned ResultInFPR = MRI.createVirtualRegister(&Mips::FGR32RegClass);
    MachineInstr *Trunc = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Opcode))
                              .addDef(ResultInFPR)
                              .addUse(I.getOperand(1).getReg());
    if (!constrainSelectedInstRegOperands(*Trunc, TII, TRI, RBI))
      return false;

    MachineInstr *Move = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::MFC1))
                             .addDef(I.getOperand(0).getReg())
                             .addUse(ResultInFPR);
    if (!constrainSelectedInstRegOperands(*Move, TII, TRI, RBI))
      return false;

    I.eraseFromParent();
    return true;
  }
  case G_GLOBAL_VALUE: {
    const llvm::GlobalValue *GVal = I.getOperand(1).getGlobal();
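    // PIC: load the address from the GOT through the global base register.
    // Non-PIC: build the address directly with a LUi/ADDiu %hi/%lo pair.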
    if (MF.getTarget().isPositionIndependent()) {
      MachineInstr *LWGOT = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::LW))
                                .addDef(I.getOperand(0).getReg())
                                .addReg(MF.getInfo<MipsFunctionInfo>()
                                            ->getGlobalBaseRegForGlobalISel())
                                .addGlobalAddress(GVal);
      // Global values that don't have local linkage are handled differently
      // when they are part of a call sequence. MipsCallLowering::lowerCall
      // creates the G_GLOBAL_VALUE instruction as part of the call sequence
      // and adds the MO_GOT_CALL flag when the callee doesn't have local
      // linkage.
      if (I.getOperand(1).getTargetFlags() == MipsII::MO_GOT_CALL)
        LWGOT->getOperand(2).setTargetFlags(MipsII::MO_GOT_CALL);
      else
        LWGOT->getOperand(2).setTargetFlags(MipsII::MO_GOT);
      LWGOT->addMemOperand(
          MF, MF.getMachineMemOperand(MachinePointerInfo::getGOT(MF),
                                      MachineMemOperand::MOLoad, 4, 4));
      if (!constrainSelectedInstRegOperands(*LWGOT, TII, TRI, RBI))
        return false;

      if (GVal->hasLocalLinkage()) {
        unsigned LWGOTDef = MRI.createVirtualRegister(&Mips::GPR32RegClass);
        LWGOT->getOperand(0).setReg(LWGOTDef);

        MachineInstr *ADDiu =
            BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::ADDiu))
                .addDef(I.getOperand(0).getReg())
                .addReg(LWGOTDef)
                .addGlobalAddress(GVal);
        ADDiu->getOperand(2).setTargetFlags(MipsII::MO_ABS_LO);
        if (!constrainSelectedInstRegOperands(*ADDiu, TII, TRI, RBI))
          return false;
      }
    } else {
      unsigned LUiReg = MRI.createVirtualRegister(&Mips::GPR32RegClass);

      MachineInstr *LUi = BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::LUi))
                              .addDef(LUiReg)
                              .addGlobalAddress(GVal);
      LUi->getOperand(1).setTargetFlags(MipsII::MO_ABS_HI);
      if (!constrainSelectedInstRegOperands(*LUi, TII, TRI, RBI))
        return false;

      MachineInstr *ADDiu =
          BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::ADDiu))
              .addDef(I.getOperand(0).getReg())
              .addUse(LUiReg)
              .addGlobalAddress(GVal);
      ADDiu->getOperand(2).setTargetFlags(MipsII::MO_ABS_LO);
      if (!constrainSelectedInstRegOperands(*ADDiu, TII, TRI, RBI))
        return false;
    }
    I.eraseFromParent();
    return true;
  }
  case G_ICMP: {
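    // Each predicate is lowered to one or two GPR instructions built from
    // set-on-less-than (SLT/SLTu/SLTiu) and XOR/XORi; record the sequence
    // first, then emit and constrain it below.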
    struct Instr {
      unsigned Opcode, Def, LHS, RHS;
      Instr(unsigned Opcode, unsigned Def, unsigned LHS, unsigned RHS)
          : Opcode(Opcode), Def(Def), LHS(LHS), RHS(RHS){};

      bool hasImm() const {
        if (Opcode == Mips::SLTiu || Opcode == Mips::XORi)
          return true;
        return false;
      }
    };

    SmallVector<struct Instr, 2> Instructions;
    unsigned ICMPReg = I.getOperand(0).getReg();
    unsigned Temp = MRI.createVirtualRegister(&Mips::GPR32RegClass);
    unsigned LHS = I.getOperand(2).getReg();
    unsigned RHS = I.getOperand(3).getReg();
    CmpInst::Predicate Cond =
        static_cast<CmpInst::Predicate>(I.getOperand(1).getPredicate());

    switch (Cond) {
    case CmpInst::ICMP_EQ: // LHS == RHS -> (LHS ^ RHS) < 1
      Instructions.emplace_back(Mips::XOR, Temp, LHS, RHS);
      Instructions.emplace_back(Mips::SLTiu, ICMPReg, Temp, 1);
      break;
    case CmpInst::ICMP_NE: // LHS != RHS -> 0 < (LHS ^ RHS)
      Instructions.emplace_back(Mips::XOR, Temp, LHS, RHS);
      Instructions.emplace_back(Mips::SLTu, ICMPReg, Mips::ZERO, Temp);
      break;
    case CmpInst::ICMP_UGT: // LHS > RHS -> RHS < LHS
      Instructions.emplace_back(Mips::SLTu, ICMPReg, RHS, LHS);
      break;
    case CmpInst::ICMP_UGE: // LHS >= RHS -> !(LHS < RHS)
      Instructions.emplace_back(Mips::SLTu, Temp, LHS, RHS);
      Instructions.emplace_back(Mips::XORi, ICMPReg, Temp, 1);
      break;
    case CmpInst::ICMP_ULT: // LHS < RHS -> LHS < RHS
      Instructions.emplace_back(Mips::SLTu, ICMPReg, LHS, RHS);
      break;
    case CmpInst::ICMP_ULE: // LHS <= RHS -> !(RHS < LHS)
      Instructions.emplace_back(Mips::SLTu, Temp, RHS, LHS);
      Instructions.emplace_back(Mips::XORi, ICMPReg, Temp, 1);
      break;
    case CmpInst::ICMP_SGT: // LHS > RHS -> RHS < LHS
      Instructions.emplace_back(Mips::SLT, ICMPReg, RHS, LHS);
      break;
    case CmpInst::ICMP_SGE: // LHS >= RHS -> !(LHS < RHS)
      Instructions.emplace_back(Mips::SLT, Temp, LHS, RHS);
      Instructions.emplace_back(Mips::XORi, ICMPReg, Temp, 1);
      break;
    case CmpInst::ICMP_SLT: // LHS < RHS -> LHS < RHS
      Instructions.emplace_back(Mips::SLT, ICMPReg, LHS, RHS);
      break;
    case CmpInst::ICMP_SLE: // LHS <= RHS -> !(RHS < LHS)
      Instructions.emplace_back(Mips::SLT, Temp, RHS, LHS);
      Instructions.emplace_back(Mips::XORi, ICMPReg, Temp, 1);
      break;
    default:
      return false;
    }

    MachineIRBuilder B(I);
    for (const struct Instr &Instruction : Instructions) {
      MachineInstrBuilder MIB = B.buildInstr(
          Instruction.Opcode, {Instruction.Def}, {Instruction.LHS});

      if (Instruction.hasImm())
        MIB.addImm(Instruction.RHS);
      else
        MIB.addUse(Instruction.RHS);

      if (!MIB.constrainAllUses(TII, TRI, RBI))
        return false;
    }

    I.eraseFromParent();
    return true;
  }
  case G_FCMP: {
    unsigned MipsFCMPCondCode;
    bool isLogicallyNegated;
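    // Map the IR predicate onto a native FP condition code. Ordered/unordered
    // pairs share a code, and isLogicallyNegated records when the native test
    // is the logical negation of the requested predicate.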
    switch (CmpInst::Predicate Cond = static_cast<CmpInst::Predicate>(
                I.getOperand(1).getPredicate())) {
    case CmpInst::FCMP_UNO: // Unordered
    case CmpInst::FCMP_ORD: // Ordered (OR)
      MipsFCMPCondCode = Mips::FCOND_UN;
      isLogicallyNegated = Cond != CmpInst::FCMP_UNO;
      break;
    case CmpInst::FCMP_OEQ: // Equal
    case CmpInst::FCMP_UNE: // Not Equal (NEQ)
      MipsFCMPCondCode = Mips::FCOND_OEQ;
      isLogicallyNegated = Cond != CmpInst::FCMP_OEQ;
      break;
    case CmpInst::FCMP_UEQ: // Unordered or Equal
    case CmpInst::FCMP_ONE: // Ordered or Greater Than or Less Than (OGL)
      MipsFCMPCondCode = Mips::FCOND_UEQ;
      isLogicallyNegated = Cond != CmpInst::FCMP_UEQ;
      break;
    case CmpInst::FCMP_OLT: // Ordered or Less Than
    case CmpInst::FCMP_UGE: // Unordered or Greater Than or Equal (UGE)
      MipsFCMPCondCode = Mips::FCOND_OLT;
      isLogicallyNegated = Cond != CmpInst::FCMP_OLT;
      break;
    case CmpInst::FCMP_ULT: // Unordered or Less Than
    case CmpInst::FCMP_OGE: // Ordered or Greater Than or Equal (OGE)
      MipsFCMPCondCode = Mips::FCOND_ULT;
      isLogicallyNegated = Cond != CmpInst::FCMP_ULT;
      break;
    case CmpInst::FCMP_OLE: // Ordered or Less Than or Equal
    case CmpInst::FCMP_UGT: // Unordered or Greater Than (UGT)
      MipsFCMPCondCode = Mips::FCOND_OLE;
      isLogicallyNegated = Cond != CmpInst::FCMP_OLE;
      break;
    case CmpInst::FCMP_ULE: // Unordered or Less Than or Equal
    case CmpInst::FCMP_OGT: // Ordered or Greater Than (OGT)
      MipsFCMPCondCode = Mips::FCOND_ULE;
      isLogicallyNegated = Cond != CmpInst::FCMP_ULE;
      break;
    default:
      return false;
    }

    // The default compare result in the GPR register is `true`.
    // MOVF_I moves `false` (Mips::ZERO) into the GPR result when the fcmp
    // gives false. When the original predicate (Cond) is the logical negation
    // of MipsFCMPCondCode, the result is inverted, i.e. MOVT_I is used.
    unsigned MoveOpcode = isLogicallyNegated ? Mips::MOVT_I : Mips::MOVF_I;

    unsigned TrueInReg = MRI.createVirtualRegister(&Mips::GPR32RegClass);
    BuildMI(MBB, I, I.getDebugLoc(), TII.get(Mips::ADDiu))
        .addDef(TrueInReg)
        .addUse(Mips::ZERO)
        .addImm(1);

    unsigned Size = MRI.getType(I.getOperand(2).getReg()).getSizeInBits();
    unsigned FCMPOpcode =
        Size == 32 ? Mips::FCMP_S32
                   : STI.isFP64bit() ? Mips::FCMP_D64 : Mips::FCMP_D32;
    MachineInstr *FCMP = BuildMI(MBB, I, I.getDebugLoc(), TII.get(FCMPOpcode))
                             .addUse(I.getOperand(2).getReg())
                             .addUse(I.getOperand(3).getReg())
                             .addImm(MipsFCMPCondCode);
    if (!constrainSelectedInstRegOperands(*FCMP, TII, TRI, RBI))
      return false;

    MachineInstr *Move = BuildMI(MBB, I, I.getDebugLoc(), TII.get(MoveOpcode))
                             .addDef(I.getOperand(0).getReg())
                             .addUse(Mips::ZERO)
                             .addUse(Mips::FCC0)
                             .addUse(TrueInReg);
    if (!constrainSelectedInstRegOperands(*Move, TII, TRI, RBI))
      return false;

    I.eraseFromParent();
    return true;
  }
  default:
    return false;
  }

  I.eraseFromParent();
  return constrainSelectedInstRegOperands(*MI, TII, TRI, RBI);
}

namespace llvm {
InstructionSelector *createMipsInstructionSelector(const MipsTargetMachine &TM,
                                                   MipsSubtarget &Subtarget,
                                                   MipsRegisterBankInfo &RBI) {
  return new MipsInstructionSelector(TM, Subtarget, RBI);
}
} // end namespace llvm