//===- SPUInstrInfo.cpp - Cell SPU Instruction Information ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the Cell SPU implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "SPURegisterNames.h"
#include "SPUInstrInfo.h"
#include "SPUInstrBuilder.h"
#include "SPUTargetMachine.h"
#include "SPUGenInstrInfo.inc"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include <iostream>

using namespace llvm;

SPUInstrInfo::SPUInstrInfo(SPUTargetMachine &tm)
  : TargetInstrInfoImpl(SPUInsts, sizeof(SPUInsts)/sizeof(SPUInsts[0])),
    TM(tm),
    RI(*TM.getSubtargetImpl(), *this)
{
  /* NOP */
}

/// getPointerRegClass - Return the register class to use to hold pointers.
/// This is used for addressing modes.
const TargetRegisterClass *
SPUInstrInfo::getPointerRegClass() const
{
  return &SPU::R32CRegClass;
}

bool
SPUInstrInfo::isMoveInstr(const MachineInstr& MI,
                          unsigned& sourceReg,
                          unsigned& destReg) const {
  // Primarily, ORI and OR are generated by copyRegToReg. But, there are other
  // cases where we can safely say that what's being done is really a move
  // (see how PowerPC does this -- it's the model for this code too.)
  switch (MI.getOpcode()) {
  default:
    break;
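  // An or-immediate (ORI/ORHI/ORBI) or add-immediate (AHI/AI) whose immediate
  // operand is zero leaves its source register unchanged, so it is effectively
  // a register-to-register move.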
  case SPU::ORIv4i32:
  case SPU::ORIr32:
  case SPU::ORHIv8i16:
  case SPU::ORHIr16:
  case SPU::ORHIi8i16:
  case SPU::ORBIv16i8:
  case SPU::ORBIr8:
  case SPU::ORIi16i32:
  case SPU::ORIi8i32:
  case SPU::AHIvec:
  case SPU::AHIr16:
  case SPU::AIvec:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isRegister() &&
           MI.getOperand(1).isRegister() &&
           MI.getOperand(2).isImmediate() &&
           "invalid SPU ORI/ORHI/ORBI/AHI/AI/SFI/SFHI instruction!");
    if (MI.getOperand(2).getImm() == 0) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::AIr32:
    assert(MI.getNumOperands() == 3 &&
           "wrong number of operands to AIr32");
    if (MI.getOperand(0).isRegister() &&
        (MI.getOperand(1).isRegister() ||
         MI.getOperand(1).isFrameIndex()) &&
        (MI.getOperand(2).isImmediate() &&
         MI.getOperand(2).getImm() == 0)) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
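  // An OR whose two source operands name the same register is a plain copy of
  // that register, regardless of the element type the opcode nominally uses.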
  case SPU::ORv16i8_i8:
  case SPU::ORv8i16_i16:
  case SPU::ORv4i32_i32:
  case SPU::ORv2i64_i64:
  case SPU::ORv4f32_f32:
  case SPU::ORv2f64_f64:
  case SPU::ORi8_v16i8:
  case SPU::ORi16_v8i16:
  case SPU::ORi32_v4i32:
  case SPU::ORi64_v2i64:
  case SPU::ORf32_v4f32:
  case SPU::ORf64_v2f64:
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isRegister() &&
           MI.getOperand(1).isRegister() &&
           MI.getOperand(2).isRegister() &&
           "invalid SPU OR(vec|r32|r64|gprc) instruction!");
    if (MI.getOperand(1).getReg() == MI.getOperand(2).getReg()) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }

  return false;
}

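/// isLoadFromStackSlot - If MI is a quadword load whose offset operand is the
/// immediate zero and whose address operand is a frame index, it loads
/// directly from a stack slot: set FrameIndex and return the destination
/// register. Otherwise return 0.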
unsigned
SPUInstrInfo::isLoadFromStackSlot(MachineInstr *MI, int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::LQDv16i8:
  case SPU::LQDv8i16:
  case SPU::LQDv4i32:
  case SPU::LQDv4f32:
  case SPU::LQDv2f64:
  case SPU::LQDr128:
  case SPU::LQDr64:
  case SPU::LQDr32:
  case SPU::LQDr16:
  case SPU::LQXv4i32:
  case SPU::LQXr128:
  case SPU::LQXr64:
  case SPU::LQXr32:
  case SPU::LQXr16:
    if (MI->getOperand(1).isImmediate() && !MI->getOperand(1).getImm() &&
        MI->getOperand(2).isFrameIndex()) {
      FrameIndex = MI->getOperand(2).getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  return 0;
}

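/// isStoreToStackSlot - If MI is a quadword store whose offset operand is the
/// immediate zero and whose address operand is a frame index, it stores
/// directly to a stack slot: set FrameIndex and return the register being
/// stored. Otherwise return 0.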
unsigned
SPUInstrInfo::isStoreToStackSlot(MachineInstr *MI, int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::STQDv16i8:
  case SPU::STQDv8i16:
  case SPU::STQDv4i32:
  case SPU::STQDv4f32:
  case SPU::STQDv2f64:
  case SPU::STQDr128:
  case SPU::STQDr64:
  case SPU::STQDr32:
  case SPU::STQDr16:
  // case SPU::STQDr8:
  case SPU::STQXv16i8:
  case SPU::STQXv8i16:
  case SPU::STQXv4i32:
  case SPU::STQXv4f32:
  case SPU::STQXv2f64:
  case SPU::STQXr128:
  case SPU::STQXr64:
  case SPU::STQXr32:
  case SPU::STQXr16:
  // case SPU::STQXr8:
    if (MI->getOperand(1).isImmediate() && !MI->getOperand(1).getImm() &&
        MI->getOperand(2).isFrameIndex()) {
      FrameIndex = MI->getOperand(2).getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  return 0;
}

void SPUInstrInfo::copyRegToReg(MachineBasicBlock &MBB,
                                MachineBasicBlock::iterator MI,
                                unsigned DestReg, unsigned SrcReg,
                                const TargetRegisterClass *DestRC,
                                const TargetRegisterClass *SrcRC) const
{
  // We support cross register class moves for our aliases, such as R3 in any
  // reg class to any other reg class containing R3.  This is required because
  // we instruction select bitconvert i64 -> f64 as a noop for example, so our
  // types have no specific meaning.

  //if (DestRC != SrcRC) {
  //  cerr << "SPUInstrInfo::copyRegToReg(): DestRC != SrcRC not supported!\n";
  //  abort();
  //}

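  // Scalar integer classes are copied with an or-immediate of zero; the
  // 64-bit, floating-point and vector classes are copied with OR dst, src,
  // src (both source operands the same register).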
  if (DestRC == SPU::R8CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORBIr8), DestReg).addReg(SrcReg).addImm(0);
  } else if (DestRC == SPU::R16CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORHIr16), DestReg).addReg(SrcReg).addImm(0);
  } else if (DestRC == SPU::R32CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORIr32), DestReg).addReg(SrcReg).addImm(0);
  } else if (DestRC == SPU::R32FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORf32), DestReg).addReg(SrcReg)
      .addReg(SrcReg);
  } else if (DestRC == SPU::R64CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORr64), DestReg).addReg(SrcReg)
      .addReg(SrcReg);
  } else if (DestRC == SPU::R64FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORf64), DestReg).addReg(SrcReg)
      .addReg(SrcReg);
  } /* else if (DestRC == SPU::GPRCRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORgprc), DestReg).addReg(SrcReg)
      .addReg(SrcReg);
  } */ else if (DestRC == SPU::VECREGRegisterClass) {
    BuildMI(MBB, MI, get(SPU::ORv4i32), DestReg).addReg(SrcReg)
      .addReg(SrcReg);
  } else {
    std::cerr << "Attempt to copy unknown/unsupported register class!\n";
    abort();
  }
}

void
SPUInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MI,
                                  unsigned SrcReg, bool isKill, int FrameIdx,
                                  const TargetRegisterClass *RC) const
{
  unsigned opc;
  if (RC == SPU::GPRCRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr128
      : SPU::STQXr128;
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr64
      : SPU::STQXr64;
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr64
      : SPU::STQXr64;
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr32
      : SPU::STQXr32;
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr32
      : SPU::STQXr32;
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::STQDr16
      : SPU::STQXr16;
  } else {
    assert(0 && "Unknown regclass!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc))
                    .addReg(SrcReg, false, false, isKill), FrameIdx);
}

void SPUInstrInfo::storeRegToAddr(MachineFunction &MF, unsigned SrcReg,
                                  bool isKill,
                                  SmallVectorImpl<MachineOperand> &Addr,
                                  const TargetRegisterClass *RC,
                                  SmallVectorImpl<MachineInstr*> &NewMIs) const {
  std::cerr << "storeRegToAddr() invoked!\n";
  abort();

  if (Addr[0].isFrameIndex()) {
    /* do what storeRegToStackSlot does here */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::GPRCRegisterClass) {
      /* Opc = PPC::STW; */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::STD; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::STFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::STVX; */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(get(Opc))
      .addReg(SrcReg, false, false, isKill);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isRegister())
        MIB.addReg(MO.getReg());
      else if (MO.isImmediate())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

void
SPUInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned DestReg, int FrameIdx,
                                   const TargetRegisterClass *RC) const
{
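  // Same opcode selection as storeRegToStackSlot: frame indices below
  // SPUFrameInfo::maxFrameOffset() use the d-form LQD loads, larger ones the
  // x-form LQX loads.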
  unsigned opc;
  if (RC == SPU::GPRCRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr128
      : SPU::LQXr128;
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr64
      : SPU::LQXr64;
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr64
      : SPU::LQXr64;
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr32
      : SPU::LQXr32;
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr32
      : SPU::LQXr32;
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (FrameIdx < SPUFrameInfo::maxFrameOffset())
      ? SPU::LQDr16
      : SPU::LQXr16;
  } else {
    assert(0 && "Unknown regclass in loadRegFromStackSlot!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc)).addReg(DestReg), FrameIdx);
}

/*!
  \note We are really pessimistic here about what kind of a load we're doing.
 */
void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                                   SmallVectorImpl<MachineOperand> &Addr,
                                   const TargetRegisterClass *RC,
                                   SmallVectorImpl<MachineInstr*> &NewMIs)
  const {
  std::cerr << "loadRegFromAddr() invoked!\n";
  abort();

  if (Addr[0].isFrameIndex()) {
    /* do what loadRegFromStackSlot does here... */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::R8CRegisterClass) {
      /* do brilliance here */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::LWZ; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::LD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::LFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::LFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::LVX; */
    } else if (RC == SPU::GPRCRegisterClass) {
      /* Opc = something else! */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(get(Opc), DestReg);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isRegister())
        MIB.addReg(MO.getReg());
      else if (MO.isImmediate())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

/// foldMemoryOperand - SPU, like PPC, can only fold spills into
/// copy instructions, turning them into load/store instructions.
MachineInstr *
SPUInstrInfo::foldMemoryOperand(MachineFunction &MF,
                                MachineInstr *MI,
                                SmallVectorImpl<unsigned> &Ops,
                                int FrameIndex) const
{
#if SOMEDAY_SCOTT_LOOKS_AT_ME_AGAIN
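  // Disabled sketch: when MI is an OR dst, src, src register move and the
  // operand being folded is the source, replace the move with a store of src
  // to the stack slot; when it is the destination, replace the move with a
  // load of the slot into dst.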
  if (Ops.size() != 1) return NULL;

  unsigned OpNum = Ops[0];
  unsigned Opc = MI->getOpcode();
  MachineInstr *NewMI = 0;

  if ((Opc == SPU::ORr32
       || Opc == SPU::ORv4i32)
      && MI->getOperand(1).getReg() == MI->getOperand(2).getReg()) {
    if (OpNum == 0) {  // move -> store
      unsigned InReg = MI->getOperand(1).getReg();
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        NewMI = addFrameReference(BuildMI(get(SPU::STQDr32)).addReg(InReg),
                                  FrameIndex);
      }
    } else {           // move -> load
      unsigned OutReg = MI->getOperand(0).getReg();
      Opc = (FrameIndex < SPUFrameInfo::maxFrameOffset()) ? SPU::LQDr32 : SPU::LQXr32;
      NewMI = addFrameReference(BuildMI(get(Opc), OutReg), FrameIndex);
    }
  }

  if (NewMI)
    NewMI->copyKillDeadInfo(MI);

  return NewMI;
#else
  return 0;
#endif
}