//===- SPUInstrInfo.cpp - Cell SPU Instruction Information ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License.  See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the Cell SPU implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "SPURegisterNames.h"
#include "SPUInstrInfo.h"
#include "SPUInstrBuilder.h"
#include "SPUTargetMachine.h"
#include "SPUGenInstrInfo.inc"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/Support/Streams.h"
#include "llvm/Support/Debug.h"

using namespace llvm;

namespace {
  //! Predicate for an unconditional branch instruction
  inline bool isUncondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BR
            || opc == SPU::BRA
            || opc == SPU::BI);
  }

  //! Predicate for a conditional branch instruction
  inline bool isCondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BRNZr32
            || opc == SPU::BRNZv4i32
            || opc == SPU::BRZr32
            || opc == SPU::BRZv4i32
            || opc == SPU::BRHNZr16
            || opc == SPU::BRHNZv8i16
            || opc == SPU::BRHZr16
            || opc == SPU::BRHZv8i16);
  }
}

SPUInstrInfo::SPUInstrInfo(SPUTargetMachine &tm)
  : TargetInstrInfoImpl(SPUInsts, sizeof(SPUInsts)/sizeof(SPUInsts[0])),
    TM(tm),
    RI(*TM.getSubtargetImpl(), *this)
{ /* NOP */ }

/// getPointerRegClass - Return the register class to use to hold pointers.
/// This is used for addressing modes.
const TargetRegisterClass *
SPUInstrInfo::getPointerRegClass() const
{
  return &SPU::R32CRegClass;
}

bool
SPUInstrInfo::isMoveInstr(const MachineInstr& MI,
                          unsigned& sourceReg,
                          unsigned& destReg,
                          unsigned& SrcSR, unsigned& DstSR) const {
  SrcSR = DstSR = 0;  // No sub-registers.

  // Primarily, LR and OR are generated by copyRegToReg, but there are other
  // cases where we can safely say that what's being done is really a move
  // (see how PowerPC does this; it's the model for this code, too).
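  // Three patterns are treated as register moves here:
  //   (1) the immediate OR/add forms (ORI/ORHI/ORBI/AHI/AI) whose immediate
  //       operand is zero,
  //   (2) LR and the OR<type>_<vec> forms whose destination and source
  //       registers coincide,
  //   (3) the plain OR forms whose two source register operands are identical.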
  switch (MI.getOpcode()) {
  default:
    break;
  case SPU::ORIv4i32:
  case SPU::ORIr32:
  case SPU::ORHIv8i16:
  case SPU::ORHIr16:
  case SPU::ORHIi8i16:
  case SPU::ORBIv16i8:
  case SPU::ORBIr8:
  case SPU::ORIi16i32:
  case SPU::ORIi8i32:
  case SPU::AHIvec:
  case SPU::AHIr16:
  case SPU::AIv4i32:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isImm() &&
           "invalid SPU ORI/ORHI/ORBI/AHI/AI/SFI/SFHI instruction!");
    if (MI.getOperand(2).getImm() == 0) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::AIr32:
    assert(MI.getNumOperands() == 3 &&
           "wrong number of operands to AIr32");
    if (MI.getOperand(0).isReg() &&
        MI.getOperand(1).isReg() &&
        (MI.getOperand(2).isImm() &&
         MI.getOperand(2).getImm() == 0)) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::LRr8:
  case SPU::LRr16:
  case SPU::LRr32:
  case SPU::LRf32:
  case SPU::LRr64:
  case SPU::LRf64:
  case SPU::LRr128:
  case SPU::LRv16i8:
  case SPU::LRv8i16:
  case SPU::LRv4i32:
  case SPU::LRv4f32:
  case SPU::LRv2i64:
  case SPU::LRv2f64:
  case SPU::ORv16i8_i8:
  case SPU::ORv8i16_i16:
  case SPU::ORv4i32_i32:
  case SPU::ORv2i64_i64:
  case SPU::ORv4f32_f32:
  case SPU::ORv2f64_f64:
  /*
  case SPU::ORi128_r64:
  case SPU::ORi128_f64:
  case SPU::ORi128_r32:
  case SPU::ORi128_f32:
  case SPU::ORi128_r16:
  case SPU::ORi128_r8:
  case SPU::ORi128_vec:
  case SPU::ORr64_i128:
  case SPU::ORf64_i128:
  case SPU::ORr32_i128:
  case SPU::ORf32_i128:
  case SPU::ORr16_i128:
  case SPU::ORr8_i128:
  case SPU::ORvec_i128:
  */
  /*
  case SPU::ORr16_r32:
  case SPU::ORr8_r32:
  case SPU::ORr32_r16:
  case SPU::ORr32_r8:
  case SPU::ORr16_r64:
  case SPU::ORr8_r64:
  case SPU::ORr64_r16:
  case SPU::ORr64_r8:
  */
  case SPU::ORr64_r32:
  case SPU::ORr32_r64:
  case SPU::ORf32_r32:
  case SPU::ORr32_f32:
  case SPU::ORf64_r64:
  case SPU::ORr64_f64: {
    assert(MI.getNumOperands() == 2 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           "invalid SPU OR<type>_<vec> or LR instruction!");
    if (MI.getOperand(0).getReg() == MI.getOperand(1).getReg()) {
      sourceReg = MI.getOperand(0).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isReg() &&
           "invalid SPU OR(vec|r32|r64|gprc) instruction!");
    if (MI.getOperand(1).getReg() == MI.getOperand(2).getReg()) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }

  return false;
}

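/// isLoadFromStackSlot - If MI is a quadword load (LQD) whose address is a
/// frame index plus an immediate offset, set FrameIndex and return the
/// register being defined; otherwise return 0.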
unsigned
SPUInstrInfo::isLoadFromStackSlot(const MachineInstr *MI,
                                  int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::LQDv16i8:
  case SPU::LQDv8i16:
  case SPU::LQDv4i32:
  case SPU::LQDv4f32:
  case SPU::LQDv2f64:
  case SPU::LQDr128:
  case SPU::LQDr64:
  case SPU::LQDr32:
  case SPU::LQDr16: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

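/// isStoreToStackSlot - If MI is a quadword store (STQD) whose address is a
/// frame index plus an immediate offset, set FrameIndex and return the
/// register being stored; otherwise return 0.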
unsigned
SPUInstrInfo::isStoreToStackSlot(const MachineInstr *MI,
                                 int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::STQDv16i8:
  case SPU::STQDv8i16:
  case SPU::STQDv4i32:
  case SPU::STQDv4f32:
  case SPU::STQDv2f64:
  case SPU::STQDr128:
  case SPU::STQDr64:
  case SPU::STQDr32:
  case SPU::STQDr16:
  case SPU::STQDr8: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

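// copyRegToReg - Emit a register-to-register copy.  Each supported register
// class is copied with the LR instruction of the matching type; an
// unrecognized register class is rejected by returning false.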
bool SPUInstrInfo::copyRegToReg(MachineBasicBlock &MBB,
                                MachineBasicBlock::iterator MI,
                                unsigned DestReg, unsigned SrcReg,
                                const TargetRegisterClass *DestRC,
                                const TargetRegisterClass *SrcRC) const
{
  // We support cross-register-class moves for our aliases, such as R3 in any
  // reg class to any other reg class containing R3.  This is required because
  // we instruction-select bitconvert i64 -> f64 as a no-op, for example, so
  // our types have no specific meaning.

  if (DestRC == SPU::R8CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr8), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R16CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr16), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R32CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr32), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R32FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRf32), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R64CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr64), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R64FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRf64), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::GPRCRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr128), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::VECREGRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRv16i8), DestReg).addReg(SrcReg);
  } else {
    // Attempt to copy unknown/unsupported register class!
    return false;
  }

  return true;
}

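// storeRegToStackSlot - Spill SrcReg to the stack slot FrameIdx, selecting a
// d-form store (STQD) when the frame index is below
// SPUFrameInfo::maxFrameOffset() and an x-form store (STQX) otherwise.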
void
SPUInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MI,
                                  unsigned SrcReg, bool isKill, int FrameIdx,
                                  const TargetRegisterClass *RC) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr128 : SPU::STQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr16 : SPU::STQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr8 : SPU::STQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDv16i8 : SPU::STQXv16i8);
  } else {
    assert(0 && "Unknown regclass!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc))
                    .addReg(SrcReg, false, false, isKill), FrameIdx);
}

void SPUInstrInfo::storeRegToAddr(MachineFunction &MF, unsigned SrcReg,
                                  bool isKill,
                                  SmallVectorImpl<MachineOperand> &Addr,
                                  const TargetRegisterClass *RC,
                                  SmallVectorImpl<MachineInstr*> &NewMIs) const {
  cerr << "storeRegToAddr() invoked!\n";
  abort();

  if (Addr[0].isFI()) {
    /* do what storeRegToStackSlot does here */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::GPRCRegisterClass) {
      /* Opc = PPC::STW; */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::STD; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::STFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::STVX; */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc))
      .addReg(SrcReg, false, false, isKill);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

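// loadRegFromStackSlot - Reload DestReg from the stack slot FrameIdx,
// selecting a d-form load (LQD) when the frame index is below
// SPUFrameInfo::maxFrameOffset() and an x-form load (LQX) otherwise.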
void
SPUInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned DestReg, int FrameIdx,
                                   const TargetRegisterClass *RC) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr128 : SPU::LQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr16 : SPU::LQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr8 : SPU::LQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDv16i8 : SPU::LQXv16i8);
  } else {
    assert(0 && "Unknown regclass in loadRegFromStackSlot!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc)).addReg(DestReg), FrameIdx);
}

/*!
  \note We are really pessimistic here about what kind of a load we're doing.
*/
void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                                   SmallVectorImpl<MachineOperand> &Addr,
                                   const TargetRegisterClass *RC,
                                   SmallVectorImpl<MachineInstr*> &NewMIs)
  const {
  cerr << "loadRegFromAddr() invoked!\n";
  abort();

  if (Addr[0].isFI()) {
    /* do what loadRegFromStackSlot does here... */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::R8CRegisterClass) {
      /* do brilliance here */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::LWZ; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::LD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::LFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::LFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::LVX; */
    } else if (RC == SPU::GPRCRegisterClass) {
      /* Opc = something else! */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc), DestReg);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

//! Return true if the specified reg-reg copy can be folded into a load/store
bool
SPUInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
                                   const SmallVectorImpl<unsigned> &Ops) const {
  if (Ops.size() != 1) return false;

  // Make sure this is a reg-reg copy.
  unsigned Opc = MI->getOpcode();

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (MI->getOperand(1).getReg() == MI->getOperand(2).getReg())
      return true;
    break;
  }

  return false;
}

/// foldMemoryOperand - SPU, like PPC, can only fold spills into
/// copy instructions, turning them into load/store instructions.
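/// When the folded operand is the copy's definition (OpNum == 0), the copy is
/// rewritten as a store of the source register into the stack slot; otherwise
/// it is rewritten as a load of the stack slot into the destination register.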
MachineInstr *
SPUInstrInfo::foldMemoryOperandImpl(MachineFunction &MF,
                                    MachineInstr *MI,
                                    const SmallVectorImpl<unsigned> &Ops,
                                    int FrameIndex) const
{
  if (Ops.size() != 1) return 0;

  unsigned OpNum = Ops[0];
  unsigned Opc = MI->getOpcode();
  MachineInstr *NewMI = 0;

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (OpNum == 0) {  // move -> store
      unsigned InReg = MI->getOperand(1).getReg();
      bool isKill = MI->getOperand(1).isKill();
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        MachineInstrBuilder MIB = BuildMI(MF, get(SPU::STQDr32));

        MIB.addReg(InReg, false, false, isKill);
        NewMI = addFrameReference(MIB, FrameIndex);
      }
    } else {           // move -> load
      unsigned OutReg = MI->getOperand(0).getReg();
      bool isDead = MI->getOperand(0).isDead();
      // Reload the stack slot into OutReg, using the d-form load when the
      // slot is addressable with an immediate offset, the x-form otherwise.
      Opc = (FrameIndex < SPUFrameInfo::maxFrameOffset())
              ? SPU::LQDr32 : SPU::LQXr32;
      MachineInstrBuilder MIB = BuildMI(MF, get(Opc));

      MIB.addReg(OutReg, true, false, false, isDead);
      NewMI = addFrameReference(MIB, FrameIndex);
      break;
    }
  }

  return NewMI;
}

//! Branch analysis
/*!
  \note This code was adapted from the PowerPC backend.  There may be more
  branch analysis possible for CellSPU than what is currently done here.
*/
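// For a conditional branch, the condition is returned as a two-element Cond
// vector: Cond[0] holds the branch opcode as an immediate and Cond[1] holds
// the register operand being tested.  InsertBranch and ReverseBranchCondition
// below rely on this encoding.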
bool
SPUInstrInfo::AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                            MachineBasicBlock *&FBB,
                            SmallVectorImpl<MachineOperand> &Cond) const {
  // If the block has no terminators, it just falls into the block after it.
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin() || !isUnpredicatedTerminator(--I))
    return false;

  // Get the last instruction in the block.
  MachineInstr *LastInst = I;

  // If there is only one terminator instruction, process it.
  if (I == MBB.begin() || !isUnpredicatedTerminator(--I)) {
    if (isUncondBranch(LastInst)) {
      TBB = LastInst->getOperand(0).getMBB();
      return false;
    } else if (isCondBranch(LastInst)) {
      // Block ends with fall-through condbranch.
      TBB = LastInst->getOperand(1).getMBB();
      DEBUG(cerr << "Pushing LastInst: ");
      DEBUG(LastInst->dump());
      Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
      Cond.push_back(LastInst->getOperand(0));
      return false;
    }
    // Otherwise, don't know what this is.
    return true;
  }

  // Get the instruction before it if it's a terminator.
  MachineInstr *SecondLastInst = I;

  // If there are three terminators, we don't know what sort of block this is.
  if (SecondLastInst && I != MBB.begin() &&
      isUnpredicatedTerminator(--I))
    return true;

  // If the block ends with a conditional and unconditional branch, handle it.
  if (isCondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(1).getMBB();
    DEBUG(cerr << "Pushing SecondLastInst: ");
    DEBUG(SecondLastInst->dump());
    Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
    Cond.push_back(SecondLastInst->getOperand(0));
    FBB = LastInst->getOperand(0).getMBB();
    return false;
  }

  // If the block ends with two unconditional branches, handle it.  The second
  // one is not executed, so remove it.
  if (isUncondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(0).getMBB();
    I = LastInst;
    I->eraseFromParent();
    return false;
  }

  // Otherwise, can't handle this.
  return true;
}

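//! Remove the terminating branch instructions (at most two) from MBB,
//! returning the number of instructions erased.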
unsigned
SPUInstrInfo::RemoveBranch(MachineBasicBlock &MBB) const {
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return 0;
  --I;
  if (!isCondBranch(I) && !isUncondBranch(I))
    return 0;

  // Remove the first branch.
  DEBUG(cerr << "Removing branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  I = MBB.end();
  if (I == MBB.begin())
    return 1;

  --I;
  if (!(isCondBranch(I) || isUncondBranch(I)))
    return 1;

  // Remove the second branch.
  DEBUG(cerr << "Removing second branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  return 2;
}

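//! Insert an unconditional, conditional, or two-way branch at the end of MBB
/*!
  An empty Cond produces an unconditional BR to TBB.  Otherwise the opcode in
  Cond[0] and the register in Cond[1] form a conditional branch to TBB, and a
  trailing unconditional BR to FBB is added when FBB is non-null.  Returns the
  number of instructions inserted.
*/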
unsigned
SPUInstrInfo::InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                           MachineBasicBlock *FBB,
                           const SmallVectorImpl<MachineOperand> &Cond) const {
  // Shouldn't be a fall through.
  assert(TBB && "InsertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 2 || Cond.size() == 0) &&
         "SPU branch conditions have two components!");

  // One-way branch.
  if (FBB == 0) {
    if (Cond.empty()) {
      // Unconditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, get(SPU::BR));
      MIB.addMBB(TBB);

      DEBUG(cerr << "Inserted one-way uncond branch: ");
      DEBUG((*MIB).dump());
    } else {
      // Conditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm()));
      MIB.addReg(Cond[1].getReg()).addMBB(TBB);

      DEBUG(cerr << "Inserted one-way cond branch: ");
      DEBUG((*MIB).dump());
    }
    return 1;
  } else {
    MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm()));
    MachineInstrBuilder MIB2 = BuildMI(&MBB, get(SPU::BR));

    // Two-way Conditional Branch.
    MIB.addReg(Cond[1].getReg()).addMBB(TBB);
    MIB2.addMBB(FBB);

    DEBUG(cerr << "Inserted conditional branch: ");
    DEBUG((*MIB).dump());
    DEBUG(cerr << "part 2: ");
    DEBUG((*MIB2).dump());
    return 2;
  }
}

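//! A block cannot fall through if it ends in an unconditional branch.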
bool
SPUInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
  return (!MBB.empty() && isUncondBranch(&MBB.back()));
}

//! Reverses a branch's condition, returning false on success.
bool
SPUInstrInfo::ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond)
  const {
  // Pretty brainless way of inverting the condition, but it works, considering
  // there are only two conditions...
  static struct {
    unsigned Opc;               //! The incoming opcode
    unsigned RevCondOpc;        //! The reversed condition opcode
  } revconds[] = {
    { SPU::BRNZr32,   SPU::BRZr32 },
    { SPU::BRNZv4i32, SPU::BRZv4i32 },
    { SPU::BRZr32,    SPU::BRNZr32 },
    { SPU::BRZv4i32,  SPU::BRNZv4i32 },
    { SPU::BRHNZr16,  SPU::BRHZr16 },
    { SPU::BRHNZv8i16, SPU::BRHZv8i16 },
    { SPU::BRHZr16,   SPU::BRHNZr16 },
    { SPU::BRHZv8i16, SPU::BRHNZv8i16 }
  };

  unsigned Opc = unsigned(Cond[0].getImm());
  // Pretty dull mapping between the two conditions that SPU can generate:
  for (int i = sizeof(revconds)/sizeof(revconds[0]) - 1; i >= 0; --i) {
    if (revconds[i].Opc == Opc) {
      Cond[0].setImm(revconds[i].RevCondOpc);
      return false;
    }
  }

  return true;
}