Chris Lattner | 4ee451d | 2007-12-29 20:36:04 +0000 | [diff] [blame] | 1 | //===- SPUInstrInfo.cpp - Cell SPU Instruction Information ----------------===// |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 2 | // |
| 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
Chris Lattner | 4ee451d | 2007-12-29 20:36:04 +0000 | [diff] [blame] | 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This file contains the Cell SPU implementation of the TargetInstrInfo class. |
| 11 | // |
| 12 | //===----------------------------------------------------------------------===// |
| 13 | |
| 14 | #include "SPURegisterNames.h" |
| 15 | #include "SPUInstrInfo.h" |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 16 | #include "SPUInstrBuilder.h" |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 17 | #include "SPUTargetMachine.h" |
| 18 | #include "SPUGenInstrInfo.inc" |
| 19 | #include "llvm/CodeGen/MachineInstrBuilder.h" |
Bill Wendling | eecfa36 | 2008-05-29 21:46:33 +0000 | [diff] [blame] | 20 | #include "llvm/Support/Streams.h" |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 21 | #include "llvm/Support/Debug.h" |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 22 | |
| 23 | using namespace llvm; |
| 24 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 25 | namespace { |
| 26 | //! Predicate for an unconditional branch instruction |
| 27 | inline bool isUncondBranch(const MachineInstr *I) { |
| 28 | unsigned opc = I->getOpcode(); |
| 29 | |
| 30 | return (opc == SPU::BR |
Scott Michel | 19c10e6 | 2009-01-26 03:37:41 +0000 | [diff] [blame] | 31 | || opc == SPU::BRA |
| 32 | || opc == SPU::BI); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 33 | } |
| 34 | |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 35 | //! Predicate for a conditional branch instruction |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 36 | inline bool isCondBranch(const MachineInstr *I) { |
| 37 | unsigned opc = I->getOpcode(); |
| 38 | |
Scott Michel | f0569be | 2008-12-27 04:51:36 +0000 | [diff] [blame] | 39 | return (opc == SPU::BRNZr32 |
| 40 | || opc == SPU::BRNZv4i32 |
Scott Michel | 19c10e6 | 2009-01-26 03:37:41 +0000 | [diff] [blame] | 41 | || opc == SPU::BRZr32 |
| 42 | || opc == SPU::BRZv4i32 |
| 43 | || opc == SPU::BRHNZr16 |
| 44 | || opc == SPU::BRHNZv8i16 |
| 45 | || opc == SPU::BRHZr16 |
| 46 | || opc == SPU::BRHZv8i16); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 47 | } |
| 48 | } |
| 49 | |
//! Construct the Cell SPU instruction-info object.
/*!
  Passes the tablegen'erated instruction descriptor array (SPUInsts) and its
  element count to the TargetInstrInfoImpl base, records the owning target
  machine, and initializes the register info (RI) against this target's
  subtarget and this TargetInstrInfo instance.
 */
SPUInstrInfo::SPUInstrInfo(SPUTargetMachine &tm)
  : TargetInstrInfoImpl(SPUInsts, sizeof(SPUInsts)/sizeof(SPUInsts[0])),
    TM(tm),
    RI(*TM.getSubtargetImpl(), *this)
{ /* NOP */ }
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 55 | |
//! Decide whether \a MI has the effect of a register-to-register move
/*!
  \param MI        the instruction to inspect
  \param sourceReg [out] on success, the register being copied from
  \param destReg   [out] on success, the register being copied to
  \param SrcSR     [out] source sub-register index; always 0 (no sub-regs)
  \param DstSR     [out] destination sub-register index; always 0
  \return true iff \a MI is effectively a plain register copy

  Three instruction shapes are recognized:
  - OR/add forms with a zero immediate (reg = reg op 0);
  - the two-operand "move" pseudos (LR* and the OR<type>_<type> conversion
    pseudos), treated as a move when both register operands match;
  - a plain three-register OR whose two source registers are identical.
*/
bool
SPUInstrInfo::isMoveInstr(const MachineInstr& MI,
                          unsigned& sourceReg,
                          unsigned& destReg,
                          unsigned& SrcSR, unsigned& DstSR) const {
  SrcSR = DstSR = 0;  // No sub-registers.

  // Primarily, ORI and OR are generated by copyRegToReg. But, there are other
  // cases where we can safely say that what's being done is really a move
  // (see how PowerPC does this -- it's the model for this code too.)
  switch (MI.getOpcode()) {
  default:
    break;
  // --- (reg, reg, imm) forms: a move exactly when the immediate is 0 ---
  case SPU::ORIv4i32:
  case SPU::ORIr32:
  case SPU::ORHIv8i16:
  case SPU::ORHIr16:
  case SPU::ORHIi8i16:
  case SPU::ORBIv16i8:
  case SPU::ORBIr8:
  case SPU::ORIi16i32:
  case SPU::ORIi8i32:
  case SPU::AHIvec:
  case SPU::AHIr16:
  case SPU::AIv4i32:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isImm() &&
           "invalid SPU ORI/ORHI/ORBI/AHI/AI/SFI/SFHI instruction!");
    if (MI.getOperand(2).getImm() == 0) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::AIr32:
    // Same zero-immediate test as above, but the operand kinds are checked
    // at runtime rather than asserted.
    assert(MI.getNumOperands() == 3 &&
           "wrong number of operands to AIr32");
    if (MI.getOperand(0).isReg() &&
        MI.getOperand(1).isReg() &&
        (MI.getOperand(2).isImm() &&
         MI.getOperand(2).getImm() == 0)) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  // --- two-operand move pseudos (LR*, OR<type>_<type> conversions) ---
  case SPU::LRr8:
  case SPU::LRr16:
  case SPU::LRr32:
  case SPU::LRf32:
  case SPU::LRr64:
  case SPU::LRf64:
  case SPU::LRr128:
  case SPU::LRv16i8:
  case SPU::LRv8i16:
  case SPU::LRv4i32:
  case SPU::LRv4f32:
  case SPU::LRv2i64:
  case SPU::LRv2f64:
  case SPU::ORv16i8_i8:
  case SPU::ORv8i16_i16:
  case SPU::ORv4i32_i32:
  case SPU::ORv2i64_i64:
  case SPU::ORv4f32_f32:
  case SPU::ORv2f64_f64:
  case SPU::ORi8_v16i8:
  case SPU::ORi16_v8i16:
  case SPU::ORi32_v4i32:
  case SPU::ORi64_v2i64:
  case SPU::ORf32_v4f32:
  case SPU::ORf64_v2f64:
  // The i128 conversion pseudos below are deliberately excluded from the
  // move test (left commented out, not deleted):
  /*
  case SPU::ORi128_r64:
  case SPU::ORi128_f64:
  case SPU::ORi128_r32:
  case SPU::ORi128_f32:
  case SPU::ORi128_r16:
  case SPU::ORi128_r8:
  case SPU::ORi128_vec:
  case SPU::ORr64_i128:
  case SPU::ORf64_i128:
  case SPU::ORr32_i128:
  case SPU::ORf32_i128:
  case SPU::ORr16_i128:
  case SPU::ORr8_i128:
  case SPU::ORvec_i128:
  */
  /*
  case SPU::ORr16_r32:
  case SPU::ORr8_r32:
  case SPU::ORr32_r16:
  case SPU::ORr32_r8:
  case SPU::ORr16_r64:
  case SPU::ORr8_r64:
  case SPU::ORr64_r16:
  case SPU::ORr64_r8:
  */
  case SPU::ORr64_r32:
  case SPU::ORr32_r64:
  case SPU::ORf32_r32:
  case SPU::ORr32_f32:
  case SPU::ORf64_r64:
  case SPU::ORr64_f64: {
    assert(MI.getNumOperands() == 2 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           "invalid SPU OR<type>_<vec> or LR instruction!");
    // Only a move when both operands name the same register; source and
    // dest are then (trivially) that one register.
    if (MI.getOperand(0).getReg() == MI.getOperand(1).getReg()) {
      sourceReg = MI.getOperand(0).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }
  // --- three-register OR: a move when both source registers match ---
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isReg() &&
           "invalid SPU OR(vec|r32|r64|gprc) instruction!");
    if (MI.getOperand(1).getReg() == MI.getOperand(2).getReg()) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }

  return false;
}
| 197 | |
| 198 | unsigned |
Dan Gohman | cbad42c | 2008-11-18 19:49:32 +0000 | [diff] [blame] | 199 | SPUInstrInfo::isLoadFromStackSlot(const MachineInstr *MI, |
| 200 | int &FrameIndex) const { |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 201 | switch (MI->getOpcode()) { |
| 202 | default: break; |
| 203 | case SPU::LQDv16i8: |
| 204 | case SPU::LQDv8i16: |
| 205 | case SPU::LQDv4i32: |
| 206 | case SPU::LQDv4f32: |
| 207 | case SPU::LQDv2f64: |
| 208 | case SPU::LQDr128: |
| 209 | case SPU::LQDr64: |
| 210 | case SPU::LQDr32: |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 211 | case SPU::LQDr16: { |
| 212 | const MachineOperand MOp1 = MI->getOperand(1); |
| 213 | const MachineOperand MOp2 = MI->getOperand(2); |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 214 | if (MOp1.isImm() && MOp2.isFI()) { |
| 215 | FrameIndex = MOp2.getIndex(); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 216 | return MI->getOperand(0).getReg(); |
| 217 | } |
| 218 | break; |
| 219 | } |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 220 | } |
| 221 | return 0; |
| 222 | } |
| 223 | |
| 224 | unsigned |
Dan Gohman | cbad42c | 2008-11-18 19:49:32 +0000 | [diff] [blame] | 225 | SPUInstrInfo::isStoreToStackSlot(const MachineInstr *MI, |
| 226 | int &FrameIndex) const { |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 227 | switch (MI->getOpcode()) { |
| 228 | default: break; |
| 229 | case SPU::STQDv16i8: |
| 230 | case SPU::STQDv8i16: |
| 231 | case SPU::STQDv4i32: |
| 232 | case SPU::STQDv4f32: |
| 233 | case SPU::STQDv2f64: |
| 234 | case SPU::STQDr128: |
| 235 | case SPU::STQDr64: |
| 236 | case SPU::STQDr32: |
| 237 | case SPU::STQDr16: |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 238 | case SPU::STQDr8: { |
| 239 | const MachineOperand MOp1 = MI->getOperand(1); |
| 240 | const MachineOperand MOp2 = MI->getOperand(2); |
Scott Michel | f0569be | 2008-12-27 04:51:36 +0000 | [diff] [blame] | 241 | if (MOp1.isImm() && MOp2.isFI()) { |
| 242 | FrameIndex = MOp2.getIndex(); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 243 | return MI->getOperand(0).getReg(); |
| 244 | } |
| 245 | break; |
| 246 | } |
Scott Michel | 6637752 | 2007-12-04 22:35:58 +0000 | [diff] [blame] | 247 | } |
| 248 | return 0; |
| 249 | } |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 250 | |
Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 251 | bool SPUInstrInfo::copyRegToReg(MachineBasicBlock &MBB, |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 252 | MachineBasicBlock::iterator MI, |
| 253 | unsigned DestReg, unsigned SrcReg, |
| 254 | const TargetRegisterClass *DestRC, |
| 255 | const TargetRegisterClass *SrcRC) const |
| 256 | { |
Chris Lattner | 5e09da2 | 2008-03-09 20:31:11 +0000 | [diff] [blame] | 257 | // We support cross register class moves for our aliases, such as R3 in any |
| 258 | // reg class to any other reg class containing R3. This is required because |
| 259 | // we instruction select bitconvert i64 -> f64 as a noop for example, so our |
| 260 | // types have no specific meaning. |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 261 | |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 262 | DebugLoc DL = DebugLoc::getUnknownLoc(); |
| 263 | if (MI != MBB.end()) DL = MI->getDebugLoc(); |
| 264 | |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 265 | if (DestRC == SPU::R8CRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 266 | BuildMI(MBB, MI, DL, get(SPU::LRr8), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 267 | } else if (DestRC == SPU::R16CRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 268 | BuildMI(MBB, MI, DL, get(SPU::LRr16), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 269 | } else if (DestRC == SPU::R32CRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 270 | BuildMI(MBB, MI, DL, get(SPU::LRr32), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 271 | } else if (DestRC == SPU::R32FPRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 272 | BuildMI(MBB, MI, DL, get(SPU::LRf32), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 273 | } else if (DestRC == SPU::R64CRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 274 | BuildMI(MBB, MI, DL, get(SPU::LRr64), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 275 | } else if (DestRC == SPU::R64FPRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 276 | BuildMI(MBB, MI, DL, get(SPU::LRf64), DestReg).addReg(SrcReg); |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 277 | } else if (DestRC == SPU::GPRCRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 278 | BuildMI(MBB, MI, DL, get(SPU::LRr128), DestReg).addReg(SrcReg); |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 279 | } else if (DestRC == SPU::VECREGRegisterClass) { |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 280 | BuildMI(MBB, MI, DL, get(SPU::LRv16i8), DestReg).addReg(SrcReg); |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 281 | } else { |
Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 282 | // Attempt to copy unknown/unsupported register class! |
| 283 | return false; |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 284 | } |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 285 | |
Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 286 | return true; |
Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 287 | } |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 288 | |
| 289 | void |
| 290 | SPUInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB, |
| 291 | MachineBasicBlock::iterator MI, |
| 292 | unsigned SrcReg, bool isKill, int FrameIdx, |
| 293 | const TargetRegisterClass *RC) const |
| 294 | { |
Chris Lattner | cc8cd0c | 2008-01-07 02:48:55 +0000 | [diff] [blame] | 295 | unsigned opc; |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 296 | bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset()); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 297 | if (RC == SPU::GPRCRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 298 | opc = (isValidFrameIdx ? SPU::STQDr128 : SPU::STQXr128); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 299 | } else if (RC == SPU::R64CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 300 | opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 301 | } else if (RC == SPU::R64FPRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 302 | opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 303 | } else if (RC == SPU::R32CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 304 | opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 305 | } else if (RC == SPU::R32FPRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 306 | opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 307 | } else if (RC == SPU::R16CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 308 | opc = (isValidFrameIdx ? SPU::STQDr16 : SPU::STQXr16); |
| 309 | } else if (RC == SPU::R8CRegisterClass) { |
| 310 | opc = (isValidFrameIdx ? SPU::STQDr8 : SPU::STQXr8); |
Scott Michel | f0569be | 2008-12-27 04:51:36 +0000 | [diff] [blame] | 311 | } else if (RC == SPU::VECREGRegisterClass) { |
| 312 | opc = (isValidFrameIdx) ? SPU::STQDv16i8 : SPU::STQXv16i8; |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 313 | } else { |
| 314 | assert(0 && "Unknown regclass!"); |
| 315 | abort(); |
| 316 | } |
| 317 | |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 318 | DebugLoc DL = DebugLoc::getUnknownLoc(); |
| 319 | if (MI != MBB.end()) DL = MI->getDebugLoc(); |
| 320 | addFrameReference(BuildMI(MBB, MI, DL, get(opc)) |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 321 | .addReg(SrcReg, false, false, isKill), FrameIdx); |
| 322 | } |
| 323 | |
//! Store a register to an arbitrary address (UNIMPLEMENTED)
/*!
  \note This routine is not implemented: it unconditionally prints a
  diagnostic and abort()s.  Everything after the abort() is unreachable
  scaffolding -- a sketch modeled on the PPC backend with the actual opcodes
  left commented out -- kept for a future implementation.
*/
void SPUInstrInfo::storeRegToAddr(MachineFunction &MF, unsigned SrcReg,
                                  bool isKill,
                                  SmallVectorImpl<MachineOperand> &Addr,
                                  const TargetRegisterClass *RC,
                                  SmallVectorImpl<MachineInstr*> &NewMIs) const {
  cerr << "storeRegToAddr() invoked!\n";
  abort();

  // --- everything below is unreachable (see abort() above) ---
  if (Addr[0].isFI()) {
    /* do what storeRegToStackSlot does here */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::GPRCRegisterClass) {
      /* Opc = PPC::STW; */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::STD; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::STFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::STVX; */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    // NOTE(review): Opc is still 0 at this point -- the opcode selection
    // above must be filled in before this BuildMI could ever be correct.
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc))
      .addReg(SrcReg, false, false, isKill);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}
| 366 | |
| 367 | void |
| 368 | SPUInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB, |
| 369 | MachineBasicBlock::iterator MI, |
| 370 | unsigned DestReg, int FrameIdx, |
| 371 | const TargetRegisterClass *RC) const |
| 372 | { |
Chris Lattner | cc8cd0c | 2008-01-07 02:48:55 +0000 | [diff] [blame] | 373 | unsigned opc; |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 374 | bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset()); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 375 | if (RC == SPU::GPRCRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 376 | opc = (isValidFrameIdx ? SPU::LQDr128 : SPU::LQXr128); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 377 | } else if (RC == SPU::R64CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 378 | opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 379 | } else if (RC == SPU::R64FPRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 380 | opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 381 | } else if (RC == SPU::R32CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 382 | opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 383 | } else if (RC == SPU::R32FPRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 384 | opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 385 | } else if (RC == SPU::R16CRegisterClass) { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 386 | opc = (isValidFrameIdx ? SPU::LQDr16 : SPU::LQXr16); |
| 387 | } else if (RC == SPU::R8CRegisterClass) { |
| 388 | opc = (isValidFrameIdx ? SPU::LQDr8 : SPU::LQXr8); |
Scott Michel | f0569be | 2008-12-27 04:51:36 +0000 | [diff] [blame] | 389 | } else if (RC == SPU::VECREGRegisterClass) { |
| 390 | opc = (isValidFrameIdx) ? SPU::LQDv16i8 : SPU::LQXv16i8; |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 391 | } else { |
| 392 | assert(0 && "Unknown regclass in loadRegFromStackSlot!"); |
| 393 | abort(); |
| 394 | } |
| 395 | |
Bill Wendling | d1c321a | 2009-02-12 00:02:55 +0000 | [diff] [blame] | 396 | DebugLoc DL = DebugLoc::getUnknownLoc(); |
| 397 | if (MI != MBB.end()) DL = MI->getDebugLoc(); |
| 398 | addFrameReference(BuildMI(MBB, MI, DL, get(opc)).addReg(DestReg), FrameIdx); |
Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 399 | } |
| 400 | |
/*!
  \note We are really pessimistic here about what kind of a load we're doing.
*/
//! Load a register from an arbitrary address (UNIMPLEMENTED)
/*!
  \note This routine is not implemented: it unconditionally prints a
  diagnostic and abort()s.  Everything after the abort() is unreachable
  scaffolding -- a sketch modeled on the PPC backend with the actual opcodes
  left commented out -- kept for a future implementation.
*/
void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                                   SmallVectorImpl<MachineOperand> &Addr,
                                   const TargetRegisterClass *RC,
                                   SmallVectorImpl<MachineInstr*> &NewMIs)
  const {
  cerr << "loadRegToAddr() invoked!\n";
  abort();

  // --- everything below is unreachable (see abort() above) ---
  if (Addr[0].isFI()) {
    /* do what loadRegFromStackSlot does here... */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::R8CRegisterClass) {
      /* do brilliance here */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::LWZ; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::LD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::LFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::LFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::LVX; */
    } else if (RC == SPU::GPRCRegisterClass) {
      /* Opc = something else! */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    // NOTE(review): Opc is still 0 at this point -- the opcode selection
    // above must be filled in before this BuildMI could ever be correct.
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc), DestReg);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}
| 447 | |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 448 | //! Return true if the specified load or store can be folded |
| 449 | bool |
| 450 | SPUInstrInfo::canFoldMemoryOperand(const MachineInstr *MI, |
| 451 | const SmallVectorImpl<unsigned> &Ops) const { |
| 452 | if (Ops.size() != 1) return false; |
| 453 | |
| 454 | // Make sure this is a reg-reg copy. |
| 455 | unsigned Opc = MI->getOpcode(); |
| 456 | |
| 457 | switch (Opc) { |
| 458 | case SPU::ORv16i8: |
| 459 | case SPU::ORv8i16: |
| 460 | case SPU::ORv4i32: |
| 461 | case SPU::ORv2i64: |
| 462 | case SPU::ORr8: |
| 463 | case SPU::ORr16: |
| 464 | case SPU::ORr32: |
| 465 | case SPU::ORr64: |
| 466 | case SPU::ORf32: |
| 467 | case SPU::ORf64: |
| 468 | if (MI->getOperand(1).getReg() == MI->getOperand(2).getReg()) |
| 469 | return true; |
| 470 | break; |
| 471 | } |
| 472 | |
| 473 | return false; |
| 474 | } |
| 475 | |
/// foldMemoryOperand - SPU, like PPC, can only fold spills into
/// copy instructions, turning them into load/store instructions.
/*!
  \param MF         the enclosing machine function
  \param MI         the (reg-reg OR copy) instruction to fold
  \param Ops        operand indices to fold; exactly one is supported
  \param FrameIndex stack slot to load from / store to
  \return the new folded instruction, or 0 if no folding was performed
*/
MachineInstr *
SPUInstrInfo::foldMemoryOperandImpl(MachineFunction &MF,
                                    MachineInstr *MI,
                                    const SmallVectorImpl<unsigned> &Ops,
                                    int FrameIndex) const
{
  if (Ops.size() != 1) return 0;

  unsigned OpNum = Ops[0];
  unsigned Opc = MI->getOpcode();
  MachineInstr *NewMI = 0;

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (OpNum == 0) { // move -> store
      // The copy's destination is being spilled: store the source register
      // to the stack slot.  Only the D-form (in-range offset) case is
      // handled; out-of-range frame indices produce no fold (NewMI stays 0).
      unsigned InReg = MI->getOperand(1).getReg();
      bool isKill = MI->getOperand(1).isKill();
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(),
                                          get(SPU::STQDr32));

        MIB.addReg(InReg, false, false, isKill);
        NewMI = addFrameReference(MIB, FrameIndex);
      }
    } else {          // move -> load
      unsigned OutReg = MI->getOperand(0).getReg();
      bool isDead = MI->getOperand(0).isDead();
      // NOTE(review): MIB is built with the *original OR opcode* (Opc was
      // read before this point), then given a def-reg and a frame reference.
      MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc));

      MIB.addReg(OutReg, true, false, false, isDead);
      // NOTE(review): this assignment to Opc is dead -- Opc is never read
      // again -- and it names *store* opcodes in the load branch.  Looks
      // like the folded instruction was meant to get a load (LQD/LQX)
      // opcode instead of the OR opcode above; confirm intended behavior
      // before changing.
      Opc = (FrameIndex < SPUFrameInfo::maxFrameOffset())
        ? SPU::STQDr32 : SPU::STQXr32;
      NewMI = addFrameReference(MIB, FrameIndex);
      break;
    }
  }

  return NewMI;
}
| 526 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 527 | //! Branch analysis |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 528 | /*! |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 529 | \note This code was kiped from PPC. There may be more branch analysis for |
| 530 | CellSPU than what's currently done here. |
| 531 | */ |
| 532 | bool |
| 533 | SPUInstrInfo::AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB, |
Scott Michel | 19c10e6 | 2009-01-26 03:37:41 +0000 | [diff] [blame] | 534 | MachineBasicBlock *&FBB, |
Evan Cheng | dc54d31 | 2009-02-09 07:14:22 +0000 | [diff] [blame] | 535 | SmallVectorImpl<MachineOperand> &Cond, |
| 536 | bool AllowModify) const { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 537 | // If the block has no terminators, it just falls into the block after it. |
| 538 | MachineBasicBlock::iterator I = MBB.end(); |
| 539 | if (I == MBB.begin() || !isUnpredicatedTerminator(--I)) |
| 540 | return false; |
| 541 | |
| 542 | // Get the last instruction in the block. |
| 543 | MachineInstr *LastInst = I; |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 544 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 545 | // If there is only one terminator instruction, process it. |
| 546 | if (I == MBB.begin() || !isUnpredicatedTerminator(--I)) { |
| 547 | if (isUncondBranch(LastInst)) { |
| 548 | TBB = LastInst->getOperand(0).getMBB(); |
| 549 | return false; |
| 550 | } else if (isCondBranch(LastInst)) { |
| 551 | // Block ends with fall-through condbranch. |
| 552 | TBB = LastInst->getOperand(1).getMBB(); |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 553 | DEBUG(cerr << "Pushing LastInst: "); |
| 554 | DEBUG(LastInst->dump()); |
| 555 | Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode())); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 556 | Cond.push_back(LastInst->getOperand(0)); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 557 | return false; |
| 558 | } |
| 559 | // Otherwise, don't know what this is. |
| 560 | return true; |
| 561 | } |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 562 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 563 | // Get the instruction before it if it's a terminator. |
| 564 | MachineInstr *SecondLastInst = I; |
| 565 | |
| 566 | // If there are three terminators, we don't know what sort of block this is. |
| 567 | if (SecondLastInst && I != MBB.begin() && |
| 568 | isUnpredicatedTerminator(--I)) |
| 569 | return true; |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 570 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 571 | // If the block ends with a conditional and unconditional branch, handle it. |
| 572 | if (isCondBranch(SecondLastInst) && isUncondBranch(LastInst)) { |
| 573 | TBB = SecondLastInst->getOperand(1).getMBB(); |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 574 | DEBUG(cerr << "Pushing SecondLastInst: "); |
| 575 | DEBUG(SecondLastInst->dump()); |
| 576 | Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode())); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 577 | Cond.push_back(SecondLastInst->getOperand(0)); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 578 | FBB = LastInst->getOperand(0).getMBB(); |
| 579 | return false; |
| 580 | } |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 581 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 582 | // If the block ends with two unconditional branches, handle it. The second |
| 583 | // one is not executed, so remove it. |
| 584 | if (isUncondBranch(SecondLastInst) && isUncondBranch(LastInst)) { |
| 585 | TBB = SecondLastInst->getOperand(0).getMBB(); |
| 586 | I = LastInst; |
Evan Cheng | dc54d31 | 2009-02-09 07:14:22 +0000 | [diff] [blame] | 587 | if (AllowModify) |
| 588 | I->eraseFromParent(); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 589 | return false; |
| 590 | } |
| 591 | |
| 592 | // Otherwise, can't handle this. |
| 593 | return true; |
| 594 | } |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 595 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 596 | unsigned |
| 597 | SPUInstrInfo::RemoveBranch(MachineBasicBlock &MBB) const { |
| 598 | MachineBasicBlock::iterator I = MBB.end(); |
| 599 | if (I == MBB.begin()) |
| 600 | return 0; |
| 601 | --I; |
| 602 | if (!isCondBranch(I) && !isUncondBranch(I)) |
| 603 | return 0; |
| 604 | |
| 605 | // Remove the first branch. |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 606 | DEBUG(cerr << "Removing branch: "); |
| 607 | DEBUG(I->dump()); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 608 | I->eraseFromParent(); |
| 609 | I = MBB.end(); |
| 610 | if (I == MBB.begin()) |
| 611 | return 1; |
| 612 | |
| 613 | --I; |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 614 | if (!(isCondBranch(I) || isUncondBranch(I))) |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 615 | return 1; |
| 616 | |
| 617 | // Remove the second branch. |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 618 | DEBUG(cerr << "Removing second branch: "); |
| 619 | DEBUG(I->dump()); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 620 | I->eraseFromParent(); |
| 621 | return 2; |
| 622 | } |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 623 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 624 | unsigned |
| 625 | SPUInstrInfo::InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB, |
Scott Michel | 19c10e6 | 2009-01-26 03:37:41 +0000 | [diff] [blame] | 626 | MachineBasicBlock *FBB, |
| 627 | const SmallVectorImpl<MachineOperand> &Cond) const { |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 628 | // Shouldn't be a fall through. |
| 629 | assert(TBB && "InsertBranch must not be told to insert a fallthrough"); |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 630 | assert((Cond.size() == 2 || Cond.size() == 0) && |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 631 | "SPU branch conditions have two components!"); |
Scott Michel | 02d711b | 2008-12-30 23:28:25 +0000 | [diff] [blame] | 632 | |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 633 | // One-way branch. |
| 634 | if (FBB == 0) { |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 635 | if (Cond.empty()) { |
| 636 | // Unconditional branch |
| 637 | MachineInstrBuilder MIB = BuildMI(&MBB, get(SPU::BR)); |
| 638 | MIB.addMBB(TBB); |
| 639 | |
| 640 | DEBUG(cerr << "Inserted one-way uncond branch: "); |
| 641 | DEBUG((*MIB).dump()); |
| 642 | } else { |
| 643 | // Conditional branch |
| 644 | MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm())); |
| 645 | MIB.addReg(Cond[1].getReg()).addMBB(TBB); |
| 646 | |
| 647 | DEBUG(cerr << "Inserted one-way cond branch: "); |
| 648 | DEBUG((*MIB).dump()); |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 649 | } |
| 650 | return 1; |
Scott Michel | 9bd7a37 | 2009-01-02 20:52:08 +0000 | [diff] [blame] | 651 | } else { |
| 652 | MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm())); |
| 653 | MachineInstrBuilder MIB2 = BuildMI(&MBB, get(SPU::BR)); |
| 654 | |
| 655 | // Two-way Conditional Branch. |
| 656 | MIB.addReg(Cond[1].getReg()).addMBB(TBB); |
| 657 | MIB2.addMBB(FBB); |
| 658 | |
| 659 | DEBUG(cerr << "Inserted conditional branch: "); |
| 660 | DEBUG((*MIB).dump()); |
| 661 | DEBUG(cerr << "part 2: "); |
| 662 | DEBUG((*MIB2).dump()); |
| 663 | return 2; |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 664 | } |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 665 | } |
| 666 | |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 667 | bool |
| 668 | SPUInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const { |
| 669 | return (!MBB.empty() && isUncondBranch(&MBB.back())); |
| 670 | } |
| 671 | //! Reverses a branch's condition, returning false on success. |
| 672 | bool |
| 673 | SPUInstrInfo::ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) |
| 674 | const { |
| 675 | // Pretty brainless way of inverting the condition, but it works, considering |
| 676 | // there are only two conditions... |
| 677 | static struct { |
| 678 | unsigned Opc; //! The incoming opcode |
| 679 | unsigned RevCondOpc; //! The reversed condition opcode |
| 680 | } revconds[] = { |
| 681 | { SPU::BRNZr32, SPU::BRZr32 }, |
| 682 | { SPU::BRNZv4i32, SPU::BRZv4i32 }, |
| 683 | { SPU::BRZr32, SPU::BRNZr32 }, |
| 684 | { SPU::BRZv4i32, SPU::BRNZv4i32 }, |
| 685 | { SPU::BRHNZr16, SPU::BRHZr16 }, |
| 686 | { SPU::BRHNZv8i16, SPU::BRHZv8i16 }, |
| 687 | { SPU::BRHZr16, SPU::BRHNZr16 }, |
| 688 | { SPU::BRHZv8i16, SPU::BRHNZv8i16 } |
| 689 | }; |
Scott Michel | aedc637 | 2008-12-10 00:15:19 +0000 | [diff] [blame] | 690 | |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 691 | unsigned Opc = unsigned(Cond[0].getImm()); |
| 692 | // Pretty dull mapping between the two conditions that SPU can generate: |
Misha Brukman | 93c65c8 | 2009-01-07 23:07:29 +0000 | [diff] [blame] | 693 | for (int i = sizeof(revconds)/sizeof(revconds[0]) - 1; i >= 0; --i) { |
Scott Michel | 52d0001 | 2009-01-03 00:27:53 +0000 | [diff] [blame] | 694 | if (revconds[i].Opc == Opc) { |
| 695 | Cond[0].setImm(revconds[i].RevCondOpc); |
| 696 | return false; |
| 697 | } |
| 698 | } |
| 699 | |
| 700 | return true; |
| 701 | } |