//===- SPUInstrInfo.cpp - Cell SPU Instruction Information ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the Cell SPU implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "SPURegisterNames.h"
#include "SPUInstrInfo.h"
#include "SPUInstrBuilder.h"
#include "SPUFrameInfo.h"         // SPUFrameInfo::maxFrameOffset()
#include "SPUTargetMachine.h"
#include "SPUGenInstrInfo.inc"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/Support/Streams.h"
#include "llvm/Support/Debug.h"

using namespace llvm;

namespace {
  //! Predicate for an unconditional branch instruction
  inline bool isUncondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BR
            || opc == SPU::BRA
            || opc == SPU::BI);
  }

  //! Predicate for a conditional branch instruction
  inline bool isCondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BRNZr32
            || opc == SPU::BRNZv4i32
            || opc == SPU::BRZr32
            || opc == SPU::BRZv4i32
            || opc == SPU::BRHNZr16
            || opc == SPU::BRHNZv8i16
            || opc == SPU::BRHZr16
            || opc == SPU::BRHZv8i16);
  }
}

SPUInstrInfo::SPUInstrInfo(SPUTargetMachine &tm)
  : TargetInstrInfoImpl(SPUInsts, sizeof(SPUInsts)/sizeof(SPUInsts[0])),
    TM(tm),
    RI(*TM.getSubtargetImpl(), *this)
{ /* NOP */ }

/// getPointerRegClass - Return the register class to use to hold pointers.
/// This is used for addressing modes.
const TargetRegisterClass *
SPUInstrInfo::getPointerRegClass() const
{
  return &SPU::R32CRegClass;
}

bool
SPUInstrInfo::isMoveInstr(const MachineInstr& MI,
                          unsigned& sourceReg,
                          unsigned& destReg) const {
  // Primarily, ORI and OR are generated by copyRegToReg. But, there are other
  // cases where we can safely say that what's being done is really a move
  // (see how PowerPC does this -- it's the model for this code too.)
  switch (MI.getOpcode()) {
  default:
    break;
  case SPU::ORIv4i32:
  case SPU::ORIr32:
  case SPU::ORHIv8i16:
  case SPU::ORHIr16:
  case SPU::ORHIi8i16:
  case SPU::ORBIv16i8:
  case SPU::ORBIr8:
  case SPU::ORIi16i32:
  case SPU::ORIi8i32:
  case SPU::AHIvec:
  case SPU::AHIr16:
  case SPU::AIv4i32:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isImm() &&
           "invalid SPU ORI/ORHI/ORBI/AHI/AI/SFI/SFHI instruction!");
    if (MI.getOperand(2).getImm() == 0) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::AIr32:
    assert(MI.getNumOperands() == 3 &&
           "wrong number of operands to AIr32");
    if (MI.getOperand(0).isReg() &&
        MI.getOperand(1).isReg() &&
        (MI.getOperand(2).isImm() &&
         MI.getOperand(2).getImm() == 0)) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
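  // The LR pseudo-moves and the OR<type>_<type> register-class conversion
  // pseudo-ops below copy a value without changing its bits; they are only
  // reported as moves when both operands name the same register.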
  case SPU::LRr8:
  case SPU::LRr16:
  case SPU::LRr32:
  case SPU::LRf32:
  case SPU::LRr64:
  case SPU::LRf64:
  case SPU::LRr128:
  case SPU::LRv16i8:
  case SPU::LRv8i16:
  case SPU::LRv4i32:
  case SPU::LRv4f32:
  case SPU::LRv2i64:
  case SPU::LRv2f64:
  case SPU::ORv16i8_i8:
  case SPU::ORv8i16_i16:
  case SPU::ORv4i32_i32:
  case SPU::ORv2i64_i64:
  case SPU::ORv4f32_f32:
  case SPU::ORv2f64_f64:
  case SPU::ORi8_v16i8:
  case SPU::ORi16_v8i16:
  case SPU::ORi32_v4i32:
  case SPU::ORi64_v2i64:
  case SPU::ORf32_v4f32:
  case SPU::ORf64_v2f64:
  case SPU::ORi128_r64:
  case SPU::ORi128_f64:
  case SPU::ORi128_r32:
  case SPU::ORi128_f32:
  case SPU::ORi128_r16:
  case SPU::ORi128_r8:
  case SPU::ORi128_vec:
  case SPU::ORr64_i128:
  case SPU::ORf64_i128:
  case SPU::ORr32_i128:
  case SPU::ORf32_i128:
  case SPU::ORr16_i128:
  case SPU::ORr8_i128:
  case SPU::ORvec_i128:
  case SPU::ORr16_r32:
  case SPU::ORr8_r32:
  case SPU::ORr32_r16:
  case SPU::ORr32_r8:
  case SPU::ORr32_r64:
  case SPU::ORr16_r64:
  case SPU::ORr8_r64:
  case SPU::ORr64_r32:
  case SPU::ORr64_r16:
  case SPU::ORr64_r8:
  {
    assert(MI.getNumOperands() == 2 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           "invalid SPU OR<type>_<vec> or LR instruction!");
    if (MI.getOperand(0).getReg() == MI.getOperand(1).getReg()) {
      sourceReg = MI.getOperand(0).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isReg() &&
           "invalid SPU OR(vec|r32|r64|gprc) instruction!");
    if (MI.getOperand(1).getReg() == MI.getOperand(2).getReg()) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }

  return false;
}

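// LQD/STQD stack accesses carry their address as an immediate offset
// (operand 1) plus a frame-index base (operand 2); only that form is
// recognized as a stack-slot load or store below.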
unsigned
SPUInstrInfo::isLoadFromStackSlot(const MachineInstr *MI,
                                  int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::LQDv16i8:
  case SPU::LQDv8i16:
  case SPU::LQDv4i32:
  case SPU::LQDv4f32:
  case SPU::LQDv2f64:
  case SPU::LQDr128:
  case SPU::LQDr64:
  case SPU::LQDr32:
  case SPU::LQDr16: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

unsigned
SPUInstrInfo::isStoreToStackSlot(const MachineInstr *MI,
                                 int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::STQDv16i8:
  case SPU::STQDv8i16:
  case SPU::STQDv4i32:
  case SPU::STQDv4f32:
  case SPU::STQDv2f64:
  case SPU::STQDr128:
  case SPU::STQDr64:
  case SPU::STQDr32:
  case SPU::STQDr16:
  case SPU::STQDr8: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

bool SPUInstrInfo::copyRegToReg(MachineBasicBlock &MBB,
                                MachineBasicBlock::iterator MI,
                                unsigned DestReg, unsigned SrcReg,
                                const TargetRegisterClass *DestRC,
                                const TargetRegisterClass *SrcRC) const
{
  // We support cross register class moves for our aliases, such as R3 in any
  // reg class to any other reg class containing R3. This is required because
  // we instruction select bitconvert i64 -> f64 as a noop for example, so our
  // types have no specific meaning.

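  // Every copy is emitted as an LR pseudo-move of the destination's class;
  // all SPU registers are 128 bits wide, so the vector classes share the
  // v16i8 form.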
  if (DestRC == SPU::R8CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr8), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R16CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr16), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R32CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr32), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R32FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRf32), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R64CRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr64), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::R64FPRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRf64), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::GPRCRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRr128), DestReg).addReg(SrcReg);
  } else if (DestRC == SPU::VECREGRegisterClass) {
    BuildMI(MBB, MI, get(SPU::LRv16i8), DestReg).addReg(SrcReg);
  } else {
    // Attempt to copy unknown/unsupported register class!
    return false;
  }

  return true;
}

void
SPUInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MI,
                                  unsigned SrcReg, bool isKill, int FrameIdx,
                                  const TargetRegisterClass *RC) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
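  // Slots expected to be within STQD's signed 10-bit (quadword-scaled) reach
  // use the D-form store; otherwise fall back to the X-form STQX
  // (register + register addressing).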
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr128 : SPU::STQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr16 : SPU::STQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr8 : SPU::STQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDv16i8 : SPU::STQXv16i8);
  } else {
    assert(0 && "Unknown regclass!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc))
                    .addReg(SrcReg, false, false, isKill), FrameIdx);
}

void SPUInstrInfo::storeRegToAddr(MachineFunction &MF, unsigned SrcReg,
                                  bool isKill,
                                  SmallVectorImpl<MachineOperand> &Addr,
                                  const TargetRegisterClass *RC,
                                  SmallVectorImpl<MachineInstr*> &NewMIs) const {
  cerr << "storeRegToAddr() invoked!\n";
  abort();

  if (Addr[0].isFI()) {
    /* do what storeRegToStackSlot does here */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::GPRCRegisterClass) {
      /* Opc = PPC::STW; */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::STD; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::STFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::STFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::STVX; */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc))
      .addReg(SrcReg, false, false, isKill);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

void
SPUInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned DestReg, int FrameIdx,
                                   const TargetRegisterClass *RC) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr128 : SPU::LQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr16 : SPU::LQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr8 : SPU::LQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDv16i8 : SPU::LQXv16i8);
  } else {
    assert(0 && "Unknown regclass in loadRegFromStackSlot!");
    abort();
  }

  addFrameReference(BuildMI(MBB, MI, get(opc)).addReg(DestReg), FrameIdx);
}

/*!
  \note We are really pessimistic here about what kind of a load we're doing.
*/
void SPUInstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                                   SmallVectorImpl<MachineOperand> &Addr,
                                   const TargetRegisterClass *RC,
                                   SmallVectorImpl<MachineInstr*> &NewMIs) const {
  cerr << "loadRegFromAddr() invoked!\n";
  abort();

  if (Addr[0].isFI()) {
    /* do what loadRegFromStackSlot does here... */
  } else {
    unsigned Opc = 0;
    if (RC == SPU::R8CRegisterClass) {
      /* do brilliance here */
    } else if (RC == SPU::R16CRegisterClass) {
      /* Opc = PPC::LWZ; */
    } else if (RC == SPU::R32CRegisterClass) {
      /* Opc = PPC::LD; */
    } else if (RC == SPU::R32FPRegisterClass) {
      /* Opc = PPC::LFD; */
    } else if (RC == SPU::R64FPRegisterClass) {
      /* Opc = PPC::LFS; */
    } else if (RC == SPU::VECREGRegisterClass) {
      /* Opc = PPC::LVX; */
    } else if (RC == SPU::GPRCRegisterClass) {
      /* Opc = something else! */
    } else {
      assert(0 && "Unknown regclass!");
      abort();
    }
    MachineInstrBuilder MIB = BuildMI(MF, get(Opc), DestReg);
    for (unsigned i = 0, e = Addr.size(); i != e; ++i) {
      MachineOperand &MO = Addr[i];
      if (MO.isReg())
        MIB.addReg(MO.getReg());
      else if (MO.isImm())
        MIB.addImm(MO.getImm());
      else
        MIB.addFrameIndex(MO.getIndex());
    }
    NewMIs.push_back(MIB);
  }
}

//! Return true if the specified load or store can be folded
bool
SPUInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
                                   const SmallVectorImpl<unsigned> &Ops) const {
  if (Ops.size() != 1) return false;

  // Make sure this is a reg-reg copy.
  unsigned Opc = MI->getOpcode();

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (MI->getOperand(1).getReg() == MI->getOperand(2).getReg())
      return true;
    break;
  }

  return false;
}

/// foldMemoryOperand - SPU, like PPC, can only fold spills into
/// copy instructions, turning them into load/store instructions.
MachineInstr *
SPUInstrInfo::foldMemoryOperandImpl(MachineFunction &MF,
                                    MachineInstr *MI,
                                    const SmallVectorImpl<unsigned> &Ops,
                                    int FrameIndex) const
{
  if (Ops.size() != 1) return 0;

  unsigned OpNum = Ops[0];
  unsigned Opc = MI->getOpcode();
  MachineInstr *NewMI = 0;

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (OpNum == 0) {  // move -> store
      unsigned InReg = MI->getOperand(1).getReg();
      bool isKill = MI->getOperand(1).isKill();
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        MachineInstrBuilder MIB = BuildMI(MF, get(SPU::STQDr32));

        MIB.addReg(InReg, false, false, isKill);
        NewMI = addFrameReference(MIB, FrameIndex);
      }
    } else {           // move -> load
      unsigned OutReg = MI->getOperand(0).getReg();
      bool isDead = MI->getOperand(0).isDead();
      // Mirror the store case: only fold when the slot is reachable with a
      // D-form access, and emit a load opcode rather than reusing the OR move.
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        MachineInstrBuilder MIB = BuildMI(MF, get(SPU::LQDr32));

        MIB.addReg(OutReg, true, false, false, isDead);
        NewMI = addFrameReference(MIB, FrameIndex);
      }
    }
    break;
  }

  return NewMI;
}

//! Branch analysis
/*!
  \note This code was kiped from PPC. There may be more branch analysis for
  CellSPU than what's currently done here.
*/
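// Branch conditions are encoded as two operands: Cond[0] carries the
// conditional-branch opcode as an immediate and Cond[1] carries the register
// the branch tests.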
bool
SPUInstrInfo::AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                            MachineBasicBlock *&FBB,
                            SmallVectorImpl<MachineOperand> &Cond) const {
  // If the block has no terminators, it just falls into the block after it.
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin() || !isUnpredicatedTerminator(--I))
    return false;

  // Get the last instruction in the block.
  MachineInstr *LastInst = I;

  // If there is only one terminator instruction, process it.
  if (I == MBB.begin() || !isUnpredicatedTerminator(--I)) {
    if (isUncondBranch(LastInst)) {
      TBB = LastInst->getOperand(0).getMBB();
      return false;
    } else if (isCondBranch(LastInst)) {
      // Block ends with fall-through condbranch.
      TBB = LastInst->getOperand(1).getMBB();
      DEBUG(cerr << "Pushing LastInst: ");
      DEBUG(LastInst->dump());
      Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
      Cond.push_back(LastInst->getOperand(0));
      return false;
    }
    // Otherwise, don't know what this is.
    return true;
  }

  // Get the instruction before it if it's a terminator.
  MachineInstr *SecondLastInst = I;

  // If there are three terminators, we don't know what sort of block this is.
  if (SecondLastInst && I != MBB.begin() &&
      isUnpredicatedTerminator(--I))
    return true;

  // If the block ends with a conditional and unconditional branch, handle it.
  if (isCondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(1).getMBB();
    DEBUG(cerr << "Pushing SecondLastInst: ");
    DEBUG(SecondLastInst->dump());
    Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
    Cond.push_back(SecondLastInst->getOperand(0));
    FBB = LastInst->getOperand(0).getMBB();
    return false;
  }

  // If the block ends with two unconditional branches, handle it. The second
  // one is not executed, so remove it.
  if (isUncondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(0).getMBB();
    I = LastInst;
    I->eraseFromParent();
    return false;
  }

  // Otherwise, can't handle this.
  return true;
}

unsigned
SPUInstrInfo::RemoveBranch(MachineBasicBlock &MBB) const {
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return 0;
  --I;
  if (!isCondBranch(I) && !isUncondBranch(I))
    return 0;

  // Remove the first branch.
  DEBUG(cerr << "Removing branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  I = MBB.end();
  if (I == MBB.begin())
    return 1;

  --I;
  if (!(isCondBranch(I) || isUncondBranch(I)))
    return 1;

  // Remove the second branch.
  DEBUG(cerr << "Removing second branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  return 2;
}

unsigned
SPUInstrInfo::InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                           MachineBasicBlock *FBB,
                           const SmallVectorImpl<MachineOperand> &Cond) const {
  // Shouldn't be a fall through.
  assert(TBB && "InsertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 2 || Cond.size() == 0) &&
         "SPU branch conditions have two components!");

  // One-way branch.
  if (FBB == 0) {
    if (Cond.empty()) {
      // Unconditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, get(SPU::BR));
      MIB.addMBB(TBB);

      DEBUG(cerr << "Inserted one-way uncond branch: ");
      DEBUG((*MIB).dump());
    } else {
      // Conditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm()));
      MIB.addReg(Cond[1].getReg()).addMBB(TBB);

      DEBUG(cerr << "Inserted one-way cond branch: ");
      DEBUG((*MIB).dump());
    }
    return 1;
  } else {
    MachineInstrBuilder MIB = BuildMI(&MBB, get(Cond[0].getImm()));
    MachineInstrBuilder MIB2 = BuildMI(&MBB, get(SPU::BR));

    // Two-way Conditional Branch.
    MIB.addReg(Cond[1].getReg()).addMBB(TBB);
    MIB2.addMBB(FBB);

    DEBUG(cerr << "Inserted conditional branch: ");
    DEBUG((*MIB).dump());
    DEBUG(cerr << "part 2: ");
    DEBUG((*MIB2).dump());
    return 2;
  }
}

bool
SPUInstrInfo::BlockHasNoFallThrough(const MachineBasicBlock &MBB) const {
  return (!MBB.empty() && isUncondBranch(&MBB.back()));
}

//! Reverses a branch's condition, returning false on success.
bool
SPUInstrInfo::ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond)
  const {
  // Pretty brainless way of inverting the condition, but it works, considering
  // there are only two conditions...
  static struct {
    unsigned Opc;                //! The incoming opcode
    unsigned RevCondOpc;         //! The reversed condition opcode
  } revconds[] = {
    { SPU::BRNZr32, SPU::BRZr32 },
    { SPU::BRNZv4i32, SPU::BRZv4i32 },
    { SPU::BRZr32, SPU::BRNZr32 },
    { SPU::BRZv4i32, SPU::BRNZv4i32 },
    { SPU::BRHNZr16, SPU::BRHZr16 },
    { SPU::BRHNZv8i16, SPU::BRHZv8i16 },
    { SPU::BRHZr16, SPU::BRHNZr16 },
    { SPU::BRHZv8i16, SPU::BRHNZv8i16 }
  };

  unsigned Opc = unsigned(Cond[0].getImm());
  // Pretty dull mapping between the two conditions that SPU can generate:
  for (int i = sizeof(revconds)/sizeof(revconds[0]) - 1; i >= 0; --i) {
    if (revconds[i].Opc == Opc) {
      Cond[0].setImm(revconds[i].RevCondOpc);
      return false;
    }
  }

  return true;
}