//===- SPUInstrInfo.cpp - Cell SPU Instruction Information ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the Cell SPU implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "SPURegisterNames.h"
#include "SPUInstrInfo.h"
#include "SPUInstrBuilder.h"
#include "SPUTargetMachine.h"
#include "SPUGenInstrInfo.inc"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

namespace {
  //! Predicate for an unconditional branch instruction
  inline bool isUncondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BR
            || opc == SPU::BRA
            || opc == SPU::BI);
  }

  //! Predicate for a conditional branch instruction
  inline bool isCondBranch(const MachineInstr *I) {
    unsigned opc = I->getOpcode();

    return (opc == SPU::BRNZr32
            || opc == SPU::BRNZv4i32
            || opc == SPU::BRZr32
            || opc == SPU::BRZv4i32
            || opc == SPU::BRHNZr16
            || opc == SPU::BRHNZv8i16
            || opc == SPU::BRHZr16
            || opc == SPU::BRHZv8i16);
  }
}

SPUInstrInfo::SPUInstrInfo(SPUTargetMachine &tm)
  : TargetInstrInfoImpl(SPUInsts, sizeof(SPUInsts)/sizeof(SPUInsts[0])),
    TM(tm),
    RI(*TM.getSubtargetImpl(), *this)
{ /* NOP */ }

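//! Determine whether MI is a register-to-register move, reporting the source
//! and destination registers (CellSPU moves never involve sub-registers).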
bool
SPUInstrInfo::isMoveInstr(const MachineInstr& MI,
                          unsigned& sourceReg,
                          unsigned& destReg,
                          unsigned& SrcSR, unsigned& DstSR) const {
  SrcSR = DstSR = 0;  // No sub-registers.

  switch (MI.getOpcode()) {
  default:
    break;
  case SPU::ORIv4i32:
  case SPU::ORIr32:
  case SPU::ORHIv8i16:
  case SPU::ORHIr16:
  case SPU::ORHIi8i16:
  case SPU::ORBIv16i8:
  case SPU::ORBIr8:
  case SPU::ORIi16i32:
  case SPU::ORIi8i32:
  case SPU::AHIvec:
  case SPU::AHIr16:
  case SPU::AIv4i32:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isImm() &&
           "invalid SPU ORI/ORHI/ORBI/AHI/AI/SFI/SFHI instruction!");
    if (MI.getOperand(2).getImm() == 0) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::AIr32:
    assert(MI.getNumOperands() == 3 &&
           "wrong number of operands to AIr32");
    if (MI.getOperand(0).isReg() &&
        MI.getOperand(1).isReg() &&
        (MI.getOperand(2).isImm() &&
         MI.getOperand(2).getImm() == 0)) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  case SPU::LRr8:
  case SPU::LRr16:
  case SPU::LRr32:
  case SPU::LRf32:
  case SPU::LRr64:
  case SPU::LRf64:
  case SPU::LRr128:
  case SPU::LRv16i8:
  case SPU::LRv8i16:
  case SPU::LRv4i32:
  case SPU::LRv4f32:
  case SPU::LRv2i64:
  case SPU::LRv2f64:
  case SPU::ORv16i8_i8:
  case SPU::ORv8i16_i16:
  case SPU::ORv4i32_i32:
  case SPU::ORv2i64_i64:
  case SPU::ORv4f32_f32:
  case SPU::ORv2f64_f64:
  /*
  case SPU::ORi128_r64:
  case SPU::ORi128_f64:
  case SPU::ORi128_r32:
  case SPU::ORi128_f32:
  case SPU::ORi128_r16:
  case SPU::ORi128_r8:
  */
  case SPU::ORi128_vec:
  /*
  case SPU::ORr64_i128:
  case SPU::ORf64_i128:
  case SPU::ORr32_i128:
  case SPU::ORf32_i128:
  case SPU::ORr16_i128:
  case SPU::ORr8_i128:
  */
  case SPU::ORvec_i128:
  /*
  case SPU::ORr16_r32:
  case SPU::ORr8_r32:
  case SPU::ORf32_r32:
  case SPU::ORr32_f32:
  case SPU::ORr32_r16:
  case SPU::ORr32_r8:
  case SPU::ORr16_r64:
  case SPU::ORr8_r64:
  case SPU::ORr64_r16:
  case SPU::ORr64_r8:
  */
  case SPU::ORr64_r32:
  case SPU::ORr32_r64:
  case SPU::ORf32_r32:
  case SPU::ORr32_f32:
  case SPU::ORf64_r64:
  case SPU::ORr64_f64: {
    assert(MI.getNumOperands() == 2 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           "invalid SPU OR<type>_<vec> or LR instruction!");
    sourceReg = MI.getOperand(1).getReg();
    destReg = MI.getOperand(0).getReg();
    return true;
    break;
  }
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORr128:
  case SPU::ORf32:
  case SPU::ORf64:
    assert(MI.getNumOperands() == 3 &&
           MI.getOperand(0).isReg() &&
           MI.getOperand(1).isReg() &&
           MI.getOperand(2).isReg() &&
           "invalid SPU OR(vec|r32|r64|gprc) instruction!");
    if (MI.getOperand(1).getReg() == MI.getOperand(2).getReg()) {
      sourceReg = MI.getOperand(1).getReg();
      destReg = MI.getOperand(0).getReg();
      return true;
    }
    break;
  }

  return false;
}

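//! If MI is an LQD load whose address is an immediate offset from a frame
//! index, set FrameIndex and return the destination register; otherwise
//! return 0.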
unsigned
SPUInstrInfo::isLoadFromStackSlot(const MachineInstr *MI,
                                  int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::LQDv16i8:
  case SPU::LQDv8i16:
  case SPU::LQDv4i32:
  case SPU::LQDv4f32:
  case SPU::LQDv2f64:
  case SPU::LQDr128:
  case SPU::LQDr64:
  case SPU::LQDr32:
  case SPU::LQDr16: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

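//! If MI is an STQD store whose address is an immediate offset from a frame
//! index, set FrameIndex and return the register being stored; otherwise
//! return 0.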
unsigned
SPUInstrInfo::isStoreToStackSlot(const MachineInstr *MI,
                                 int &FrameIndex) const {
  switch (MI->getOpcode()) {
  default: break;
  case SPU::STQDv16i8:
  case SPU::STQDv8i16:
  case SPU::STQDv4i32:
  case SPU::STQDv4f32:
  case SPU::STQDv2f64:
  case SPU::STQDr128:
  case SPU::STQDr64:
  case SPU::STQDr32:
  case SPU::STQDr16:
  case SPU::STQDr8: {
    const MachineOperand MOp1 = MI->getOperand(1);
    const MachineOperand MOp2 = MI->getOperand(2);
    if (MOp1.isImm() && MOp2.isFI()) {
      FrameIndex = MOp2.getIndex();
      return MI->getOperand(0).getReg();
    }
    break;
  }
  }
  return 0;
}

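//! Copy one physical register to another with a single full-width (128-bit)
//! LR move.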
void SPUInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator I, DebugLoc DL,
                               unsigned DestReg, unsigned SrcReg,
                               bool KillSrc) const
{
  // We support cross register class moves for our aliases, such as R3 in any
  // reg class to any other reg class containing R3. This is required because
  // we instruction select bitconvert i64 -> f64 as a noop for example, so our
  // types have no specific meaning.

  BuildMI(MBB, I, DL, get(SPU::LRr128), DestReg)
    .addReg(SrcReg, getKillRegState(KillSrc));
}

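//! Spill SrcReg to a stack slot. A d-form STQD is selected when the frame
//! index is below SPUFrameInfo::maxFrameOffset(); otherwise the indexed STQX
//! form is used.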
void
SPUInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MI,
                                  unsigned SrcReg, bool isKill, int FrameIdx,
                                  const TargetRegisterClass *RC,
                                  const TargetRegisterInfo *TRI) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr128 : SPU::STQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr64 : SPU::STQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr32 : SPU::STQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr16 : SPU::STQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::STQDr8 : SPU::STQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx) ? SPU::STQDv16i8 : SPU::STQXv16i8;
  } else {
    llvm_unreachable("Unknown regclass!");
  }

  DebugLoc DL;
  if (MI != MBB.end()) DL = MI->getDebugLoc();
  addFrameReference(BuildMI(MBB, MI, DL, get(opc))
                      .addReg(SrcReg, getKillRegState(isKill)), FrameIdx);
}

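//! Reload DestReg from a stack slot. As with stores, a d-form LQD is selected
//! when the frame index is below SPUFrameInfo::maxFrameOffset(); otherwise
//! the indexed LQX form is used.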
void
SPUInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned DestReg, int FrameIdx,
                                   const TargetRegisterClass *RC,
                                   const TargetRegisterInfo *TRI) const
{
  unsigned opc;
  bool isValidFrameIdx = (FrameIdx < SPUFrameInfo::maxFrameOffset());
  if (RC == SPU::GPRCRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr128 : SPU::LQXr128);
  } else if (RC == SPU::R64CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R64FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr64 : SPU::LQXr64);
  } else if (RC == SPU::R32CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R32FPRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr32 : SPU::LQXr32);
  } else if (RC == SPU::R16CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr16 : SPU::LQXr16);
  } else if (RC == SPU::R8CRegisterClass) {
    opc = (isValidFrameIdx ? SPU::LQDr8 : SPU::LQXr8);
  } else if (RC == SPU::VECREGRegisterClass) {
    opc = (isValidFrameIdx) ? SPU::LQDv16i8 : SPU::LQXv16i8;
  } else {
    llvm_unreachable("Unknown regclass in loadRegFromStackSlot!");
  }

  DebugLoc DL;
  if (MI != MBB.end()) DL = MI->getDebugLoc();
  addFrameReference(BuildMI(MBB, MI, DL, get(opc), DestReg), FrameIdx);
}

//! Return true if the specified load or store can be folded
bool
SPUInstrInfo::canFoldMemoryOperand(const MachineInstr *MI,
                                   const SmallVectorImpl<unsigned> &Ops) const {
  if (Ops.size() != 1) return false;

  // Make sure this is a reg-reg copy.
  unsigned Opc = MI->getOpcode();

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (MI->getOperand(1).getReg() == MI->getOperand(2).getReg())
      return true;
    break;
  }

  return false;
}

/// foldMemoryOperand - SPU, like PPC, can only fold spills into
/// copy instructions, turning them into load/store instructions.
MachineInstr *
SPUInstrInfo::foldMemoryOperandImpl(MachineFunction &MF,
                                    MachineInstr *MI,
                                    const SmallVectorImpl<unsigned> &Ops,
                                    int FrameIndex) const
{
  if (Ops.size() != 1) return 0;

  unsigned OpNum = Ops[0];
  unsigned Opc = MI->getOpcode();
  MachineInstr *NewMI = 0;

  switch (Opc) {
  case SPU::ORv16i8:
  case SPU::ORv8i16:
  case SPU::ORv4i32:
  case SPU::ORv2i64:
  case SPU::ORr8:
  case SPU::ORr16:
  case SPU::ORr32:
  case SPU::ORr64:
  case SPU::ORf32:
  case SPU::ORf64:
    if (OpNum == 0) {             // move -> store
      unsigned InReg = MI->getOperand(1).getReg();
      bool isKill = MI->getOperand(1).isKill();
      bool isUndef = MI->getOperand(1).isUndef();
      if (FrameIndex < SPUFrameInfo::maxFrameOffset()) {
        MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(),
                                          get(SPU::STQDr32));

        MIB.addReg(InReg, getKillRegState(isKill) | getUndefRegState(isUndef));
        NewMI = addFrameReference(MIB, FrameIndex);
      }
    } else {                      // move -> load
      unsigned OutReg = MI->getOperand(0).getReg();
      bool isDead = MI->getOperand(0).isDead();
      bool isUndef = MI->getOperand(0).isUndef();
      MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc));

      MIB.addReg(OutReg, RegState::Define | getDeadRegState(isDead) |
                 getUndefRegState(isUndef));
      Opc = (FrameIndex < SPUFrameInfo::maxFrameOffset())
        ? SPU::STQDr32 : SPU::STQXr32;
      NewMI = addFrameReference(MIB, FrameIndex);
      break;
    }
  }

  return NewMI;
}

//! Branch analysis
/*!
  \note This code was copied from the PowerPC backend; more branch analysis
  may be possible for CellSPU than is currently done here.
 */
bool
SPUInstrInfo::AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                            MachineBasicBlock *&FBB,
                            SmallVectorImpl<MachineOperand> &Cond,
                            bool AllowModify) const {
  // If the block has no terminators, it just falls into the block after it.
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return false;
  --I;
  while (I->isDebugValue()) {
    if (I == MBB.begin())
      return false;
    --I;
  }
  if (!isUnpredicatedTerminator(I))
    return false;

  // Get the last instruction in the block.
  MachineInstr *LastInst = I;

  // If there is only one terminator instruction, process it.
  if (I == MBB.begin() || !isUnpredicatedTerminator(--I)) {
    if (isUncondBranch(LastInst)) {
      // Check for jump tables
      if (!LastInst->getOperand(0).isMBB())
        return true;
      TBB = LastInst->getOperand(0).getMBB();
      return false;
    } else if (isCondBranch(LastInst)) {
      // Block ends with fall-through condbranch.
      TBB = LastInst->getOperand(1).getMBB();
      DEBUG(errs() << "Pushing LastInst: ");
      DEBUG(LastInst->dump());
      Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
      Cond.push_back(LastInst->getOperand(0));
      return false;
    }
    // Otherwise, don't know what this is.
    return true;
  }

  // Get the instruction before it if it's a terminator.
  MachineInstr *SecondLastInst = I;

  // If there are three terminators, we don't know what sort of block this is.
  if (SecondLastInst && I != MBB.begin() &&
      isUnpredicatedTerminator(--I))
    return true;

  // If the block ends with a conditional and unconditional branch, handle it.
  if (isCondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(1).getMBB();
    DEBUG(errs() << "Pushing SecondLastInst: ");
    DEBUG(SecondLastInst->dump());
    Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
    Cond.push_back(SecondLastInst->getOperand(0));
    FBB = LastInst->getOperand(0).getMBB();
    return false;
  }

  // If the block ends with two unconditional branches, handle it. The second
  // one is not executed, so remove it.
  if (isUncondBranch(SecondLastInst) && isUncondBranch(LastInst)) {
    TBB = SecondLastInst->getOperand(0).getMBB();
    I = LastInst;
    if (AllowModify)
      I->eraseFromParent();
    return false;
  }

  // Otherwise, can't handle this.
  return true;
}

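//! Remove up to two branch terminators (skipping debug values) from the end
//! of MBB, returning how many branches were removed.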
unsigned
SPUInstrInfo::RemoveBranch(MachineBasicBlock &MBB) const {
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return 0;
  --I;
  while (I->isDebugValue()) {
    if (I == MBB.begin())
      return 0;
    --I;
  }
  if (!isCondBranch(I) && !isUncondBranch(I))
    return 0;

  // Remove the first branch.
  DEBUG(errs() << "Removing branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  I = MBB.end();
  if (I == MBB.begin())
    return 1;

  --I;
  if (!(isCondBranch(I) || isUncondBranch(I)))
    return 1;

  // Remove the second branch.
  DEBUG(errs() << "Removing second branch: ");
  DEBUG(I->dump());
  I->eraseFromParent();
  return 2;
}

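//! Insert either a single (conditional or unconditional) branch to TBB, or,
//! when FBB is given, a conditional branch to TBB followed by an
//! unconditional branch to FBB; returns the number of branches inserted.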
unsigned
SPUInstrInfo::InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                           MachineBasicBlock *FBB,
                           const SmallVectorImpl<MachineOperand> &Cond,
                           DebugLoc DL) const {
  // Shouldn't be a fall through.
  assert(TBB && "InsertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 2 || Cond.size() == 0) &&
         "SPU branch conditions have two components!");

  // One-way branch.
  if (FBB == 0) {
    if (Cond.empty()) {
      // Unconditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, DL, get(SPU::BR));
      MIB.addMBB(TBB);

      DEBUG(errs() << "Inserted one-way uncond branch: ");
      DEBUG((*MIB).dump());
    } else {
      // Conditional branch
      MachineInstrBuilder MIB = BuildMI(&MBB, DL, get(Cond[0].getImm()));
      MIB.addReg(Cond[1].getReg()).addMBB(TBB);

      DEBUG(errs() << "Inserted one-way cond branch: ");
      DEBUG((*MIB).dump());
    }
    return 1;
  } else {
    MachineInstrBuilder MIB = BuildMI(&MBB, DL, get(Cond[0].getImm()));
    MachineInstrBuilder MIB2 = BuildMI(&MBB, DL, get(SPU::BR));

    // Two-way Conditional Branch.
    MIB.addReg(Cond[1].getReg()).addMBB(TBB);
    MIB2.addMBB(FBB);

    DEBUG(errs() << "Inserted conditional branch: ");
    DEBUG((*MIB).dump());
    DEBUG(errs() << "part 2: ");
    DEBUG((*MIB2).dump());
    return 2;
  }
}

//! Reverses a branch's condition, returning false on success.
bool
SPUInstrInfo::ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond)
  const {
  // Pretty brainless way of inverting the condition, but it works, considering
  // there are only two conditions...
  static struct {
    unsigned Opc;               //! The incoming opcode
    unsigned RevCondOpc;        //! The reversed condition opcode
  } revconds[] = {
    { SPU::BRNZr32, SPU::BRZr32 },
    { SPU::BRNZv4i32, SPU::BRZv4i32 },
    { SPU::BRZr32, SPU::BRNZr32 },
    { SPU::BRZv4i32, SPU::BRNZv4i32 },
    { SPU::BRHNZr16, SPU::BRHZr16 },
    { SPU::BRHNZv8i16, SPU::BRHZv8i16 },
    { SPU::BRHZr16, SPU::BRHNZr16 },
    { SPU::BRHZv8i16, SPU::BRHNZv8i16 }
  };

  unsigned Opc = unsigned(Cond[0].getImm());
  // Pretty dull mapping between the two conditions that SPU can generate:
  for (int i = sizeof(revconds)/sizeof(revconds[0]) - 1; i >= 0; --i) {
    if (revconds[i].Opc == Opc) {
      Cond[0].setImm(revconds[i].RevCondOpc);
      return false;
    }
  }

  return true;
}