| Misha Brukman | a85d6bc | 2002-11-22 22:42:50 +0000 | [diff] [blame] | 1 | //===- X86InstrInfo.cpp - X86 Instruction Information -----------*- C++ -*-===// | 
| Misha Brukman | 0e0a7a45 | 2005-04-21 23:38:14 +0000 | [diff] [blame] | 2 | // | 
| John Criswell | b576c94 | 2003-10-20 19:43:21 +0000 | [diff] [blame] | 3 | //                     The LLVM Compiler Infrastructure | 
 | 4 | // | 
| Chris Lattner | 4ee451d | 2007-12-29 20:36:04 +0000 | [diff] [blame] | 5 | // This file is distributed under the University of Illinois Open Source | 
 | 6 | // License. See LICENSE.TXT for details. | 
| Misha Brukman | 0e0a7a45 | 2005-04-21 23:38:14 +0000 | [diff] [blame] | 7 | // | 
| John Criswell | b576c94 | 2003-10-20 19:43:21 +0000 | [diff] [blame] | 8 | //===----------------------------------------------------------------------===// | 
| Chris Lattner | 7261408 | 2002-10-25 22:55:53 +0000 | [diff] [blame] | 9 | // | 
| Chris Lattner | 3501fea | 2003-01-14 22:00:31 +0000 | [diff] [blame] | 10 | // This file contains the X86 implementation of the TargetInstrInfo class. | 
| Chris Lattner | 7261408 | 2002-10-25 22:55:53 +0000 | [diff] [blame] | 11 | // | 
 | 12 | //===----------------------------------------------------------------------===// | 
 | 13 |  | 
| Chris Lattner | 055c965 | 2002-10-29 21:05:24 +0000 | [diff] [blame] | 14 | #include "X86InstrInfo.h" | 
| Chris Lattner | 4ce42a7 | 2002-12-03 05:42:53 +0000 | [diff] [blame] | 15 | #include "X86.h" | 
| Chris Lattner | abf05b2 | 2003-08-03 21:55:55 +0000 | [diff] [blame] | 16 | #include "X86GenInstrInfo.inc" | 
| Evan Cheng | aa3c141 | 2006-05-30 21:45:53 +0000 | [diff] [blame] | 17 | #include "X86InstrBuilder.h" | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 18 | #include "X86MachineFunctionInfo.h" | 
| Evan Cheng | aa3c141 | 2006-05-30 21:45:53 +0000 | [diff] [blame] | 19 | #include "X86Subtarget.h" | 
 | 20 | #include "X86TargetMachine.h" | 
| Dan Gohman | d68a076 | 2009-01-05 17:59:02 +0000 | [diff] [blame] | 21 | #include "llvm/DerivedTypes.h" | 
| Owen Anderson | 0a5372e | 2009-07-13 04:09:18 +0000 | [diff] [blame] | 22 | #include "llvm/LLVMContext.h" | 
| Owen Anderson | 718cb66 | 2007-09-07 04:06:50 +0000 | [diff] [blame] | 23 | #include "llvm/ADT/STLExtras.h" | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 24 | #include "llvm/CodeGen/MachineConstantPool.h" | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 25 | #include "llvm/CodeGen/MachineFrameInfo.h" | 
| Evan Cheng | aa3c141 | 2006-05-30 21:45:53 +0000 | [diff] [blame] | 26 | #include "llvm/CodeGen/MachineInstrBuilder.h" | 
| Chris Lattner | 84bc542 | 2007-12-31 04:13:23 +0000 | [diff] [blame] | 27 | #include "llvm/CodeGen/MachineRegisterInfo.h" | 
| Evan Cheng | 258ff67 | 2006-12-01 21:52:41 +0000 | [diff] [blame] | 28 | #include "llvm/CodeGen/LiveVariables.h" | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 29 | #include "llvm/CodeGen/PseudoSourceValue.h" | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 30 | #include "llvm/Support/CommandLine.h" | 
| Torok Edwin | ab7c09b | 2009-07-08 18:01:40 +0000 | [diff] [blame] | 31 | #include "llvm/Support/ErrorHandling.h" | 
 | 32 | #include "llvm/Support/raw_ostream.h" | 
| Evan Cheng | 0488db9 | 2007-09-25 01:57:46 +0000 | [diff] [blame] | 33 | #include "llvm/Target/TargetOptions.h" | 
| Chris Lattner | af76e59 | 2009-08-22 20:48:53 +0000 | [diff] [blame] | 34 | #include "llvm/MC/MCAsmInfo.h" | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 35 |  | 
 | 36 | #include <limits> | 
 | 37 |  | 
| Brian Gaeke | d0fde30 | 2003-11-11 22:41:34 +0000 | [diff] [blame] | 38 | using namespace llvm; | 
 | 39 |  | 
// File-local command-line flags controlling X86 spill-code folding
// ("fusing") behavior. All three are booleans registered with the LLVM
// CommandLine library; being static, they are visible only to this file.

// -disable-spill-fusing: turn off folding of spill code into the
// instructions that use it.
static cl::opt<bool>
NoFusing("disable-spill-fusing",
         cl::desc("Disable fusing of spill code into instructions"));

// -print-failed-fuse-candidates (hidden): debugging aid — print each
// instruction the register allocator wanted fused but that this backend
// currently cannot fold.
static cl::opt<bool>
PrintFailedFusing("print-failed-fuse-candidates",
                  cl::desc("Print instructions that the allocator wants to"
                           " fuse, but the X86 backend currently can't"),
                  cl::Hidden);

// -remat-pic-stub-load (hidden, off by default): allow re-materializing a
// load from a stub when compiling in PIC mode.
static cl::opt<bool>
ReMatPICStubLoad("remat-pic-stub-load",
                 cl::desc("Re-materialize load from stub in PIC mode"),
                 cl::init(false), cl::Hidden);
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 52 |  | 
| Evan Cheng | aa3c141 | 2006-05-30 21:45:53 +0000 | [diff] [blame] | 53 | X86InstrInfo::X86InstrInfo(X86TargetMachine &tm) | 
| Chris Lattner | 6410552 | 2008-01-01 01:03:04 +0000 | [diff] [blame] | 54 |   : TargetInstrInfoImpl(X86Insts, array_lengthof(X86Insts)), | 
| Evan Cheng | 25ab690 | 2006-09-08 06:48:29 +0000 | [diff] [blame] | 55 |     TM(tm), RI(tm, *this) { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 56 |   SmallVector<unsigned,16> AmbEntries; | 
 | 57 |   static const unsigned OpTbl2Addr[][2] = { | 
 | 58 |     { X86::ADC32ri,     X86::ADC32mi }, | 
 | 59 |     { X86::ADC32ri8,    X86::ADC32mi8 }, | 
 | 60 |     { X86::ADC32rr,     X86::ADC32mr }, | 
 | 61 |     { X86::ADC64ri32,   X86::ADC64mi32 }, | 
 | 62 |     { X86::ADC64ri8,    X86::ADC64mi8 }, | 
 | 63 |     { X86::ADC64rr,     X86::ADC64mr }, | 
 | 64 |     { X86::ADD16ri,     X86::ADD16mi }, | 
 | 65 |     { X86::ADD16ri8,    X86::ADD16mi8 }, | 
 | 66 |     { X86::ADD16rr,     X86::ADD16mr }, | 
 | 67 |     { X86::ADD32ri,     X86::ADD32mi }, | 
 | 68 |     { X86::ADD32ri8,    X86::ADD32mi8 }, | 
 | 69 |     { X86::ADD32rr,     X86::ADD32mr }, | 
 | 70 |     { X86::ADD64ri32,   X86::ADD64mi32 }, | 
 | 71 |     { X86::ADD64ri8,    X86::ADD64mi8 }, | 
 | 72 |     { X86::ADD64rr,     X86::ADD64mr }, | 
 | 73 |     { X86::ADD8ri,      X86::ADD8mi }, | 
 | 74 |     { X86::ADD8rr,      X86::ADD8mr }, | 
 | 75 |     { X86::AND16ri,     X86::AND16mi }, | 
 | 76 |     { X86::AND16ri8,    X86::AND16mi8 }, | 
 | 77 |     { X86::AND16rr,     X86::AND16mr }, | 
 | 78 |     { X86::AND32ri,     X86::AND32mi }, | 
 | 79 |     { X86::AND32ri8,    X86::AND32mi8 }, | 
 | 80 |     { X86::AND32rr,     X86::AND32mr }, | 
 | 81 |     { X86::AND64ri32,   X86::AND64mi32 }, | 
 | 82 |     { X86::AND64ri8,    X86::AND64mi8 }, | 
 | 83 |     { X86::AND64rr,     X86::AND64mr }, | 
 | 84 |     { X86::AND8ri,      X86::AND8mi }, | 
 | 85 |     { X86::AND8rr,      X86::AND8mr }, | 
 | 86 |     { X86::DEC16r,      X86::DEC16m }, | 
 | 87 |     { X86::DEC32r,      X86::DEC32m }, | 
 | 88 |     { X86::DEC64_16r,   X86::DEC64_16m }, | 
 | 89 |     { X86::DEC64_32r,   X86::DEC64_32m }, | 
 | 90 |     { X86::DEC64r,      X86::DEC64m }, | 
 | 91 |     { X86::DEC8r,       X86::DEC8m }, | 
 | 92 |     { X86::INC16r,      X86::INC16m }, | 
 | 93 |     { X86::INC32r,      X86::INC32m }, | 
 | 94 |     { X86::INC64_16r,   X86::INC64_16m }, | 
 | 95 |     { X86::INC64_32r,   X86::INC64_32m }, | 
 | 96 |     { X86::INC64r,      X86::INC64m }, | 
 | 97 |     { X86::INC8r,       X86::INC8m }, | 
 | 98 |     { X86::NEG16r,      X86::NEG16m }, | 
 | 99 |     { X86::NEG32r,      X86::NEG32m }, | 
 | 100 |     { X86::NEG64r,      X86::NEG64m }, | 
 | 101 |     { X86::NEG8r,       X86::NEG8m }, | 
 | 102 |     { X86::NOT16r,      X86::NOT16m }, | 
 | 103 |     { X86::NOT32r,      X86::NOT32m }, | 
 | 104 |     { X86::NOT64r,      X86::NOT64m }, | 
 | 105 |     { X86::NOT8r,       X86::NOT8m }, | 
 | 106 |     { X86::OR16ri,      X86::OR16mi }, | 
 | 107 |     { X86::OR16ri8,     X86::OR16mi8 }, | 
 | 108 |     { X86::OR16rr,      X86::OR16mr }, | 
 | 109 |     { X86::OR32ri,      X86::OR32mi }, | 
 | 110 |     { X86::OR32ri8,     X86::OR32mi8 }, | 
 | 111 |     { X86::OR32rr,      X86::OR32mr }, | 
 | 112 |     { X86::OR64ri32,    X86::OR64mi32 }, | 
 | 113 |     { X86::OR64ri8,     X86::OR64mi8 }, | 
 | 114 |     { X86::OR64rr,      X86::OR64mr }, | 
 | 115 |     { X86::OR8ri,       X86::OR8mi }, | 
 | 116 |     { X86::OR8rr,       X86::OR8mr }, | 
 | 117 |     { X86::ROL16r1,     X86::ROL16m1 }, | 
 | 118 |     { X86::ROL16rCL,    X86::ROL16mCL }, | 
 | 119 |     { X86::ROL16ri,     X86::ROL16mi }, | 
 | 120 |     { X86::ROL32r1,     X86::ROL32m1 }, | 
 | 121 |     { X86::ROL32rCL,    X86::ROL32mCL }, | 
 | 122 |     { X86::ROL32ri,     X86::ROL32mi }, | 
 | 123 |     { X86::ROL64r1,     X86::ROL64m1 }, | 
 | 124 |     { X86::ROL64rCL,    X86::ROL64mCL }, | 
 | 125 |     { X86::ROL64ri,     X86::ROL64mi }, | 
 | 126 |     { X86::ROL8r1,      X86::ROL8m1 }, | 
 | 127 |     { X86::ROL8rCL,     X86::ROL8mCL }, | 
 | 128 |     { X86::ROL8ri,      X86::ROL8mi }, | 
 | 129 |     { X86::ROR16r1,     X86::ROR16m1 }, | 
 | 130 |     { X86::ROR16rCL,    X86::ROR16mCL }, | 
 | 131 |     { X86::ROR16ri,     X86::ROR16mi }, | 
 | 132 |     { X86::ROR32r1,     X86::ROR32m1 }, | 
 | 133 |     { X86::ROR32rCL,    X86::ROR32mCL }, | 
 | 134 |     { X86::ROR32ri,     X86::ROR32mi }, | 
 | 135 |     { X86::ROR64r1,     X86::ROR64m1 }, | 
 | 136 |     { X86::ROR64rCL,    X86::ROR64mCL }, | 
 | 137 |     { X86::ROR64ri,     X86::ROR64mi }, | 
 | 138 |     { X86::ROR8r1,      X86::ROR8m1 }, | 
 | 139 |     { X86::ROR8rCL,     X86::ROR8mCL }, | 
 | 140 |     { X86::ROR8ri,      X86::ROR8mi }, | 
 | 141 |     { X86::SAR16r1,     X86::SAR16m1 }, | 
 | 142 |     { X86::SAR16rCL,    X86::SAR16mCL }, | 
 | 143 |     { X86::SAR16ri,     X86::SAR16mi }, | 
 | 144 |     { X86::SAR32r1,     X86::SAR32m1 }, | 
 | 145 |     { X86::SAR32rCL,    X86::SAR32mCL }, | 
 | 146 |     { X86::SAR32ri,     X86::SAR32mi }, | 
 | 147 |     { X86::SAR64r1,     X86::SAR64m1 }, | 
 | 148 |     { X86::SAR64rCL,    X86::SAR64mCL }, | 
 | 149 |     { X86::SAR64ri,     X86::SAR64mi }, | 
 | 150 |     { X86::SAR8r1,      X86::SAR8m1 }, | 
 | 151 |     { X86::SAR8rCL,     X86::SAR8mCL }, | 
 | 152 |     { X86::SAR8ri,      X86::SAR8mi }, | 
 | 153 |     { X86::SBB32ri,     X86::SBB32mi }, | 
 | 154 |     { X86::SBB32ri8,    X86::SBB32mi8 }, | 
 | 155 |     { X86::SBB32rr,     X86::SBB32mr }, | 
 | 156 |     { X86::SBB64ri32,   X86::SBB64mi32 }, | 
 | 157 |     { X86::SBB64ri8,    X86::SBB64mi8 }, | 
 | 158 |     { X86::SBB64rr,     X86::SBB64mr }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 159 |     { X86::SHL16rCL,    X86::SHL16mCL }, | 
 | 160 |     { X86::SHL16ri,     X86::SHL16mi }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 161 |     { X86::SHL32rCL,    X86::SHL32mCL }, | 
 | 162 |     { X86::SHL32ri,     X86::SHL32mi }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 163 |     { X86::SHL64rCL,    X86::SHL64mCL }, | 
 | 164 |     { X86::SHL64ri,     X86::SHL64mi }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 165 |     { X86::SHL8rCL,     X86::SHL8mCL }, | 
 | 166 |     { X86::SHL8ri,      X86::SHL8mi }, | 
 | 167 |     { X86::SHLD16rrCL,  X86::SHLD16mrCL }, | 
 | 168 |     { X86::SHLD16rri8,  X86::SHLD16mri8 }, | 
 | 169 |     { X86::SHLD32rrCL,  X86::SHLD32mrCL }, | 
 | 170 |     { X86::SHLD32rri8,  X86::SHLD32mri8 }, | 
 | 171 |     { X86::SHLD64rrCL,  X86::SHLD64mrCL }, | 
 | 172 |     { X86::SHLD64rri8,  X86::SHLD64mri8 }, | 
 | 173 |     { X86::SHR16r1,     X86::SHR16m1 }, | 
 | 174 |     { X86::SHR16rCL,    X86::SHR16mCL }, | 
 | 175 |     { X86::SHR16ri,     X86::SHR16mi }, | 
 | 176 |     { X86::SHR32r1,     X86::SHR32m1 }, | 
 | 177 |     { X86::SHR32rCL,    X86::SHR32mCL }, | 
 | 178 |     { X86::SHR32ri,     X86::SHR32mi }, | 
 | 179 |     { X86::SHR64r1,     X86::SHR64m1 }, | 
 | 180 |     { X86::SHR64rCL,    X86::SHR64mCL }, | 
 | 181 |     { X86::SHR64ri,     X86::SHR64mi }, | 
 | 182 |     { X86::SHR8r1,      X86::SHR8m1 }, | 
 | 183 |     { X86::SHR8rCL,     X86::SHR8mCL }, | 
 | 184 |     { X86::SHR8ri,      X86::SHR8mi }, | 
 | 185 |     { X86::SHRD16rrCL,  X86::SHRD16mrCL }, | 
 | 186 |     { X86::SHRD16rri8,  X86::SHRD16mri8 }, | 
 | 187 |     { X86::SHRD32rrCL,  X86::SHRD32mrCL }, | 
 | 188 |     { X86::SHRD32rri8,  X86::SHRD32mri8 }, | 
 | 189 |     { X86::SHRD64rrCL,  X86::SHRD64mrCL }, | 
 | 190 |     { X86::SHRD64rri8,  X86::SHRD64mri8 }, | 
 | 191 |     { X86::SUB16ri,     X86::SUB16mi }, | 
 | 192 |     { X86::SUB16ri8,    X86::SUB16mi8 }, | 
 | 193 |     { X86::SUB16rr,     X86::SUB16mr }, | 
 | 194 |     { X86::SUB32ri,     X86::SUB32mi }, | 
 | 195 |     { X86::SUB32ri8,    X86::SUB32mi8 }, | 
 | 196 |     { X86::SUB32rr,     X86::SUB32mr }, | 
 | 197 |     { X86::SUB64ri32,   X86::SUB64mi32 }, | 
 | 198 |     { X86::SUB64ri8,    X86::SUB64mi8 }, | 
 | 199 |     { X86::SUB64rr,     X86::SUB64mr }, | 
 | 200 |     { X86::SUB8ri,      X86::SUB8mi }, | 
 | 201 |     { X86::SUB8rr,      X86::SUB8mr }, | 
 | 202 |     { X86::XOR16ri,     X86::XOR16mi }, | 
 | 203 |     { X86::XOR16ri8,    X86::XOR16mi8 }, | 
 | 204 |     { X86::XOR16rr,     X86::XOR16mr }, | 
 | 205 |     { X86::XOR32ri,     X86::XOR32mi }, | 
 | 206 |     { X86::XOR32ri8,    X86::XOR32mi8 }, | 
 | 207 |     { X86::XOR32rr,     X86::XOR32mr }, | 
 | 208 |     { X86::XOR64ri32,   X86::XOR64mi32 }, | 
 | 209 |     { X86::XOR64ri8,    X86::XOR64mi8 }, | 
 | 210 |     { X86::XOR64rr,     X86::XOR64mr }, | 
 | 211 |     { X86::XOR8ri,      X86::XOR8mi }, | 
 | 212 |     { X86::XOR8rr,      X86::XOR8mr } | 
 | 213 |   }; | 
 | 214 |  | 
 | 215 |   for (unsigned i = 0, e = array_lengthof(OpTbl2Addr); i != e; ++i) { | 
 | 216 |     unsigned RegOp = OpTbl2Addr[i][0]; | 
 | 217 |     unsigned MemOp = OpTbl2Addr[i][1]; | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 218 |     if (!RegOp2MemOpTable2Addr.insert(std::make_pair((unsigned*)RegOp, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 219 |                                                std::make_pair(MemOp,0))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 220 |       assert(false && "Duplicated entries?"); | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 221 |     // Index 0, folded load and store, no alignment requirement. | 
 | 222 |     unsigned AuxInfo = 0 | (1 << 4) | (1 << 5); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 223 |     if (!MemOp2RegOpTable.insert(std::make_pair((unsigned*)MemOp, | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 224 |                                                 std::make_pair(RegOp, | 
 | 225 |                                                               AuxInfo))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 226 |       AmbEntries.push_back(MemOp); | 
 | 227 |   } | 
 | 228 |  | 
 | 229 |   // If the third value is 1, then it's folding either a load or a store. | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 230 |   static const unsigned OpTbl0[][4] = { | 
 | 231 |     { X86::BT16ri8,     X86::BT16mi8, 1, 0 }, | 
 | 232 |     { X86::BT32ri8,     X86::BT32mi8, 1, 0 }, | 
 | 233 |     { X86::BT64ri8,     X86::BT64mi8, 1, 0 }, | 
 | 234 |     { X86::CALL32r,     X86::CALL32m, 1, 0 }, | 
 | 235 |     { X86::CALL64r,     X86::CALL64m, 1, 0 }, | 
 | 236 |     { X86::CMP16ri,     X86::CMP16mi, 1, 0 }, | 
 | 237 |     { X86::CMP16ri8,    X86::CMP16mi8, 1, 0 }, | 
 | 238 |     { X86::CMP16rr,     X86::CMP16mr, 1, 0 }, | 
 | 239 |     { X86::CMP32ri,     X86::CMP32mi, 1, 0 }, | 
 | 240 |     { X86::CMP32ri8,    X86::CMP32mi8, 1, 0 }, | 
 | 241 |     { X86::CMP32rr,     X86::CMP32mr, 1, 0 }, | 
 | 242 |     { X86::CMP64ri32,   X86::CMP64mi32, 1, 0 }, | 
 | 243 |     { X86::CMP64ri8,    X86::CMP64mi8, 1, 0 }, | 
 | 244 |     { X86::CMP64rr,     X86::CMP64mr, 1, 0 }, | 
 | 245 |     { X86::CMP8ri,      X86::CMP8mi, 1, 0 }, | 
 | 246 |     { X86::CMP8rr,      X86::CMP8mr, 1, 0 }, | 
 | 247 |     { X86::DIV16r,      X86::DIV16m, 1, 0 }, | 
 | 248 |     { X86::DIV32r,      X86::DIV32m, 1, 0 }, | 
 | 249 |     { X86::DIV64r,      X86::DIV64m, 1, 0 }, | 
 | 250 |     { X86::DIV8r,       X86::DIV8m, 1, 0 }, | 
 | 251 |     { X86::EXTRACTPSrr, X86::EXTRACTPSmr, 0, 16 }, | 
 | 252 |     { X86::FsMOVAPDrr,  X86::MOVSDmr, 0, 0 }, | 
 | 253 |     { X86::FsMOVAPSrr,  X86::MOVSSmr, 0, 0 }, | 
 | 254 |     { X86::IDIV16r,     X86::IDIV16m, 1, 0 }, | 
 | 255 |     { X86::IDIV32r,     X86::IDIV32m, 1, 0 }, | 
 | 256 |     { X86::IDIV64r,     X86::IDIV64m, 1, 0 }, | 
 | 257 |     { X86::IDIV8r,      X86::IDIV8m, 1, 0 }, | 
 | 258 |     { X86::IMUL16r,     X86::IMUL16m, 1, 0 }, | 
 | 259 |     { X86::IMUL32r,     X86::IMUL32m, 1, 0 }, | 
 | 260 |     { X86::IMUL64r,     X86::IMUL64m, 1, 0 }, | 
 | 261 |     { X86::IMUL8r,      X86::IMUL8m, 1, 0 }, | 
 | 262 |     { X86::JMP32r,      X86::JMP32m, 1, 0 }, | 
 | 263 |     { X86::JMP64r,      X86::JMP64m, 1, 0 }, | 
 | 264 |     { X86::MOV16ri,     X86::MOV16mi, 0, 0 }, | 
 | 265 |     { X86::MOV16rr,     X86::MOV16mr, 0, 0 }, | 
 | 266 |     { X86::MOV32ri,     X86::MOV32mi, 0, 0 }, | 
 | 267 |     { X86::MOV32rr,     X86::MOV32mr, 0, 0 }, | 
 | 268 |     { X86::MOV64ri32,   X86::MOV64mi32, 0, 0 }, | 
 | 269 |     { X86::MOV64rr,     X86::MOV64mr, 0, 0 }, | 
 | 270 |     { X86::MOV8ri,      X86::MOV8mi, 0, 0 }, | 
 | 271 |     { X86::MOV8rr,      X86::MOV8mr, 0, 0 }, | 
 | 272 |     { X86::MOV8rr_NOREX, X86::MOV8mr_NOREX, 0, 0 }, | 
 | 273 |     { X86::MOVAPDrr,    X86::MOVAPDmr, 0, 16 }, | 
 | 274 |     { X86::MOVAPSrr,    X86::MOVAPSmr, 0, 16 }, | 
 | 275 |     { X86::MOVDQArr,    X86::MOVDQAmr, 0, 16 }, | 
 | 276 |     { X86::MOVPDI2DIrr, X86::MOVPDI2DImr, 0, 0 }, | 
 | 277 |     { X86::MOVPQIto64rr,X86::MOVPQI2QImr, 0, 0 }, | 
 | 278 |     { X86::MOVPS2SSrr,  X86::MOVPS2SSmr, 0, 0 }, | 
 | 279 |     { X86::MOVSDrr,     X86::MOVSDmr, 0, 0 }, | 
 | 280 |     { X86::MOVSDto64rr, X86::MOVSDto64mr, 0, 0 }, | 
 | 281 |     { X86::MOVSS2DIrr,  X86::MOVSS2DImr, 0, 0 }, | 
 | 282 |     { X86::MOVSSrr,     X86::MOVSSmr, 0, 0 }, | 
 | 283 |     { X86::MOVUPDrr,    X86::MOVUPDmr, 0, 0 }, | 
 | 284 |     { X86::MOVUPSrr,    X86::MOVUPSmr, 0, 0 }, | 
 | 285 |     { X86::MUL16r,      X86::MUL16m, 1, 0 }, | 
 | 286 |     { X86::MUL32r,      X86::MUL32m, 1, 0 }, | 
 | 287 |     { X86::MUL64r,      X86::MUL64m, 1, 0 }, | 
 | 288 |     { X86::MUL8r,       X86::MUL8m, 1, 0 }, | 
 | 289 |     { X86::SETAEr,      X86::SETAEm, 0, 0 }, | 
 | 290 |     { X86::SETAr,       X86::SETAm, 0, 0 }, | 
 | 291 |     { X86::SETBEr,      X86::SETBEm, 0, 0 }, | 
 | 292 |     { X86::SETBr,       X86::SETBm, 0, 0 }, | 
 | 293 |     { X86::SETEr,       X86::SETEm, 0, 0 }, | 
 | 294 |     { X86::SETGEr,      X86::SETGEm, 0, 0 }, | 
 | 295 |     { X86::SETGr,       X86::SETGm, 0, 0 }, | 
 | 296 |     { X86::SETLEr,      X86::SETLEm, 0, 0 }, | 
 | 297 |     { X86::SETLr,       X86::SETLm, 0, 0 }, | 
 | 298 |     { X86::SETNEr,      X86::SETNEm, 0, 0 }, | 
 | 299 |     { X86::SETNOr,      X86::SETNOm, 0, 0 }, | 
 | 300 |     { X86::SETNPr,      X86::SETNPm, 0, 0 }, | 
 | 301 |     { X86::SETNSr,      X86::SETNSm, 0, 0 }, | 
 | 302 |     { X86::SETOr,       X86::SETOm, 0, 0 }, | 
 | 303 |     { X86::SETPr,       X86::SETPm, 0, 0 }, | 
 | 304 |     { X86::SETSr,       X86::SETSm, 0, 0 }, | 
 | 305 |     { X86::TAILJMPr,    X86::TAILJMPm, 1, 0 }, | 
 | 306 |     { X86::TEST16ri,    X86::TEST16mi, 1, 0 }, | 
 | 307 |     { X86::TEST32ri,    X86::TEST32mi, 1, 0 }, | 
 | 308 |     { X86::TEST64ri32,  X86::TEST64mi32, 1, 0 }, | 
 | 309 |     { X86::TEST8ri,     X86::TEST8mi, 1, 0 } | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 310 |   }; | 
 | 311 |  | 
 | 312 |   for (unsigned i = 0, e = array_lengthof(OpTbl0); i != e; ++i) { | 
 | 313 |     unsigned RegOp = OpTbl0[i][0]; | 
 | 314 |     unsigned MemOp = OpTbl0[i][1]; | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 315 |     unsigned Align = OpTbl0[i][3]; | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 316 |     if (!RegOp2MemOpTable0.insert(std::make_pair((unsigned*)RegOp, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 317 |                                            std::make_pair(MemOp,Align))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 318 |       assert(false && "Duplicated entries?"); | 
 | 319 |     unsigned FoldedLoad = OpTbl0[i][2]; | 
 | 320 |     // Index 0, folded load or store. | 
 | 321 |     unsigned AuxInfo = 0 | (FoldedLoad << 4) | ((FoldedLoad^1) << 5); | 
 | 322 |     if (RegOp != X86::FsMOVAPDrr && RegOp != X86::FsMOVAPSrr) | 
 | 323 |       if (!MemOp2RegOpTable.insert(std::make_pair((unsigned*)MemOp, | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 324 |                                      std::make_pair(RegOp, AuxInfo))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 325 |         AmbEntries.push_back(MemOp); | 
 | 326 |   } | 
 | 327 |  | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 328 |   static const unsigned OpTbl1[][3] = { | 
 | 329 |     { X86::CMP16rr,         X86::CMP16rm, 0 }, | 
 | 330 |     { X86::CMP32rr,         X86::CMP32rm, 0 }, | 
 | 331 |     { X86::CMP64rr,         X86::CMP64rm, 0 }, | 
 | 332 |     { X86::CMP8rr,          X86::CMP8rm, 0 }, | 
 | 333 |     { X86::CVTSD2SSrr,      X86::CVTSD2SSrm, 0 }, | 
 | 334 |     { X86::CVTSI2SD64rr,    X86::CVTSI2SD64rm, 0 }, | 
 | 335 |     { X86::CVTSI2SDrr,      X86::CVTSI2SDrm, 0 }, | 
 | 336 |     { X86::CVTSI2SS64rr,    X86::CVTSI2SS64rm, 0 }, | 
 | 337 |     { X86::CVTSI2SSrr,      X86::CVTSI2SSrm, 0 }, | 
 | 338 |     { X86::CVTSS2SDrr,      X86::CVTSS2SDrm, 0 }, | 
 | 339 |     { X86::CVTTSD2SI64rr,   X86::CVTTSD2SI64rm, 0 }, | 
 | 340 |     { X86::CVTTSD2SIrr,     X86::CVTTSD2SIrm, 0 }, | 
 | 341 |     { X86::CVTTSS2SI64rr,   X86::CVTTSS2SI64rm, 0 }, | 
 | 342 |     { X86::CVTTSS2SIrr,     X86::CVTTSS2SIrm, 0 }, | 
 | 343 |     { X86::FsMOVAPDrr,      X86::MOVSDrm, 0 }, | 
 | 344 |     { X86::FsMOVAPSrr,      X86::MOVSSrm, 0 }, | 
 | 345 |     { X86::IMUL16rri,       X86::IMUL16rmi, 0 }, | 
 | 346 |     { X86::IMUL16rri8,      X86::IMUL16rmi8, 0 }, | 
 | 347 |     { X86::IMUL32rri,       X86::IMUL32rmi, 0 }, | 
 | 348 |     { X86::IMUL32rri8,      X86::IMUL32rmi8, 0 }, | 
 | 349 |     { X86::IMUL64rri32,     X86::IMUL64rmi32, 0 }, | 
 | 350 |     { X86::IMUL64rri8,      X86::IMUL64rmi8, 0 }, | 
 | 351 |     { X86::Int_CMPSDrr,     X86::Int_CMPSDrm, 0 }, | 
 | 352 |     { X86::Int_CMPSSrr,     X86::Int_CMPSSrm, 0 }, | 
 | 353 |     { X86::Int_COMISDrr,    X86::Int_COMISDrm, 0 }, | 
 | 354 |     { X86::Int_COMISSrr,    X86::Int_COMISSrm, 0 }, | 
 | 355 |     { X86::Int_CVTDQ2PDrr,  X86::Int_CVTDQ2PDrm, 16 }, | 
 | 356 |     { X86::Int_CVTDQ2PSrr,  X86::Int_CVTDQ2PSrm, 16 }, | 
 | 357 |     { X86::Int_CVTPD2DQrr,  X86::Int_CVTPD2DQrm, 16 }, | 
 | 358 |     { X86::Int_CVTPD2PSrr,  X86::Int_CVTPD2PSrm, 16 }, | 
 | 359 |     { X86::Int_CVTPS2DQrr,  X86::Int_CVTPS2DQrm, 16 }, | 
 | 360 |     { X86::Int_CVTPS2PDrr,  X86::Int_CVTPS2PDrm, 0 }, | 
 | 361 |     { X86::Int_CVTSD2SI64rr,X86::Int_CVTSD2SI64rm, 0 }, | 
 | 362 |     { X86::Int_CVTSD2SIrr,  X86::Int_CVTSD2SIrm, 0 }, | 
 | 363 |     { X86::Int_CVTSD2SSrr,  X86::Int_CVTSD2SSrm, 0 }, | 
 | 364 |     { X86::Int_CVTSI2SD64rr,X86::Int_CVTSI2SD64rm, 0 }, | 
 | 365 |     { X86::Int_CVTSI2SDrr,  X86::Int_CVTSI2SDrm, 0 }, | 
 | 366 |     { X86::Int_CVTSI2SS64rr,X86::Int_CVTSI2SS64rm, 0 }, | 
 | 367 |     { X86::Int_CVTSI2SSrr,  X86::Int_CVTSI2SSrm, 0 }, | 
 | 368 |     { X86::Int_CVTSS2SDrr,  X86::Int_CVTSS2SDrm, 0 }, | 
 | 369 |     { X86::Int_CVTSS2SI64rr,X86::Int_CVTSS2SI64rm, 0 }, | 
 | 370 |     { X86::Int_CVTSS2SIrr,  X86::Int_CVTSS2SIrm, 0 }, | 
 | 371 |     { X86::Int_CVTTPD2DQrr, X86::Int_CVTTPD2DQrm, 16 }, | 
 | 372 |     { X86::Int_CVTTPS2DQrr, X86::Int_CVTTPS2DQrm, 16 }, | 
 | 373 |     { X86::Int_CVTTSD2SI64rr,X86::Int_CVTTSD2SI64rm, 0 }, | 
 | 374 |     { X86::Int_CVTTSD2SIrr, X86::Int_CVTTSD2SIrm, 0 }, | 
 | 375 |     { X86::Int_CVTTSS2SI64rr,X86::Int_CVTTSS2SI64rm, 0 }, | 
 | 376 |     { X86::Int_CVTTSS2SIrr, X86::Int_CVTTSS2SIrm, 0 }, | 
 | 377 |     { X86::Int_UCOMISDrr,   X86::Int_UCOMISDrm, 0 }, | 
 | 378 |     { X86::Int_UCOMISSrr,   X86::Int_UCOMISSrm, 0 }, | 
 | 379 |     { X86::MOV16rr,         X86::MOV16rm, 0 }, | 
 | 380 |     { X86::MOV32rr,         X86::MOV32rm, 0 }, | 
 | 381 |     { X86::MOV64rr,         X86::MOV64rm, 0 }, | 
 | 382 |     { X86::MOV64toPQIrr,    X86::MOVQI2PQIrm, 0 }, | 
 | 383 |     { X86::MOV64toSDrr,     X86::MOV64toSDrm, 0 }, | 
 | 384 |     { X86::MOV8rr,          X86::MOV8rm, 0 }, | 
 | 385 |     { X86::MOVAPDrr,        X86::MOVAPDrm, 16 }, | 
 | 386 |     { X86::MOVAPSrr,        X86::MOVAPSrm, 16 }, | 
 | 387 |     { X86::MOVDDUPrr,       X86::MOVDDUPrm, 0 }, | 
 | 388 |     { X86::MOVDI2PDIrr,     X86::MOVDI2PDIrm, 0 }, | 
 | 389 |     { X86::MOVDI2SSrr,      X86::MOVDI2SSrm, 0 }, | 
 | 390 |     { X86::MOVDQArr,        X86::MOVDQArm, 16 }, | 
 | 391 |     { X86::MOVSD2PDrr,      X86::MOVSD2PDrm, 0 }, | 
 | 392 |     { X86::MOVSDrr,         X86::MOVSDrm, 0 }, | 
 | 393 |     { X86::MOVSHDUPrr,      X86::MOVSHDUPrm, 16 }, | 
 | 394 |     { X86::MOVSLDUPrr,      X86::MOVSLDUPrm, 16 }, | 
 | 395 |     { X86::MOVSS2PSrr,      X86::MOVSS2PSrm, 0 }, | 
 | 396 |     { X86::MOVSSrr,         X86::MOVSSrm, 0 }, | 
 | 397 |     { X86::MOVSX16rr8,      X86::MOVSX16rm8, 0 }, | 
 | 398 |     { X86::MOVSX32rr16,     X86::MOVSX32rm16, 0 }, | 
 | 399 |     { X86::MOVSX32rr8,      X86::MOVSX32rm8, 0 }, | 
 | 400 |     { X86::MOVSX64rr16,     X86::MOVSX64rm16, 0 }, | 
 | 401 |     { X86::MOVSX64rr32,     X86::MOVSX64rm32, 0 }, | 
 | 402 |     { X86::MOVSX64rr8,      X86::MOVSX64rm8, 0 }, | 
 | 403 |     { X86::MOVUPDrr,        X86::MOVUPDrm, 16 }, | 
 | 404 |     { X86::MOVUPSrr,        X86::MOVUPSrm, 16 }, | 
 | 405 |     { X86::MOVZDI2PDIrr,    X86::MOVZDI2PDIrm, 0 }, | 
 | 406 |     { X86::MOVZQI2PQIrr,    X86::MOVZQI2PQIrm, 0 }, | 
 | 407 |     { X86::MOVZPQILo2PQIrr, X86::MOVZPQILo2PQIrm, 16 }, | 
 | 408 |     { X86::MOVZX16rr8,      X86::MOVZX16rm8, 0 }, | 
 | 409 |     { X86::MOVZX32rr16,     X86::MOVZX32rm16, 0 }, | 
 | 410 |     { X86::MOVZX32_NOREXrr8, X86::MOVZX32_NOREXrm8, 0 }, | 
 | 411 |     { X86::MOVZX32rr8,      X86::MOVZX32rm8, 0 }, | 
 | 412 |     { X86::MOVZX64rr16,     X86::MOVZX64rm16, 0 }, | 
 | 413 |     { X86::MOVZX64rr32,     X86::MOVZX64rm32, 0 }, | 
 | 414 |     { X86::MOVZX64rr8,      X86::MOVZX64rm8, 0 }, | 
 | 415 |     { X86::PSHUFDri,        X86::PSHUFDmi, 16 }, | 
 | 416 |     { X86::PSHUFHWri,       X86::PSHUFHWmi, 16 }, | 
 | 417 |     { X86::PSHUFLWri,       X86::PSHUFLWmi, 16 }, | 
 | 418 |     { X86::RCPPSr,          X86::RCPPSm, 16 }, | 
 | 419 |     { X86::RCPPSr_Int,      X86::RCPPSm_Int, 16 }, | 
 | 420 |     { X86::RSQRTPSr,        X86::RSQRTPSm, 16 }, | 
 | 421 |     { X86::RSQRTPSr_Int,    X86::RSQRTPSm_Int, 16 }, | 
 | 422 |     { X86::RSQRTSSr,        X86::RSQRTSSm, 0 }, | 
 | 423 |     { X86::RSQRTSSr_Int,    X86::RSQRTSSm_Int, 0 }, | 
 | 424 |     { X86::SQRTPDr,         X86::SQRTPDm, 16 }, | 
 | 425 |     { X86::SQRTPDr_Int,     X86::SQRTPDm_Int, 16 }, | 
 | 426 |     { X86::SQRTPSr,         X86::SQRTPSm, 16 }, | 
 | 427 |     { X86::SQRTPSr_Int,     X86::SQRTPSm_Int, 16 }, | 
 | 428 |     { X86::SQRTSDr,         X86::SQRTSDm, 0 }, | 
 | 429 |     { X86::SQRTSDr_Int,     X86::SQRTSDm_Int, 0 }, | 
 | 430 |     { X86::SQRTSSr,         X86::SQRTSSm, 0 }, | 
 | 431 |     { X86::SQRTSSr_Int,     X86::SQRTSSm_Int, 0 }, | 
 | 432 |     { X86::TEST16rr,        X86::TEST16rm, 0 }, | 
 | 433 |     { X86::TEST32rr,        X86::TEST32rm, 0 }, | 
 | 434 |     { X86::TEST64rr,        X86::TEST64rm, 0 }, | 
 | 435 |     { X86::TEST8rr,         X86::TEST8rm, 0 }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 436 |     // FIXME: TEST*rr EAX,EAX ---> CMP [mem], 0 | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 437 |     { X86::UCOMISDrr,       X86::UCOMISDrm, 0 }, | 
 | 438 |     { X86::UCOMISSrr,       X86::UCOMISSrm, 0 } | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 439 |   }; | 
 | 440 |  | 
 | 441 |   for (unsigned i = 0, e = array_lengthof(OpTbl1); i != e; ++i) { | 
 | 442 |     unsigned RegOp = OpTbl1[i][0]; | 
 | 443 |     unsigned MemOp = OpTbl1[i][1]; | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 444 |     unsigned Align = OpTbl1[i][2]; | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 445 |     if (!RegOp2MemOpTable1.insert(std::make_pair((unsigned*)RegOp, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 446 |                                            std::make_pair(MemOp,Align))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 447 |       assert(false && "Duplicated entries?"); | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 448 |     // Index 1, folded load | 
 | 449 |     unsigned AuxInfo = 1 | (1 << 4); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 450 |     if (RegOp != X86::FsMOVAPDrr && RegOp != X86::FsMOVAPSrr) | 
 | 451 |       if (!MemOp2RegOpTable.insert(std::make_pair((unsigned*)MemOp, | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 452 |                                      std::make_pair(RegOp, AuxInfo))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 453 |         AmbEntries.push_back(MemOp); | 
 | 454 |   } | 
 | 455 |  | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 456 |   static const unsigned OpTbl2[][3] = { | 
 | 457 |     { X86::ADC32rr,         X86::ADC32rm, 0 }, | 
 | 458 |     { X86::ADC64rr,         X86::ADC64rm, 0 }, | 
 | 459 |     { X86::ADD16rr,         X86::ADD16rm, 0 }, | 
 | 460 |     { X86::ADD32rr,         X86::ADD32rm, 0 }, | 
 | 461 |     { X86::ADD64rr,         X86::ADD64rm, 0 }, | 
 | 462 |     { X86::ADD8rr,          X86::ADD8rm, 0 }, | 
 | 463 |     { X86::ADDPDrr,         X86::ADDPDrm, 16 }, | 
 | 464 |     { X86::ADDPSrr,         X86::ADDPSrm, 16 }, | 
 | 465 |     { X86::ADDSDrr,         X86::ADDSDrm, 0 }, | 
 | 466 |     { X86::ADDSSrr,         X86::ADDSSrm, 0 }, | 
 | 467 |     { X86::ADDSUBPDrr,      X86::ADDSUBPDrm, 16 }, | 
 | 468 |     { X86::ADDSUBPSrr,      X86::ADDSUBPSrm, 16 }, | 
 | 469 |     { X86::AND16rr,         X86::AND16rm, 0 }, | 
 | 470 |     { X86::AND32rr,         X86::AND32rm, 0 }, | 
 | 471 |     { X86::AND64rr,         X86::AND64rm, 0 }, | 
 | 472 |     { X86::AND8rr,          X86::AND8rm, 0 }, | 
 | 473 |     { X86::ANDNPDrr,        X86::ANDNPDrm, 16 }, | 
 | 474 |     { X86::ANDNPSrr,        X86::ANDNPSrm, 16 }, | 
 | 475 |     { X86::ANDPDrr,         X86::ANDPDrm, 16 }, | 
 | 476 |     { X86::ANDPSrr,         X86::ANDPSrm, 16 }, | 
 | 477 |     { X86::CMOVA16rr,       X86::CMOVA16rm, 0 }, | 
 | 478 |     { X86::CMOVA32rr,       X86::CMOVA32rm, 0 }, | 
 | 479 |     { X86::CMOVA64rr,       X86::CMOVA64rm, 0 }, | 
 | 480 |     { X86::CMOVAE16rr,      X86::CMOVAE16rm, 0 }, | 
 | 481 |     { X86::CMOVAE32rr,      X86::CMOVAE32rm, 0 }, | 
 | 482 |     { X86::CMOVAE64rr,      X86::CMOVAE64rm, 0 }, | 
 | 483 |     { X86::CMOVB16rr,       X86::CMOVB16rm, 0 }, | 
 | 484 |     { X86::CMOVB32rr,       X86::CMOVB32rm, 0 }, | 
 | 485 |     { X86::CMOVB64rr,       X86::CMOVB64rm, 0 }, | 
 | 486 |     { X86::CMOVBE16rr,      X86::CMOVBE16rm, 0 }, | 
 | 487 |     { X86::CMOVBE32rr,      X86::CMOVBE32rm, 0 }, | 
 | 488 |     { X86::CMOVBE64rr,      X86::CMOVBE64rm, 0 }, | 
 | 489 |     { X86::CMOVE16rr,       X86::CMOVE16rm, 0 }, | 
 | 490 |     { X86::CMOVE32rr,       X86::CMOVE32rm, 0 }, | 
 | 491 |     { X86::CMOVE64rr,       X86::CMOVE64rm, 0 }, | 
 | 492 |     { X86::CMOVG16rr,       X86::CMOVG16rm, 0 }, | 
 | 493 |     { X86::CMOVG32rr,       X86::CMOVG32rm, 0 }, | 
 | 494 |     { X86::CMOVG64rr,       X86::CMOVG64rm, 0 }, | 
 | 495 |     { X86::CMOVGE16rr,      X86::CMOVGE16rm, 0 }, | 
 | 496 |     { X86::CMOVGE32rr,      X86::CMOVGE32rm, 0 }, | 
 | 497 |     { X86::CMOVGE64rr,      X86::CMOVGE64rm, 0 }, | 
 | 498 |     { X86::CMOVL16rr,       X86::CMOVL16rm, 0 }, | 
 | 499 |     { X86::CMOVL32rr,       X86::CMOVL32rm, 0 }, | 
 | 500 |     { X86::CMOVL64rr,       X86::CMOVL64rm, 0 }, | 
 | 501 |     { X86::CMOVLE16rr,      X86::CMOVLE16rm, 0 }, | 
 | 502 |     { X86::CMOVLE32rr,      X86::CMOVLE32rm, 0 }, | 
 | 503 |     { X86::CMOVLE64rr,      X86::CMOVLE64rm, 0 }, | 
 | 504 |     { X86::CMOVNE16rr,      X86::CMOVNE16rm, 0 }, | 
 | 505 |     { X86::CMOVNE32rr,      X86::CMOVNE32rm, 0 }, | 
 | 506 |     { X86::CMOVNE64rr,      X86::CMOVNE64rm, 0 }, | 
 | 507 |     { X86::CMOVNO16rr,      X86::CMOVNO16rm, 0 }, | 
 | 508 |     { X86::CMOVNO32rr,      X86::CMOVNO32rm, 0 }, | 
 | 509 |     { X86::CMOVNO64rr,      X86::CMOVNO64rm, 0 }, | 
 | 510 |     { X86::CMOVNP16rr,      X86::CMOVNP16rm, 0 }, | 
 | 511 |     { X86::CMOVNP32rr,      X86::CMOVNP32rm, 0 }, | 
 | 512 |     { X86::CMOVNP64rr,      X86::CMOVNP64rm, 0 }, | 
 | 513 |     { X86::CMOVNS16rr,      X86::CMOVNS16rm, 0 }, | 
 | 514 |     { X86::CMOVNS32rr,      X86::CMOVNS32rm, 0 }, | 
 | 515 |     { X86::CMOVNS64rr,      X86::CMOVNS64rm, 0 }, | 
 | 516 |     { X86::CMOVO16rr,       X86::CMOVO16rm, 0 }, | 
 | 517 |     { X86::CMOVO32rr,       X86::CMOVO32rm, 0 }, | 
 | 518 |     { X86::CMOVO64rr,       X86::CMOVO64rm, 0 }, | 
 | 519 |     { X86::CMOVP16rr,       X86::CMOVP16rm, 0 }, | 
 | 520 |     { X86::CMOVP32rr,       X86::CMOVP32rm, 0 }, | 
 | 521 |     { X86::CMOVP64rr,       X86::CMOVP64rm, 0 }, | 
 | 522 |     { X86::CMOVS16rr,       X86::CMOVS16rm, 0 }, | 
 | 523 |     { X86::CMOVS32rr,       X86::CMOVS32rm, 0 }, | 
 | 524 |     { X86::CMOVS64rr,       X86::CMOVS64rm, 0 }, | 
 | 525 |     { X86::CMPPDrri,        X86::CMPPDrmi, 16 }, | 
 | 526 |     { X86::CMPPSrri,        X86::CMPPSrmi, 16 }, | 
 | 527 |     { X86::CMPSDrr,         X86::CMPSDrm, 0 }, | 
 | 528 |     { X86::CMPSSrr,         X86::CMPSSrm, 0 }, | 
 | 529 |     { X86::DIVPDrr,         X86::DIVPDrm, 16 }, | 
 | 530 |     { X86::DIVPSrr,         X86::DIVPSrm, 16 }, | 
 | 531 |     { X86::DIVSDrr,         X86::DIVSDrm, 0 }, | 
 | 532 |     { X86::DIVSSrr,         X86::DIVSSrm, 0 }, | 
 | 533 |     { X86::FsANDNPDrr,      X86::FsANDNPDrm, 16 }, | 
 | 534 |     { X86::FsANDNPSrr,      X86::FsANDNPSrm, 16 }, | 
 | 535 |     { X86::FsANDPDrr,       X86::FsANDPDrm, 16 }, | 
 | 536 |     { X86::FsANDPSrr,       X86::FsANDPSrm, 16 }, | 
 | 537 |     { X86::FsORPDrr,        X86::FsORPDrm, 16 }, | 
 | 538 |     { X86::FsORPSrr,        X86::FsORPSrm, 16 }, | 
 | 539 |     { X86::FsXORPDrr,       X86::FsXORPDrm, 16 }, | 
 | 540 |     { X86::FsXORPSrr,       X86::FsXORPSrm, 16 }, | 
 | 541 |     { X86::HADDPDrr,        X86::HADDPDrm, 16 }, | 
 | 542 |     { X86::HADDPSrr,        X86::HADDPSrm, 16 }, | 
 | 543 |     { X86::HSUBPDrr,        X86::HSUBPDrm, 16 }, | 
 | 544 |     { X86::HSUBPSrr,        X86::HSUBPSrm, 16 }, | 
 | 545 |     { X86::IMUL16rr,        X86::IMUL16rm, 0 }, | 
 | 546 |     { X86::IMUL32rr,        X86::IMUL32rm, 0 }, | 
 | 547 |     { X86::IMUL64rr,        X86::IMUL64rm, 0 }, | 
 | 548 |     { X86::MAXPDrr,         X86::MAXPDrm, 16 }, | 
 | 549 |     { X86::MAXPDrr_Int,     X86::MAXPDrm_Int, 16 }, | 
 | 550 |     { X86::MAXPSrr,         X86::MAXPSrm, 16 }, | 
 | 551 |     { X86::MAXPSrr_Int,     X86::MAXPSrm_Int, 16 }, | 
 | 552 |     { X86::MAXSDrr,         X86::MAXSDrm, 0 }, | 
 | 553 |     { X86::MAXSDrr_Int,     X86::MAXSDrm_Int, 0 }, | 
 | 554 |     { X86::MAXSSrr,         X86::MAXSSrm, 0 }, | 
 | 555 |     { X86::MAXSSrr_Int,     X86::MAXSSrm_Int, 0 }, | 
 | 556 |     { X86::MINPDrr,         X86::MINPDrm, 16 }, | 
 | 557 |     { X86::MINPDrr_Int,     X86::MINPDrm_Int, 16 }, | 
 | 558 |     { X86::MINPSrr,         X86::MINPSrm, 16 }, | 
 | 559 |     { X86::MINPSrr_Int,     X86::MINPSrm_Int, 16 }, | 
 | 560 |     { X86::MINSDrr,         X86::MINSDrm, 0 }, | 
 | 561 |     { X86::MINSDrr_Int,     X86::MINSDrm_Int, 0 }, | 
 | 562 |     { X86::MINSSrr,         X86::MINSSrm, 0 }, | 
 | 563 |     { X86::MINSSrr_Int,     X86::MINSSrm_Int, 0 }, | 
 | 564 |     { X86::MULPDrr,         X86::MULPDrm, 16 }, | 
 | 565 |     { X86::MULPSrr,         X86::MULPSrm, 16 }, | 
 | 566 |     { X86::MULSDrr,         X86::MULSDrm, 0 }, | 
 | 567 |     { X86::MULSSrr,         X86::MULSSrm, 0 }, | 
 | 568 |     { X86::OR16rr,          X86::OR16rm, 0 }, | 
 | 569 |     { X86::OR32rr,          X86::OR32rm, 0 }, | 
 | 570 |     { X86::OR64rr,          X86::OR64rm, 0 }, | 
 | 571 |     { X86::OR8rr,           X86::OR8rm, 0 }, | 
 | 572 |     { X86::ORPDrr,          X86::ORPDrm, 16 }, | 
 | 573 |     { X86::ORPSrr,          X86::ORPSrm, 16 }, | 
 | 574 |     { X86::PACKSSDWrr,      X86::PACKSSDWrm, 16 }, | 
 | 575 |     { X86::PACKSSWBrr,      X86::PACKSSWBrm, 16 }, | 
 | 576 |     { X86::PACKUSWBrr,      X86::PACKUSWBrm, 16 }, | 
 | 577 |     { X86::PADDBrr,         X86::PADDBrm, 16 }, | 
 | 578 |     { X86::PADDDrr,         X86::PADDDrm, 16 }, | 
 | 579 |     { X86::PADDQrr,         X86::PADDQrm, 16 }, | 
 | 580 |     { X86::PADDSBrr,        X86::PADDSBrm, 16 }, | 
 | 581 |     { X86::PADDSWrr,        X86::PADDSWrm, 16 }, | 
 | 582 |     { X86::PADDWrr,         X86::PADDWrm, 16 }, | 
 | 583 |     { X86::PANDNrr,         X86::PANDNrm, 16 }, | 
 | 584 |     { X86::PANDrr,          X86::PANDrm, 16 }, | 
 | 585 |     { X86::PAVGBrr,         X86::PAVGBrm, 16 }, | 
 | 586 |     { X86::PAVGWrr,         X86::PAVGWrm, 16 }, | 
 | 587 |     { X86::PCMPEQBrr,       X86::PCMPEQBrm, 16 }, | 
 | 588 |     { X86::PCMPEQDrr,       X86::PCMPEQDrm, 16 }, | 
 | 589 |     { X86::PCMPEQWrr,       X86::PCMPEQWrm, 16 }, | 
 | 590 |     { X86::PCMPGTBrr,       X86::PCMPGTBrm, 16 }, | 
 | 591 |     { X86::PCMPGTDrr,       X86::PCMPGTDrm, 16 }, | 
 | 592 |     { X86::PCMPGTWrr,       X86::PCMPGTWrm, 16 }, | 
 | 593 |     { X86::PINSRWrri,       X86::PINSRWrmi, 16 }, | 
 | 594 |     { X86::PMADDWDrr,       X86::PMADDWDrm, 16 }, | 
 | 595 |     { X86::PMAXSWrr,        X86::PMAXSWrm, 16 }, | 
 | 596 |     { X86::PMAXUBrr,        X86::PMAXUBrm, 16 }, | 
 | 597 |     { X86::PMINSWrr,        X86::PMINSWrm, 16 }, | 
 | 598 |     { X86::PMINUBrr,        X86::PMINUBrm, 16 }, | 
 | 599 |     { X86::PMULDQrr,        X86::PMULDQrm, 16 }, | 
 | 600 |     { X86::PMULHUWrr,       X86::PMULHUWrm, 16 }, | 
 | 601 |     { X86::PMULHWrr,        X86::PMULHWrm, 16 }, | 
 | 602 |     { X86::PMULLDrr,        X86::PMULLDrm, 16 }, | 
 | 603 |     { X86::PMULLDrr_int,    X86::PMULLDrm_int, 16 }, | 
 | 604 |     { X86::PMULLWrr,        X86::PMULLWrm, 16 }, | 
 | 605 |     { X86::PMULUDQrr,       X86::PMULUDQrm, 16 }, | 
 | 606 |     { X86::PORrr,           X86::PORrm, 16 }, | 
 | 607 |     { X86::PSADBWrr,        X86::PSADBWrm, 16 }, | 
 | 608 |     { X86::PSLLDrr,         X86::PSLLDrm, 16 }, | 
 | 609 |     { X86::PSLLQrr,         X86::PSLLQrm, 16 }, | 
 | 610 |     { X86::PSLLWrr,         X86::PSLLWrm, 16 }, | 
 | 611 |     { X86::PSRADrr,         X86::PSRADrm, 16 }, | 
 | 612 |     { X86::PSRAWrr,         X86::PSRAWrm, 16 }, | 
 | 613 |     { X86::PSRLDrr,         X86::PSRLDrm, 16 }, | 
 | 614 |     { X86::PSRLQrr,         X86::PSRLQrm, 16 }, | 
 | 615 |     { X86::PSRLWrr,         X86::PSRLWrm, 16 }, | 
 | 616 |     { X86::PSUBBrr,         X86::PSUBBrm, 16 }, | 
 | 617 |     { X86::PSUBDrr,         X86::PSUBDrm, 16 }, | 
 | 618 |     { X86::PSUBSBrr,        X86::PSUBSBrm, 16 }, | 
 | 619 |     { X86::PSUBSWrr,        X86::PSUBSWrm, 16 }, | 
 | 620 |     { X86::PSUBWrr,         X86::PSUBWrm, 16 }, | 
 | 621 |     { X86::PUNPCKHBWrr,     X86::PUNPCKHBWrm, 16 }, | 
 | 622 |     { X86::PUNPCKHDQrr,     X86::PUNPCKHDQrm, 16 }, | 
 | 623 |     { X86::PUNPCKHQDQrr,    X86::PUNPCKHQDQrm, 16 }, | 
 | 624 |     { X86::PUNPCKHWDrr,     X86::PUNPCKHWDrm, 16 }, | 
 | 625 |     { X86::PUNPCKLBWrr,     X86::PUNPCKLBWrm, 16 }, | 
 | 626 |     { X86::PUNPCKLDQrr,     X86::PUNPCKLDQrm, 16 }, | 
 | 627 |     { X86::PUNPCKLQDQrr,    X86::PUNPCKLQDQrm, 16 }, | 
 | 628 |     { X86::PUNPCKLWDrr,     X86::PUNPCKLWDrm, 16 }, | 
 | 629 |     { X86::PXORrr,          X86::PXORrm, 16 }, | 
 | 630 |     { X86::SBB32rr,         X86::SBB32rm, 0 }, | 
 | 631 |     { X86::SBB64rr,         X86::SBB64rm, 0 }, | 
 | 632 |     { X86::SHUFPDrri,       X86::SHUFPDrmi, 16 }, | 
 | 633 |     { X86::SHUFPSrri,       X86::SHUFPSrmi, 16 }, | 
 | 634 |     { X86::SUB16rr,         X86::SUB16rm, 0 }, | 
 | 635 |     { X86::SUB32rr,         X86::SUB32rm, 0 }, | 
 | 636 |     { X86::SUB64rr,         X86::SUB64rm, 0 }, | 
 | 637 |     { X86::SUB8rr,          X86::SUB8rm, 0 }, | 
 | 638 |     { X86::SUBPDrr,         X86::SUBPDrm, 16 }, | 
 | 639 |     { X86::SUBPSrr,         X86::SUBPSrm, 16 }, | 
 | 640 |     { X86::SUBSDrr,         X86::SUBSDrm, 0 }, | 
 | 641 |     { X86::SUBSSrr,         X86::SUBSSrm, 0 }, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 642 |     // FIXME: TEST*rr -> swapped operand of TEST*mr. | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 643 |     { X86::UNPCKHPDrr,      X86::UNPCKHPDrm, 16 }, | 
 | 644 |     { X86::UNPCKHPSrr,      X86::UNPCKHPSrm, 16 }, | 
 | 645 |     { X86::UNPCKLPDrr,      X86::UNPCKLPDrm, 16 }, | 
 | 646 |     { X86::UNPCKLPSrr,      X86::UNPCKLPSrm, 16 }, | 
 | 647 |     { X86::XOR16rr,         X86::XOR16rm, 0 }, | 
 | 648 |     { X86::XOR32rr,         X86::XOR32rm, 0 }, | 
 | 649 |     { X86::XOR64rr,         X86::XOR64rm, 0 }, | 
 | 650 |     { X86::XOR8rr,          X86::XOR8rm, 0 }, | 
 | 651 |     { X86::XORPDrr,         X86::XORPDrm, 16 }, | 
 | 652 |     { X86::XORPSrr,         X86::XORPSrm, 16 } | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 653 |   }; | 
 | 654 |  | 
 | 655 |   for (unsigned i = 0, e = array_lengthof(OpTbl2); i != e; ++i) { | 
 | 656 |     unsigned RegOp = OpTbl2[i][0]; | 
 | 657 |     unsigned MemOp = OpTbl2[i][1]; | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 658 |     unsigned Align = OpTbl2[i][2]; | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 659 |     if (!RegOp2MemOpTable2.insert(std::make_pair((unsigned*)RegOp, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 660 |                                            std::make_pair(MemOp,Align))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 661 |       assert(false && "Duplicated entries?"); | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 662 |     // Index 2, folded load | 
 | 663 |     unsigned AuxInfo = 2 | (1 << 4); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 664 |     if (!MemOp2RegOpTable.insert(std::make_pair((unsigned*)MemOp, | 
| Dan Gohman | 6b345ee | 2008-07-07 17:46:23 +0000 | [diff] [blame] | 665 |                                    std::make_pair(RegOp, AuxInfo))).second) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 666 |       AmbEntries.push_back(MemOp); | 
 | 667 |   } | 
 | 668 |  | 
 | 669 |   // Remove ambiguous entries. | 
 | 670 |   assert(AmbEntries.empty() && "Duplicated entries in unfolding maps?"); | 
| Chris Lattner | 7261408 | 2002-10-25 22:55:53 +0000 | [diff] [blame] | 671 | } | 
 | 672 |  | 
| Alkis Evlogimenos | 5e30002 | 2003-12-28 17:35:08 +0000 | [diff] [blame] | 673 | bool X86InstrInfo::isMoveInstr(const MachineInstr& MI, | 
| Evan Cheng | 04ee5a1 | 2009-01-20 19:12:24 +0000 | [diff] [blame] | 674 |                                unsigned &SrcReg, unsigned &DstReg, | 
 | 675 |                                unsigned &SrcSubIdx, unsigned &DstSubIdx) const { | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 676 |   switch (MI.getOpcode()) { | 
 | 677 |   default: | 
 | 678 |     return false; | 
 | 679 |   case X86::MOV8rr: | 
| Bill Wendling | 1824773 | 2009-04-17 22:40:38 +0000 | [diff] [blame] | 680 |   case X86::MOV8rr_NOREX: | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 681 |   case X86::MOV16rr: | 
 | 682 |   case X86::MOV32rr:  | 
 | 683 |   case X86::MOV64rr: | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 684 |   case X86::MOVSSrr: | 
 | 685 |   case X86::MOVSDrr: | 
| Chris Lattner | 1d38677 | 2008-03-11 19:30:09 +0000 | [diff] [blame] | 686 |  | 
 | 687 |   // FP Stack register class copies | 
 | 688 |   case X86::MOV_Fp3232: case X86::MOV_Fp6464: case X86::MOV_Fp8080: | 
 | 689 |   case X86::MOV_Fp3264: case X86::MOV_Fp3280: | 
 | 690 |   case X86::MOV_Fp6432: case X86::MOV_Fp8032: | 
 | 691 |        | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 692 |   case X86::FsMOVAPSrr: | 
 | 693 |   case X86::FsMOVAPDrr: | 
 | 694 |   case X86::MOVAPSrr: | 
 | 695 |   case X86::MOVAPDrr: | 
| Dan Gohman | 5446274 | 2009-01-09 02:40:34 +0000 | [diff] [blame] | 696 |   case X86::MOVDQArr: | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 697 |   case X86::MOVSS2PSrr: | 
 | 698 |   case X86::MOVSD2PDrr: | 
 | 699 |   case X86::MOVPS2SSrr: | 
 | 700 |   case X86::MOVPD2SDrr: | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 701 |   case X86::MMX_MOVQ64rr: | 
 | 702 |     assert(MI.getNumOperands() >= 2 && | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 703 |            MI.getOperand(0).isReg() && | 
 | 704 |            MI.getOperand(1).isReg() && | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 705 |            "invalid register-register move instruction"); | 
| Evan Cheng | 04ee5a1 | 2009-01-20 19:12:24 +0000 | [diff] [blame] | 706 |     SrcReg = MI.getOperand(1).getReg(); | 
 | 707 |     DstReg = MI.getOperand(0).getReg(); | 
 | 708 |     SrcSubIdx = MI.getOperand(1).getSubReg(); | 
 | 709 |     DstSubIdx = MI.getOperand(0).getSubReg(); | 
| Chris Lattner | 07f7cc3 | 2008-03-11 19:28:17 +0000 | [diff] [blame] | 710 |     return true; | 
| Alkis Evlogimenos | 5e30002 | 2003-12-28 17:35:08 +0000 | [diff] [blame] | 711 |   } | 
| Alkis Evlogimenos | 5e30002 | 2003-12-28 17:35:08 +0000 | [diff] [blame] | 712 | } | 
| Alkis Evlogimenos | 36f506e | 2004-07-31 09:38:47 +0000 | [diff] [blame] | 713 |  | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 714 | /// isFrameOperand - Return true and the FrameIndex if the specified | 
 | 715 | /// operand and follow operands form a reference to the stack frame. | 
 | 716 | bool X86InstrInfo::isFrameOperand(const MachineInstr *MI, unsigned int Op, | 
 | 717 |                                   int &FrameIndex) const { | 
 | 718 |   if (MI->getOperand(Op).isFI() && MI->getOperand(Op+1).isImm() && | 
 | 719 |       MI->getOperand(Op+2).isReg() && MI->getOperand(Op+3).isImm() && | 
 | 720 |       MI->getOperand(Op+1).getImm() == 1 && | 
 | 721 |       MI->getOperand(Op+2).getReg() == 0 && | 
 | 722 |       MI->getOperand(Op+3).getImm() == 0) { | 
 | 723 |     FrameIndex = MI->getOperand(Op).getIndex(); | 
 | 724 |     return true; | 
 | 725 |   } | 
 | 726 |   return false; | 
 | 727 | } | 
 | 728 |  | 
| David Greene | dda3978 | 2009-11-13 00:29:53 +0000 | [diff] [blame] | 729 | static bool isFrameLoadOpcode(int Opcode) { | 
 | 730 |   switch (Opcode) { | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 731 |   default: break; | 
 | 732 |   case X86::MOV8rm: | 
 | 733 |   case X86::MOV16rm: | 
 | 734 |   case X86::MOV32rm: | 
| Evan Cheng | 25ab690 | 2006-09-08 06:48:29 +0000 | [diff] [blame] | 735 |   case X86::MOV64rm: | 
| Dale Johannesen | e377d4d | 2007-07-04 21:07:47 +0000 | [diff] [blame] | 736 |   case X86::LD_Fp64m: | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 737 |   case X86::MOVSSrm: | 
 | 738 |   case X86::MOVSDrm: | 
| Chris Lattner | 993c897 | 2006-04-18 16:44:51 +0000 | [diff] [blame] | 739 |   case X86::MOVAPSrm: | 
 | 740 |   case X86::MOVAPDrm: | 
| Dan Gohman | 5446274 | 2009-01-09 02:40:34 +0000 | [diff] [blame] | 741 |   case X86::MOVDQArm: | 
| Bill Wendling | 823efee | 2007-04-03 06:00:37 +0000 | [diff] [blame] | 742 |   case X86::MMX_MOVD64rm: | 
 | 743 |   case X86::MMX_MOVQ64rm: | 
| David Greene | dda3978 | 2009-11-13 00:29:53 +0000 | [diff] [blame] | 744 |     return true; | 
 | 745 |     break; | 
 | 746 |   } | 
 | 747 |   return false; | 
 | 748 | } | 
 | 749 |  | 
 | 750 | static bool isFrameStoreOpcode(int Opcode) { | 
 | 751 |   switch (Opcode) { | 
 | 752 |   default: break; | 
 | 753 |   case X86::MOV8mr: | 
 | 754 |   case X86::MOV16mr: | 
 | 755 |   case X86::MOV32mr: | 
 | 756 |   case X86::MOV64mr: | 
 | 757 |   case X86::ST_FpP64m: | 
 | 758 |   case X86::MOVSSmr: | 
 | 759 |   case X86::MOVSDmr: | 
 | 760 |   case X86::MOVAPSmr: | 
 | 761 |   case X86::MOVAPDmr: | 
 | 762 |   case X86::MOVDQAmr: | 
 | 763 |   case X86::MMX_MOVD64mr: | 
 | 764 |   case X86::MMX_MOVQ64mr: | 
 | 765 |   case X86::MMX_MOVNTQmr: | 
 | 766 |     return true; | 
 | 767 |   } | 
 | 768 |   return false; | 
 | 769 | } | 
 | 770 |  | 
 | 771 | unsigned X86InstrInfo::isLoadFromStackSlot(const MachineInstr *MI,  | 
 | 772 |                                            int &FrameIndex) const { | 
 | 773 |   if (isFrameLoadOpcode(MI->getOpcode())) | 
 | 774 |     if (isFrameOperand(MI, 1, FrameIndex)) | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 775 |       return MI->getOperand(0).getReg(); | 
| David Greene | dda3978 | 2009-11-13 00:29:53 +0000 | [diff] [blame] | 776 |   return 0; | 
 | 777 | } | 
 | 778 |  | 
 | 779 | unsigned X86InstrInfo::isLoadFromStackSlotPostFE(const MachineInstr *MI,  | 
 | 780 |                                                  int &FrameIndex) const { | 
 | 781 |   if (isFrameLoadOpcode(MI->getOpcode())) { | 
 | 782 |     unsigned Reg; | 
 | 783 |     if ((Reg = isLoadFromStackSlot(MI, FrameIndex))) | 
 | 784 |       return Reg; | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 785 |     // Check for post-frame index elimination operations | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 786 |     const MachineMemOperand *Dummy; | 
 | 787 |     return hasLoadFromStackSlot(MI, Dummy, FrameIndex); | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 788 |   } | 
 | 789 |   return 0; | 
 | 790 | } | 
 | 791 |  | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 792 | bool X86InstrInfo::hasLoadFromStackSlot(const MachineInstr *MI, | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 793 |                                         const MachineMemOperand *&MMO, | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 794 |                                         int &FrameIndex) const { | 
 | 795 |   for (MachineInstr::mmo_iterator o = MI->memoperands_begin(), | 
 | 796 |          oe = MI->memoperands_end(); | 
 | 797 |        o != oe; | 
 | 798 |        ++o) { | 
 | 799 |     if ((*o)->isLoad() && (*o)->getValue()) | 
 | 800 |       if (const FixedStackPseudoSourceValue *Value = | 
 | 801 |           dyn_cast<const FixedStackPseudoSourceValue>((*o)->getValue())) { | 
 | 802 |         FrameIndex = Value->getFrameIndex(); | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 803 |         MMO = *o; | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 804 |         return true; | 
 | 805 |       } | 
 | 806 |   } | 
 | 807 |   return false; | 
 | 808 | } | 
 | 809 |  | 
| Dan Gohman | cbad42c | 2008-11-18 19:49:32 +0000 | [diff] [blame] | 810 | unsigned X86InstrInfo::isStoreToStackSlot(const MachineInstr *MI, | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 811 |                                           int &FrameIndex) const { | 
| David Greene | dda3978 | 2009-11-13 00:29:53 +0000 | [diff] [blame] | 812 |   if (isFrameStoreOpcode(MI->getOpcode())) | 
 | 813 |     if (isFrameOperand(MI, 0, FrameIndex)) | 
| Rafael Espindola | b449a68 | 2009-03-28 17:03:24 +0000 | [diff] [blame] | 814 |       return MI->getOperand(X86AddrNumOperands).getReg(); | 
| David Greene | dda3978 | 2009-11-13 00:29:53 +0000 | [diff] [blame] | 815 |   return 0; | 
 | 816 | } | 
 | 817 |  | 
 | 818 | unsigned X86InstrInfo::isStoreToStackSlotPostFE(const MachineInstr *MI, | 
 | 819 |                                                 int &FrameIndex) const { | 
 | 820 |   if (isFrameStoreOpcode(MI->getOpcode())) { | 
 | 821 |     unsigned Reg; | 
 | 822 |     if ((Reg = isStoreToStackSlot(MI, FrameIndex))) | 
 | 823 |       return Reg; | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 824 |     // Check for post-frame index elimination operations | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 825 |     const MachineMemOperand *Dummy; | 
 | 826 |     return hasStoreToStackSlot(MI, Dummy, FrameIndex); | 
| Chris Lattner | 4083960 | 2006-02-02 20:12:32 +0000 | [diff] [blame] | 827 |   } | 
 | 828 |   return 0; | 
 | 829 | } | 
 | 830 |  | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 831 | bool X86InstrInfo::hasStoreToStackSlot(const MachineInstr *MI, | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 832 |                                        const MachineMemOperand *&MMO, | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 833 |                                        int &FrameIndex) const { | 
 | 834 |   for (MachineInstr::mmo_iterator o = MI->memoperands_begin(), | 
 | 835 |          oe = MI->memoperands_end(); | 
 | 836 |        o != oe; | 
 | 837 |        ++o) { | 
 | 838 |     if ((*o)->isStore() && (*o)->getValue()) | 
 | 839 |       if (const FixedStackPseudoSourceValue *Value = | 
 | 840 |           dyn_cast<const FixedStackPseudoSourceValue>((*o)->getValue())) { | 
 | 841 |         FrameIndex = Value->getFrameIndex(); | 
| David Greene | 29dbf50 | 2009-12-04 22:38:46 +0000 | [diff] [blame] | 842 |         MMO = *o; | 
| David Greene | b87bc95 | 2009-11-12 20:55:29 +0000 | [diff] [blame] | 843 |         return true; | 
 | 844 |       } | 
 | 845 |   } | 
 | 846 |   return false; | 
 | 847 | } | 
 | 848 |  | 
| Evan Cheng | e3d8dbf | 2008-03-27 01:45:11 +0000 | [diff] [blame] | 849 | /// regIsPICBase - Return true if register is PIC base (i.e.g defined by | 
 | 850 | /// X86::MOVPC32r. | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 851 | static bool regIsPICBase(unsigned BaseReg, const MachineRegisterInfo &MRI) { | 
| Evan Cheng | e3d8dbf | 2008-03-27 01:45:11 +0000 | [diff] [blame] | 852 |   bool isPICBase = false; | 
 | 853 |   for (MachineRegisterInfo::def_iterator I = MRI.def_begin(BaseReg), | 
 | 854 |          E = MRI.def_end(); I != E; ++I) { | 
 | 855 |     MachineInstr *DefMI = I.getOperand().getParent(); | 
 | 856 |     if (DefMI->getOpcode() != X86::MOVPC32r) | 
 | 857 |       return false; | 
 | 858 |     assert(!isPICBase && "More than one PIC base?"); | 
 | 859 |     isPICBase = true; | 
 | 860 |   } | 
 | 861 |   return isPICBase; | 
 | 862 | } | 
| Evan Cheng | 9d15abe | 2008-03-31 07:54:19 +0000 | [diff] [blame] | 863 |  | 
| Bill Wendling | 9f8fea3 | 2008-05-12 20:54:26 +0000 | [diff] [blame] | 864 | bool | 
| Dan Gohman | 3731bc0 | 2009-10-10 00:34:18 +0000 | [diff] [blame] | 865 | X86InstrInfo::isReallyTriviallyReMaterializable(const MachineInstr *MI, | 
 | 866 |                                                 AliasAnalysis *AA) const { | 
| Dan Gohman | c101e95 | 2007-06-14 20:50:44 +0000 | [diff] [blame] | 867 |   switch (MI->getOpcode()) { | 
 | 868 |   default: break; | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 869 |     case X86::MOV8rm: | 
 | 870 |     case X86::MOV16rm: | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 871 |     case X86::MOV32rm: | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 872 |     case X86::MOV64rm: | 
 | 873 |     case X86::LD_Fp64m: | 
 | 874 |     case X86::MOVSSrm: | 
 | 875 |     case X86::MOVSDrm: | 
 | 876 |     case X86::MOVAPSrm: | 
| Evan Cheng | 600c043 | 2009-11-16 21:56:03 +0000 | [diff] [blame] | 877 |     case X86::MOVUPSrm: | 
| Evan Cheng | d15ac2f | 2009-11-17 09:51:18 +0000 | [diff] [blame] | 878 |     case X86::MOVUPSrm_Int: | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 879 |     case X86::MOVAPDrm: | 
| Dan Gohman | 5446274 | 2009-01-09 02:40:34 +0000 | [diff] [blame] | 880 |     case X86::MOVDQArm: | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 881 |     case X86::MMX_MOVD64rm: | 
| Evan Cheng | d15ac2f | 2009-11-17 09:51:18 +0000 | [diff] [blame] | 882 |     case X86::MMX_MOVQ64rm: | 
 | 883 |     case X86::FsMOVAPSrm: | 
 | 884 |     case X86::FsMOVAPDrm: { | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 885 |       // Loads from constant pools are trivially rematerializable. | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 886 |       if (MI->getOperand(1).isReg() && | 
 | 887 |           MI->getOperand(2).isImm() && | 
 | 888 |           MI->getOperand(3).isReg() && MI->getOperand(3).getReg() == 0 && | 
| Dan Gohman | 3731bc0 | 2009-10-10 00:34:18 +0000 | [diff] [blame] | 889 |           MI->isInvariantLoad(AA)) { | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 890 |         unsigned BaseReg = MI->getOperand(1).getReg(); | 
| Chris Lattner | 18c5987 | 2009-06-27 04:16:01 +0000 | [diff] [blame] | 891 |         if (BaseReg == 0 || BaseReg == X86::RIP) | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 892 |           return true; | 
 | 893 |         // Allow re-materialization of PIC load. | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 894 |         if (!ReMatPICStubLoad && MI->getOperand(4).isGlobal()) | 
| Evan Cheng | ffe2eb0 | 2008-04-01 23:26:12 +0000 | [diff] [blame] | 895 |           return false; | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 896 |         const MachineFunction &MF = *MI->getParent()->getParent(); | 
 | 897 |         const MachineRegisterInfo &MRI = MF.getRegInfo(); | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 898 |         bool isPICBase = false; | 
 | 899 |         for (MachineRegisterInfo::def_iterator I = MRI.def_begin(BaseReg), | 
 | 900 |                E = MRI.def_end(); I != E; ++I) { | 
 | 901 |           MachineInstr *DefMI = I.getOperand().getParent(); | 
 | 902 |           if (DefMI->getOpcode() != X86::MOVPC32r) | 
 | 903 |             return false; | 
 | 904 |           assert(!isPICBase && "More than one PIC base?"); | 
 | 905 |           isPICBase = true; | 
 | 906 |         } | 
 | 907 |         return isPICBase; | 
 | 908 |       }  | 
 | 909 |       return false; | 
| Evan Cheng | d8850a5 | 2008-02-22 09:25:47 +0000 | [diff] [blame] | 910 |     } | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 911 |   | 
 | 912 |      case X86::LEA32r: | 
 | 913 |      case X86::LEA64r: { | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 914 |        if (MI->getOperand(2).isImm() && | 
 | 915 |            MI->getOperand(3).isReg() && MI->getOperand(3).getReg() == 0 && | 
 | 916 |            !MI->getOperand(4).isReg()) { | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 917 |          // lea fi#, lea GV, etc. are all rematerializable. | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 918 |          if (!MI->getOperand(1).isReg()) | 
| Dan Gohman | 83ccd14 | 2008-09-26 21:30:20 +0000 | [diff] [blame] | 919 |            return true; | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 920 |          unsigned BaseReg = MI->getOperand(1).getReg(); | 
 | 921 |          if (BaseReg == 0) | 
 | 922 |            return true; | 
 | 923 |          // Allow re-materialization of lea PICBase + x. | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 924 |          const MachineFunction &MF = *MI->getParent()->getParent(); | 
 | 925 |          const MachineRegisterInfo &MRI = MF.getRegInfo(); | 
| Evan Cheng | e3d8dbf | 2008-03-27 01:45:11 +0000 | [diff] [blame] | 926 |          return regIsPICBase(BaseReg, MRI); | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 927 |        } | 
 | 928 |        return false; | 
 | 929 |      } | 
| Dan Gohman | c101e95 | 2007-06-14 20:50:44 +0000 | [diff] [blame] | 930 |   } | 
| Evan Cheng | e771ebd | 2008-03-27 01:41:09 +0000 | [diff] [blame] | 931 |  | 
| Dan Gohman | d45eddd | 2007-06-26 00:48:07 +0000 | [diff] [blame] | 932 |   // All other instructions marked M_REMATERIALIZABLE are always trivially | 
 | 933 |   // rematerializable. | 
 | 934 |   return true; | 
| Dan Gohman | c101e95 | 2007-06-14 20:50:44 +0000 | [diff] [blame] | 935 | } | 
 | 936 |  | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 937 | /// isSafeToClobberEFLAGS - Return true if it's safe insert an instruction that | 
 | 938 | /// would clobber the EFLAGS condition register. Note the result may be | 
 | 939 | /// conservative. If it cannot definitely determine the safety after visiting | 
| Dan Gohman | 1b1764b | 2009-10-14 00:08:59 +0000 | [diff] [blame] | 940 | /// a few instructions in each direction it assumes it's not safe. | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 941 | static bool isSafeToClobberEFLAGS(MachineBasicBlock &MBB, | 
 | 942 |                                   MachineBasicBlock::iterator I) { | 
| Dan Gohman | 3afda6e | 2008-10-21 03:24:31 +0000 | [diff] [blame] | 943 |   // It's always safe to clobber EFLAGS at the end of a block. | 
 | 944 |   if (I == MBB.end()) | 
 | 945 |     return true; | 
 | 946 |  | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 947 |   // For compile time consideration, if we are not able to determine the | 
| Dan Gohman | 1b1764b | 2009-10-14 00:08:59 +0000 | [diff] [blame] | 948 |   // safety after visiting 4 instructions in each direction, we will assume | 
 | 949 |   // it's not safe. | 
 | 950 |   MachineBasicBlock::iterator Iter = I; | 
 | 951 |   for (unsigned i = 0; i < 4; ++i) { | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 952 |     bool SeenDef = false; | 
| Dan Gohman | 1b1764b | 2009-10-14 00:08:59 +0000 | [diff] [blame] | 953 |     for (unsigned j = 0, e = Iter->getNumOperands(); j != e; ++j) { | 
 | 954 |       MachineOperand &MO = Iter->getOperand(j); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 955 |       if (!MO.isReg()) | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 956 |         continue; | 
 | 957 |       if (MO.getReg() == X86::EFLAGS) { | 
 | 958 |         if (MO.isUse()) | 
 | 959 |           return false; | 
 | 960 |         SeenDef = true; | 
 | 961 |       } | 
 | 962 |     } | 
 | 963 |  | 
 | 964 |     if (SeenDef) | 
 | 965 |       // This instruction defines EFLAGS, no need to look any further. | 
 | 966 |       return true; | 
| Dan Gohman | 1b1764b | 2009-10-14 00:08:59 +0000 | [diff] [blame] | 967 |     ++Iter; | 
| Dan Gohman | 3afda6e | 2008-10-21 03:24:31 +0000 | [diff] [blame] | 968 |  | 
 | 969 |     // If we make it to the end of the block, it's safe to clobber EFLAGS. | 
| Dan Gohman | 1b1764b | 2009-10-14 00:08:59 +0000 | [diff] [blame] | 970 |     if (Iter == MBB.end()) | 
 | 971 |       return true; | 
 | 972 |   } | 
 | 973 |  | 
 | 974 |   Iter = I; | 
 | 975 |   for (unsigned i = 0; i < 4; ++i) { | 
 | 976 |     // If we make it to the beginning of the block, it's safe to clobber | 
 | 977 |     // EFLAGS iff EFLAGS is not live-in. | 
 | 978 |     if (Iter == MBB.begin()) | 
 | 979 |       return !MBB.isLiveIn(X86::EFLAGS); | 
 | 980 |  | 
 | 981 |     --Iter; | 
 | 982 |     bool SawKill = false; | 
 | 983 |     for (unsigned j = 0, e = Iter->getNumOperands(); j != e; ++j) { | 
 | 984 |       MachineOperand &MO = Iter->getOperand(j); | 
 | 985 |       if (MO.isReg() && MO.getReg() == X86::EFLAGS) { | 
 | 986 |         if (MO.isDef()) return MO.isDead(); | 
 | 987 |         if (MO.isKill()) SawKill = true; | 
 | 988 |       } | 
 | 989 |     } | 
 | 990 |  | 
 | 991 |     if (SawKill) | 
 | 992 |       // This instruction kills EFLAGS and doesn't redefine it, so | 
 | 993 |       // there's no need to look further. | 
| Dan Gohman | 3afda6e | 2008-10-21 03:24:31 +0000 | [diff] [blame] | 994 |       return true; | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 995 |   } | 
 | 996 |  | 
 | 997 |   // Conservative answer. | 
 | 998 |   return false; | 
 | 999 | } | 
 | 1000 |  | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1001 | void X86InstrInfo::reMaterialize(MachineBasicBlock &MBB, | 
 | 1002 |                                  MachineBasicBlock::iterator I, | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1003 |                                  unsigned DestReg, unsigned SubIdx, | 
| Evan Cheng | d57cdd5 | 2009-11-14 02:55:43 +0000 | [diff] [blame] | 1004 |                                  const MachineInstr *Orig, | 
 | 1005 |                                  const TargetRegisterInfo *TRI) const { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1006 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 1007 |   if (I != MBB.end()) DL = I->getDebugLoc(); | 
 | 1008 |  | 
| Evan Cheng | 03eb388 | 2008-04-16 23:44:44 +0000 | [diff] [blame] | 1009 |   if (SubIdx && TargetRegisterInfo::isPhysicalRegister(DestReg)) { | 
| Evan Cheng | d57cdd5 | 2009-11-14 02:55:43 +0000 | [diff] [blame] | 1010 |     DestReg = TRI->getSubReg(DestReg, SubIdx); | 
| Evan Cheng | 03eb388 | 2008-04-16 23:44:44 +0000 | [diff] [blame] | 1011 |     SubIdx = 0; | 
 | 1012 |   } | 
 | 1013 |  | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1014 |   // MOV32r0 etc. are implemented with xor which clobbers condition code. | 
 | 1015 |   // Re-materialize them as movri instructions to avoid side effects. | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1016 |   bool Clone = true; | 
 | 1017 |   unsigned Opc = Orig->getOpcode(); | 
 | 1018 |   switch (Opc) { | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1019 |   default: break; | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1020 |   case X86::MOV8r0: | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1021 |   case X86::MOV16r0: | 
| Chris Lattner | 9ac7542 | 2009-07-14 20:19:57 +0000 | [diff] [blame] | 1022 |   case X86::MOV32r0: { | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1023 |     if (!isSafeToClobberEFLAGS(MBB, I)) { | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1024 |       switch (Opc) { | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1025 |       default: break; | 
 | 1026 |       case X86::MOV8r0:  Opc = X86::MOV8ri;  break; | 
 | 1027 |       case X86::MOV16r0: Opc = X86::MOV16ri; break; | 
 | 1028 |       case X86::MOV32r0: Opc = X86::MOV32ri; break; | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1029 |       } | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1030 |       Clone = false; | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1031 |     } | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1032 |     break; | 
| Evan Cheng | 9ef4ca2 | 2008-06-24 07:10:51 +0000 | [diff] [blame] | 1033 |   } | 
 | 1034 |   } | 
 | 1035 |  | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1036 |   if (Clone) { | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 1037 |     MachineInstr *MI = MBB.getParent()->CloneMachineInstr(Orig); | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1038 |     MI->getOperand(0).setReg(DestReg); | 
 | 1039 |     MBB.insert(I, MI); | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1040 |   } else { | 
 | 1041 |     BuildMI(MBB, I, DL, get(Opc), DestReg).addImm(0); | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1042 |   } | 
| Evan Cheng | 03eb388 | 2008-04-16 23:44:44 +0000 | [diff] [blame] | 1043 |  | 
| Evan Cheng | 3784453 | 2009-07-16 09:20:10 +0000 | [diff] [blame] | 1044 |   MachineInstr *NewMI = prior(I); | 
 | 1045 |   NewMI->getOperand(0).setSubReg(SubIdx); | 
| Evan Cheng | ca1267c | 2008-03-31 20:40:39 +0000 | [diff] [blame] | 1046 | } | 
 | 1047 |  | 
| Evan Cheng | 3f411c7 | 2007-10-05 08:04:01 +0000 | [diff] [blame] | 1048 | /// hasLiveCondCodeDef - True if MI has a condition code def, e.g. EFLAGS, that | 
 | 1049 | /// is not marked dead. | 
 | 1050 | static bool hasLiveCondCodeDef(MachineInstr *MI) { | 
| Evan Cheng | 3f411c7 | 2007-10-05 08:04:01 +0000 | [diff] [blame] | 1051 |   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { | 
 | 1052 |     MachineOperand &MO = MI->getOperand(i); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 1053 |     if (MO.isReg() && MO.isDef() && | 
| Evan Cheng | 3f411c7 | 2007-10-05 08:04:01 +0000 | [diff] [blame] | 1054 |         MO.getReg() == X86::EFLAGS && !MO.isDead()) { | 
 | 1055 |       return true; | 
 | 1056 |     } | 
 | 1057 |   } | 
 | 1058 |   return false; | 
 | 1059 | } | 
 | 1060 |  | 
/// convertToThreeAddressWithLEA - Helper for convertToThreeAddress when
/// 16-bit LEA is disabled: form 3-address code with a 32-bit LEA instead, by
/// promoting the 16-bit operands to a 32-bit superregister, doing the
/// arithmetic there, and then truncating back down to the original 16-bit
/// subregister.
MachineInstr *
X86InstrInfo::convertToThreeAddressWithLEA(unsigned MIOpc,
                                           MachineFunction::iterator &MFI,
                                           MachineBasicBlock::iterator &MBBI,
                                           LiveVariables *LV) const {
  MachineInstr *MI = MBBI;
  // Operands of the original two-address instruction: Dest = op(Src, ...).
  unsigned Dest = MI->getOperand(0).getReg();
  unsigned Src = MI->getOperand(1).getReg();
  bool isDead = MI->getOperand(0).isDead();
  bool isKill = MI->getOperand(1).isKill();

  // In 64-bit mode use LEA64_32r so the result is implicitly truncated to
  // 32 bits; in 32-bit mode a plain LEA32r suffices.
  unsigned Opc = TM.getSubtarget<X86Subtarget>().is64Bit()
    ? X86::LEA64_32r : X86::LEA32r;
  MachineRegisterInfo &RegInfo = MFI->getParent()->getRegInfo();
  unsigned leaInReg = RegInfo.createVirtualRegister(&X86::GR32RegClass);
  unsigned leaOutReg = RegInfo.createVirtualRegister(&X86::GR32RegClass);

  // Build and insert into an implicit UNDEF value. This is OK because
  // we'll be shifting and then extracting the lower 16-bits.
  // This has the potential to cause a partial register stall. e.g.
  //   movw    (%rbp,%rcx,2), %dx
  //   leal    -65(%rdx), %esi
  // But testing has shown this *does* help performance in 64-bit mode (at
  // least on modern x86 machines).
  BuildMI(*MFI, MBBI, MI->getDebugLoc(), get(X86::IMPLICIT_DEF), leaInReg);
  MachineInstr *InsMI =
    BuildMI(*MFI, MBBI, MI->getDebugLoc(), get(X86::INSERT_SUBREG),leaInReg)
    .addReg(leaInReg)
    .addReg(Src, getKillRegState(isKill))
    .addImm(X86::SUBREG_16BIT);

  // Emit the 32-bit LEA; the addressing-mode operands depend on which
  // 16-bit instruction is being replaced.
  MachineInstrBuilder MIB = BuildMI(*MFI, MBBI, MI->getDebugLoc(),
                                    get(Opc), leaOutReg);
  switch (MIOpc) {
  default:
    llvm_unreachable(0);
    break;
  case X86::SHL16ri: {
    // shl => lea with scale (1 << ShAmt) and no base or displacement.
    unsigned ShAmt = MI->getOperand(2).getImm();
    MIB.addReg(0).addImm(1 << ShAmt)
       .addReg(leaInReg, RegState::Kill).addImm(0);
    break;
  }
  case X86::INC16r:
  case X86::INC64_16r:
    addLeaRegOffset(MIB, leaInReg, true, 1);
    break;
  case X86::DEC16r:
  case X86::DEC64_16r:
    addLeaRegOffset(MIB, leaInReg, true, -1);
    break;
  case X86::ADD16ri:
  case X86::ADD16ri8:
    addLeaRegOffset(MIB, leaInReg, true, MI->getOperand(2).getImm());     
    break;
  case X86::ADD16rr: {
    // Both operands are registers; the second may need its own promotion
    // through a second 32-bit virtual register.
    unsigned Src2 = MI->getOperand(2).getReg();
    bool isKill2 = MI->getOperand(2).isKill();
    unsigned leaInReg2 = 0;
    MachineInstr *InsMI2 = 0;
    if (Src == Src2) {
      // ADD16rr %reg1028<kill>, %reg1028
      // just a single insert_subreg.
      addRegReg(MIB, leaInReg, true, leaInReg, false);
    } else {
      leaInReg2 = RegInfo.createVirtualRegister(&X86::GR32RegClass);
      // Build and insert into an implicit UNDEF value. This is OK because
      // we'll be shifting and then extracting the lower 16-bits.
      BuildMI(*MFI, MIB, MI->getDebugLoc(), get(X86::IMPLICIT_DEF), leaInReg2);
      InsMI2 =
        BuildMI(*MFI, MIB, MI->getDebugLoc(), get(X86::INSERT_SUBREG),leaInReg2)
        .addReg(leaInReg2)
        .addReg(Src2, getKillRegState(isKill2))
        .addImm(X86::SUBREG_16BIT);
      addRegReg(MIB, leaInReg, true, leaInReg2, true);
    }
    if (LV && isKill2 && InsMI2)
      LV->replaceKillInstruction(Src2, MI, InsMI2);
    break;
  }
  }

  // Truncate the 32-bit LEA result back down to the original 16-bit dest.
  MachineInstr *NewMI = MIB;
  MachineInstr *ExtMI =
    BuildMI(*MFI, MBBI, MI->getDebugLoc(), get(X86::EXTRACT_SUBREG))
    .addReg(Dest, RegState::Define | getDeadRegState(isDead))
    .addReg(leaOutReg, RegState::Kill)
    .addImm(X86::SUBREG_16BIT);

  if (LV) {
    // Update live variables: the new 32-bit virtuals die at the LEA and the
    // extract respectively, and the original operands' kill/dead markers
    // move from MI onto the new instructions.
    LV->getVarInfo(leaInReg).Kills.push_back(NewMI);
    LV->getVarInfo(leaOutReg).Kills.push_back(ExtMI);
    if (isKill)
      LV->replaceKillInstruction(Src, MI, InsMI);
    if (isDead)
      LV->replaceKillInstruction(Dest, MI, ExtMI);
  }

  // The extract is the final instruction defining Dest; return it so the
  // caller can treat it as the replacement for MI.
  return ExtMI;
}
 | 1166 |  | 
/// convertToThreeAddress - This method must be implemented by targets that
/// set the M_CONVERTIBLE_TO_3_ADDR flag.  When this flag is set, the target
/// may be able to convert a two-address instruction into a true
/// three-address instruction on demand.  This allows the X86 target (for
/// example) to convert ADD and SHL instructions into LEA instructions if they
/// would require register copies due to two-addressness.
///
/// This method returns a null pointer if the transformation cannot be
/// performed, otherwise it returns the new instruction.
///
MachineInstr *
X86InstrInfo::convertToThreeAddress(MachineFunction::iterator &MFI,
                                    MachineBasicBlock::iterator &MBBI,
                                    LiveVariables *LV) const {
  MachineInstr *MI = MBBI;
  MachineFunction &MF = *MI->getParent()->getParent();
  // All instructions input are two-addr instructions.  Get the known operands.
  unsigned Dest = MI->getOperand(0).getReg();
  unsigned Src = MI->getOperand(1).getReg();
  bool isDead = MI->getOperand(0).isDead();
  bool isKill = MI->getOperand(1).isKill();

  MachineInstr *NewMI = NULL;
  // FIXME: 16-bit LEA's are really slow on Athlons, but not bad on P4's.  When
  // we have better subtarget support, enable the 16-bit LEA generation here.
  // 16-bit LEA is also slow on Core2.
  bool DisableLEA16 = true;
  bool is64Bit = TM.getSubtarget<X86Subtarget>().is64Bit();

  unsigned MIOpc = MI->getOpcode();
  switch (MIOpc) {
  case X86::SHUFPSrri: {
    // shufps with identical source registers can become pshufd, which is a
    // true three-address instruction (requires SSE2).
    assert(MI->getNumOperands() == 4 && "Unknown shufps instruction!");
    if (!TM.getSubtarget<X86Subtarget>().hasSSE2()) return 0;
     
    unsigned B = MI->getOperand(1).getReg();
    unsigned C = MI->getOperand(2).getReg();
    if (B != C) return 0;
    unsigned A = MI->getOperand(0).getReg();
    unsigned M = MI->getOperand(3).getImm();
    NewMI = BuildMI(MF, MI->getDebugLoc(), get(X86::PSHUFDri))
      .addReg(A, RegState::Define | getDeadRegState(isDead))
      .addReg(B, getKillRegState(isKill)).addImm(M);
    break;
  }
  case X86::SHL64ri: {
    assert(MI->getNumOperands() >= 3 && "Unknown shift instruction!");
    // NOTE: LEA doesn't produce flags like shift does, but LLVM never uses
    // the flags produced by a shift yet, so this is safe.
    // Only shift amounts 1-3 fit LEA's scale encoding (2/4/8).
    unsigned ShAmt = MI->getOperand(2).getImm();
    if (ShAmt == 0 || ShAmt >= 4) return 0;

    NewMI = BuildMI(MF, MI->getDebugLoc(), get(X86::LEA64r))
      .addReg(Dest, RegState::Define | getDeadRegState(isDead))
      .addReg(0).addImm(1 << ShAmt)
      .addReg(Src, getKillRegState(isKill))
      .addImm(0);
    break;
  }
  case X86::SHL32ri: {
    assert(MI->getNumOperands() >= 3 && "Unknown shift instruction!");
    // NOTE: LEA doesn't produce flags like shift does, but LLVM never uses
    // the flags produced by a shift yet, so this is safe.
    unsigned ShAmt = MI->getOperand(2).getImm();
    if (ShAmt == 0 || ShAmt >= 4) return 0;

    // In 64-bit mode, LEA64_32r implicitly truncates the result to 32 bits.
    unsigned Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
    NewMI = BuildMI(MF, MI->getDebugLoc(), get(Opc))
      .addReg(Dest, RegState::Define | getDeadRegState(isDead))
      .addReg(0).addImm(1 << ShAmt)
      .addReg(Src, getKillRegState(isKill)).addImm(0);
    break;
  }
  case X86::SHL16ri: {
    assert(MI->getNumOperands() >= 3 && "Unknown shift instruction!");
    // NOTE: LEA doesn't produce flags like shift does, but LLVM never uses
    // the flags produced by a shift yet, so this is safe.
    unsigned ShAmt = MI->getOperand(2).getImm();
    if (ShAmt == 0 || ShAmt >= 4) return 0;

    // 16-bit LEA is disabled; in 64-bit mode, fall back to the
    // promote-to-32-bit helper instead.
    if (DisableLEA16)
      return is64Bit ? convertToThreeAddressWithLEA(MIOpc, MFI, MBBI, LV) : 0;
    NewMI = BuildMI(MF, MI->getDebugLoc(), get(X86::LEA16r))
      .addReg(Dest, RegState::Define | getDeadRegState(isDead))
      .addReg(0).addImm(1 << ShAmt)
      .addReg(Src, getKillRegState(isKill))
      .addImm(0);
    break;
  }
  default: {
    // The following opcodes also sets the condition code register(s). Only
    // convert them to equivalent lea if the condition code register def's
    // are dead!
    if (hasLiveCondCodeDef(MI))
      return 0;

    switch (MIOpc) {
    default: return 0;
    case X86::INC64r:
    case X86::INC32r:
    case X86::INC64_32r: {
      // inc reg => lea reg, [reg + 1]
      assert(MI->getNumOperands() >= 2 && "Unknown inc instruction!");
      unsigned Opc = MIOpc == X86::INC64r ? X86::LEA64r
        : (is64Bit ? X86::LEA64_32r : X86::LEA32r);
      NewMI = addLeaRegOffset(BuildMI(MF, MI->getDebugLoc(), get(Opc))
                              .addReg(Dest, RegState::Define |
                                      getDeadRegState(isDead)),
                              Src, isKill, 1);
      break;
    }
    case X86::INC16r:
    case X86::INC64_16r:
      if (DisableLEA16)
        return is64Bit ? convertToThreeAddressWithLEA(MIOpc, MFI, MBBI, LV) : 0;
      assert(MI->getNumOperands() >= 2 && "Unknown inc instruction!");
      NewMI = addRegOffset(BuildMI(MF, MI->getDebugLoc(), get(X86::LEA16r))
                           .addReg(Dest, RegState::Define |
                                   getDeadRegState(isDead)),
                           Src, isKill, 1);
      break;
    case X86::DEC64r:
    case X86::DEC32r:
    case X86::DEC64_32r: {
      // dec reg => lea reg, [reg - 1]
      assert(MI->getNumOperands() >= 2 && "Unknown dec instruction!");
      unsigned Opc = MIOpc == X86::DEC64r ? X86::LEA64r
        : (is64Bit ? X86::LEA64_32r : X86::LEA32r);
      NewMI = addLeaRegOffset(BuildMI(MF, MI->getDebugLoc(), get(Opc))
                              .addReg(Dest, RegState::Define |
                                      getDeadRegState(isDead)),
                              Src, isKill, -1);
      break;
    }
    case X86::DEC16r:
    case X86::DEC64_16r:
      if (DisableLEA16)
        return is64Bit ? convertToThreeAddressWithLEA(MIOpc, MFI, MBBI, LV) : 0;
      assert(MI->getNumOperands() >= 2 && "Unknown dec instruction!");
      NewMI = addRegOffset(BuildMI(MF, MI->getDebugLoc(), get(X86::LEA16r))
                           .addReg(Dest, RegState::Define |
                                   getDeadRegState(isDead)),
                           Src, isKill, -1);
      break;
    case X86::ADD64rr:
    case X86::ADD32rr: {
      // add reg1, reg2 => lea reg, [reg1 + reg2]
      assert(MI->getNumOperands() >= 3 && "Unknown add instruction!");
      unsigned Opc = MIOpc == X86::ADD64rr ? X86::LEA64r
        : (is64Bit ? X86::LEA64_32r : X86::LEA32r);
      unsigned Src2 = MI->getOperand(2).getReg();
      bool isKill2 = MI->getOperand(2).isKill();
      NewMI = addRegReg(BuildMI(MF, MI->getDebugLoc(), get(Opc))
                        .addReg(Dest, RegState::Define |
                                getDeadRegState(isDead)),
                        Src, isKill, Src2, isKill2);
      // The kill of the second source moves from MI to the new LEA.
      if (LV && isKill2)
        LV->replaceKillInstruction(Src2, MI, NewMI);
      break;
    }
    case X86::ADD16rr: {
      if (DisableLEA16)
        return is64Bit ? convertToThreeAddressWithLEA(MIOpc, MFI, MBBI, LV) : 0;
      assert(MI->getNumOperands() >= 3 && "Unknown add instruction!");
      unsigned Src2 = MI->getOperand(2).getReg();
      bool isKill2 = MI->getOperand(2).isKill();
      NewMI = addRegReg(BuildMI(MF, MI->getDebugLoc(), get(X86::LEA16r))
                        .addReg(Dest, RegState::Define |
                                getDeadRegState(isDead)),
                        Src, isKill, Src2, isKill2);
      if (LV && isKill2)
        LV->replaceKillInstruction(Src2, MI, NewMI);
      break;
    }
    case X86::ADD64ri32:
    case X86::ADD64ri8:
      // add reg, imm => lea reg, [reg + imm]
      assert(MI->getNumOperands() >= 3 && "Unknown add instruction!");
      NewMI = addLeaRegOffset(BuildMI(MF, MI->getDebugLoc(), get(X86::LEA64r))
                              .addReg(Dest, RegState::Define |
                                      getDeadRegState(isDead)),
                              Src, isKill, MI->getOperand(2).getImm());
      break;
    case X86::ADD32ri:
    case X86::ADD32ri8: {
      assert(MI->getNumOperands() >= 3 && "Unknown add instruction!");
      unsigned Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
      NewMI = addLeaRegOffset(BuildMI(MF, MI->getDebugLoc(), get(Opc))
                              .addReg(Dest, RegState::Define |
                                      getDeadRegState(isDead)),
                                Src, isKill, MI->getOperand(2).getImm());
      break;
    }
    case X86::ADD16ri:
    case X86::ADD16ri8:
      if (DisableLEA16)
        return is64Bit ? convertToThreeAddressWithLEA(MIOpc, MFI, MBBI, LV) : 0;
      assert(MI->getNumOperands() >= 3 && "Unknown add instruction!");
      NewMI = addLeaRegOffset(BuildMI(MF, MI->getDebugLoc(), get(X86::LEA16r))
                              .addReg(Dest, RegState::Define |
                                      getDeadRegState(isDead)),
                              Src, isKill, MI->getOperand(2).getImm());
      break;
    }
  }
  }

  if (!NewMI) return 0;

  if (LV) {  // Update live variables
    if (isKill)
      LV->replaceKillInstruction(Src, MI, NewMI);
    if (isDead)
      LV->replaceKillInstruction(Dest, MI, NewMI);
  }

  MFI->insert(MBBI, NewMI);          // Insert the new inst    
  return NewMI;
}
 | 1382 |  | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1383 | /// commuteInstruction - We have a few instructions that must be hacked on to | 
 | 1384 | /// commute them. | 
 | 1385 | /// | 
| Evan Cheng | 58dcb0e | 2008-06-16 07:33:11 +0000 | [diff] [blame] | 1386 | MachineInstr * | 
 | 1387 | X86InstrInfo::commuteInstruction(MachineInstr *MI, bool NewMI) const { | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1388 |   switch (MI->getOpcode()) { | 
| Chris Lattner | 0df53d2 | 2005-01-19 07:31:24 +0000 | [diff] [blame] | 1389 |   case X86::SHRD16rri8: // A = SHRD16rri8 B, C, I -> A = SHLD16rri8 C, B, (16-I) | 
 | 1390 |   case X86::SHLD16rri8: // A = SHLD16rri8 B, C, I -> A = SHRD16rri8 C, B, (16-I) | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1391 |   case X86::SHRD32rri8: // A = SHRD32rri8 B, C, I -> A = SHLD32rri8 C, B, (32-I) | 
| Dan Gohman | e47f1f9 | 2007-09-14 23:17:45 +0000 | [diff] [blame] | 1392 |   case X86::SHLD32rri8: // A = SHLD32rri8 B, C, I -> A = SHRD32rri8 C, B, (32-I) | 
 | 1393 |   case X86::SHRD64rri8: // A = SHRD64rri8 B, C, I -> A = SHLD64rri8 C, B, (64-I) | 
 | 1394 |   case X86::SHLD64rri8:{// A = SHLD64rri8 B, C, I -> A = SHRD64rri8 C, B, (64-I) | 
| Chris Lattner | 0df53d2 | 2005-01-19 07:31:24 +0000 | [diff] [blame] | 1395 |     unsigned Opc; | 
 | 1396 |     unsigned Size; | 
 | 1397 |     switch (MI->getOpcode()) { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 1398 |     default: llvm_unreachable("Unreachable!"); | 
| Chris Lattner | 0df53d2 | 2005-01-19 07:31:24 +0000 | [diff] [blame] | 1399 |     case X86::SHRD16rri8: Size = 16; Opc = X86::SHLD16rri8; break; | 
 | 1400 |     case X86::SHLD16rri8: Size = 16; Opc = X86::SHRD16rri8; break; | 
 | 1401 |     case X86::SHRD32rri8: Size = 32; Opc = X86::SHLD32rri8; break; | 
 | 1402 |     case X86::SHLD32rri8: Size = 32; Opc = X86::SHRD32rri8; break; | 
| Dan Gohman | e47f1f9 | 2007-09-14 23:17:45 +0000 | [diff] [blame] | 1403 |     case X86::SHRD64rri8: Size = 64; Opc = X86::SHLD64rri8; break; | 
 | 1404 |     case X86::SHLD64rri8: Size = 64; Opc = X86::SHRD64rri8; break; | 
| Chris Lattner | 0df53d2 | 2005-01-19 07:31:24 +0000 | [diff] [blame] | 1405 |     } | 
| Chris Lattner | 9a1ceae | 2007-12-30 20:49:49 +0000 | [diff] [blame] | 1406 |     unsigned Amt = MI->getOperand(3).getImm(); | 
| Dan Gohman | 74feef2 | 2008-10-17 01:23:35 +0000 | [diff] [blame] | 1407 |     if (NewMI) { | 
 | 1408 |       MachineFunction &MF = *MI->getParent()->getParent(); | 
 | 1409 |       MI = MF.CloneMachineInstr(MI); | 
 | 1410 |       NewMI = false; | 
| Evan Cheng | a4d16a1 | 2008-02-13 02:46:49 +0000 | [diff] [blame] | 1411 |     } | 
| Dan Gohman | 74feef2 | 2008-10-17 01:23:35 +0000 | [diff] [blame] | 1412 |     MI->setDesc(get(Opc)); | 
 | 1413 |     MI->getOperand(3).setImm(Size-Amt); | 
 | 1414 |     return TargetInstrInfoImpl::commuteInstruction(MI, NewMI); | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1415 |   } | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1416 |   case X86::CMOVB16rr: | 
 | 1417 |   case X86::CMOVB32rr: | 
 | 1418 |   case X86::CMOVB64rr: | 
 | 1419 |   case X86::CMOVAE16rr: | 
 | 1420 |   case X86::CMOVAE32rr: | 
 | 1421 |   case X86::CMOVAE64rr: | 
 | 1422 |   case X86::CMOVE16rr: | 
 | 1423 |   case X86::CMOVE32rr: | 
 | 1424 |   case X86::CMOVE64rr: | 
 | 1425 |   case X86::CMOVNE16rr: | 
 | 1426 |   case X86::CMOVNE32rr: | 
 | 1427 |   case X86::CMOVNE64rr: | 
 | 1428 |   case X86::CMOVBE16rr: | 
 | 1429 |   case X86::CMOVBE32rr: | 
 | 1430 |   case X86::CMOVBE64rr: | 
 | 1431 |   case X86::CMOVA16rr: | 
 | 1432 |   case X86::CMOVA32rr: | 
 | 1433 |   case X86::CMOVA64rr: | 
 | 1434 |   case X86::CMOVL16rr: | 
 | 1435 |   case X86::CMOVL32rr: | 
 | 1436 |   case X86::CMOVL64rr: | 
 | 1437 |   case X86::CMOVGE16rr: | 
 | 1438 |   case X86::CMOVGE32rr: | 
 | 1439 |   case X86::CMOVGE64rr: | 
 | 1440 |   case X86::CMOVLE16rr: | 
 | 1441 |   case X86::CMOVLE32rr: | 
 | 1442 |   case X86::CMOVLE64rr: | 
 | 1443 |   case X86::CMOVG16rr: | 
 | 1444 |   case X86::CMOVG32rr: | 
 | 1445 |   case X86::CMOVG64rr: | 
 | 1446 |   case X86::CMOVS16rr: | 
 | 1447 |   case X86::CMOVS32rr: | 
 | 1448 |   case X86::CMOVS64rr: | 
 | 1449 |   case X86::CMOVNS16rr: | 
 | 1450 |   case X86::CMOVNS32rr: | 
 | 1451 |   case X86::CMOVNS64rr: | 
 | 1452 |   case X86::CMOVP16rr: | 
 | 1453 |   case X86::CMOVP32rr: | 
 | 1454 |   case X86::CMOVP64rr: | 
 | 1455 |   case X86::CMOVNP16rr: | 
 | 1456 |   case X86::CMOVNP32rr: | 
| Dan Gohman | 305fceb | 2009-01-07 00:35:10 +0000 | [diff] [blame] | 1457 |   case X86::CMOVNP64rr: | 
 | 1458 |   case X86::CMOVO16rr: | 
 | 1459 |   case X86::CMOVO32rr: | 
 | 1460 |   case X86::CMOVO64rr: | 
 | 1461 |   case X86::CMOVNO16rr: | 
 | 1462 |   case X86::CMOVNO32rr: | 
 | 1463 |   case X86::CMOVNO64rr: { | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1464 |     unsigned Opc = 0; | 
 | 1465 |     switch (MI->getOpcode()) { | 
 | 1466 |     default: break; | 
 | 1467 |     case X86::CMOVB16rr:  Opc = X86::CMOVAE16rr; break; | 
 | 1468 |     case X86::CMOVB32rr:  Opc = X86::CMOVAE32rr; break; | 
 | 1469 |     case X86::CMOVB64rr:  Opc = X86::CMOVAE64rr; break; | 
 | 1470 |     case X86::CMOVAE16rr: Opc = X86::CMOVB16rr; break; | 
 | 1471 |     case X86::CMOVAE32rr: Opc = X86::CMOVB32rr; break; | 
 | 1472 |     case X86::CMOVAE64rr: Opc = X86::CMOVB64rr; break; | 
 | 1473 |     case X86::CMOVE16rr:  Opc = X86::CMOVNE16rr; break; | 
 | 1474 |     case X86::CMOVE32rr:  Opc = X86::CMOVNE32rr; break; | 
 | 1475 |     case X86::CMOVE64rr:  Opc = X86::CMOVNE64rr; break; | 
 | 1476 |     case X86::CMOVNE16rr: Opc = X86::CMOVE16rr; break; | 
 | 1477 |     case X86::CMOVNE32rr: Opc = X86::CMOVE32rr; break; | 
 | 1478 |     case X86::CMOVNE64rr: Opc = X86::CMOVE64rr; break; | 
 | 1479 |     case X86::CMOVBE16rr: Opc = X86::CMOVA16rr; break; | 
 | 1480 |     case X86::CMOVBE32rr: Opc = X86::CMOVA32rr; break; | 
 | 1481 |     case X86::CMOVBE64rr: Opc = X86::CMOVA64rr; break; | 
 | 1482 |     case X86::CMOVA16rr:  Opc = X86::CMOVBE16rr; break; | 
 | 1483 |     case X86::CMOVA32rr:  Opc = X86::CMOVBE32rr; break; | 
 | 1484 |     case X86::CMOVA64rr:  Opc = X86::CMOVBE64rr; break; | 
 | 1485 |     case X86::CMOVL16rr:  Opc = X86::CMOVGE16rr; break; | 
 | 1486 |     case X86::CMOVL32rr:  Opc = X86::CMOVGE32rr; break; | 
 | 1487 |     case X86::CMOVL64rr:  Opc = X86::CMOVGE64rr; break; | 
 | 1488 |     case X86::CMOVGE16rr: Opc = X86::CMOVL16rr; break; | 
 | 1489 |     case X86::CMOVGE32rr: Opc = X86::CMOVL32rr; break; | 
 | 1490 |     case X86::CMOVGE64rr: Opc = X86::CMOVL64rr; break; | 
 | 1491 |     case X86::CMOVLE16rr: Opc = X86::CMOVG16rr; break; | 
 | 1492 |     case X86::CMOVLE32rr: Opc = X86::CMOVG32rr; break; | 
 | 1493 |     case X86::CMOVLE64rr: Opc = X86::CMOVG64rr; break; | 
 | 1494 |     case X86::CMOVG16rr:  Opc = X86::CMOVLE16rr; break; | 
 | 1495 |     case X86::CMOVG32rr:  Opc = X86::CMOVLE32rr; break; | 
 | 1496 |     case X86::CMOVG64rr:  Opc = X86::CMOVLE64rr; break; | 
 | 1497 |     case X86::CMOVS16rr:  Opc = X86::CMOVNS16rr; break; | 
 | 1498 |     case X86::CMOVS32rr:  Opc = X86::CMOVNS32rr; break; | 
| Mon P Wang | 0bd07fc | 2009-04-18 05:16:01 +0000 | [diff] [blame] | 1499 |     case X86::CMOVS64rr:  Opc = X86::CMOVNS64rr; break; | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1500 |     case X86::CMOVNS16rr: Opc = X86::CMOVS16rr; break; | 
 | 1501 |     case X86::CMOVNS32rr: Opc = X86::CMOVS32rr; break; | 
 | 1502 |     case X86::CMOVNS64rr: Opc = X86::CMOVS64rr; break; | 
 | 1503 |     case X86::CMOVP16rr:  Opc = X86::CMOVNP16rr; break; | 
 | 1504 |     case X86::CMOVP32rr:  Opc = X86::CMOVNP32rr; break; | 
| Mon P Wang | 0bd07fc | 2009-04-18 05:16:01 +0000 | [diff] [blame] | 1505 |     case X86::CMOVP64rr:  Opc = X86::CMOVNP64rr; break; | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1506 |     case X86::CMOVNP16rr: Opc = X86::CMOVP16rr; break; | 
 | 1507 |     case X86::CMOVNP32rr: Opc = X86::CMOVP32rr; break; | 
 | 1508 |     case X86::CMOVNP64rr: Opc = X86::CMOVP64rr; break; | 
| Dan Gohman | 305fceb | 2009-01-07 00:35:10 +0000 | [diff] [blame] | 1509 |     case X86::CMOVO16rr:  Opc = X86::CMOVNO16rr; break; | 
 | 1510 |     case X86::CMOVO32rr:  Opc = X86::CMOVNO32rr; break; | 
| Mon P Wang | 0bd07fc | 2009-04-18 05:16:01 +0000 | [diff] [blame] | 1511 |     case X86::CMOVO64rr:  Opc = X86::CMOVNO64rr; break; | 
| Dan Gohman | 305fceb | 2009-01-07 00:35:10 +0000 | [diff] [blame] | 1512 |     case X86::CMOVNO16rr: Opc = X86::CMOVO16rr; break; | 
 | 1513 |     case X86::CMOVNO32rr: Opc = X86::CMOVO32rr; break; | 
 | 1514 |     case X86::CMOVNO64rr: Opc = X86::CMOVO64rr; break; | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1515 |     } | 
| Dan Gohman | 74feef2 | 2008-10-17 01:23:35 +0000 | [diff] [blame] | 1516 |     if (NewMI) { | 
 | 1517 |       MachineFunction &MF = *MI->getParent()->getParent(); | 
 | 1518 |       MI = MF.CloneMachineInstr(MI); | 
 | 1519 |       NewMI = false; | 
 | 1520 |     } | 
| Chris Lattner | 5080f4d | 2008-01-11 18:10:50 +0000 | [diff] [blame] | 1521 |     MI->setDesc(get(Opc)); | 
| Evan Cheng | 7ad42d9 | 2007-10-05 23:13:21 +0000 | [diff] [blame] | 1522 |     // Fallthrough intended. | 
 | 1523 |   } | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1524 |   default: | 
| Evan Cheng | 58dcb0e | 2008-06-16 07:33:11 +0000 | [diff] [blame] | 1525 |     return TargetInstrInfoImpl::commuteInstruction(MI, NewMI); | 
| Chris Lattner | 41e431b | 2005-01-19 07:11:01 +0000 | [diff] [blame] | 1526 |   } | 
 | 1527 | } | 
 | 1528 |  | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1529 | static X86::CondCode GetCondFromBranchOpc(unsigned BrOpc) { | 
 | 1530 |   switch (BrOpc) { | 
 | 1531 |   default: return X86::COND_INVALID; | 
 | 1532 |   case X86::JE:  return X86::COND_E; | 
 | 1533 |   case X86::JNE: return X86::COND_NE; | 
 | 1534 |   case X86::JL:  return X86::COND_L; | 
 | 1535 |   case X86::JLE: return X86::COND_LE; | 
 | 1536 |   case X86::JG:  return X86::COND_G; | 
 | 1537 |   case X86::JGE: return X86::COND_GE; | 
 | 1538 |   case X86::JB:  return X86::COND_B; | 
 | 1539 |   case X86::JBE: return X86::COND_BE; | 
 | 1540 |   case X86::JA:  return X86::COND_A; | 
 | 1541 |   case X86::JAE: return X86::COND_AE; | 
 | 1542 |   case X86::JS:  return X86::COND_S; | 
 | 1543 |   case X86::JNS: return X86::COND_NS; | 
 | 1544 |   case X86::JP:  return X86::COND_P; | 
 | 1545 |   case X86::JNP: return X86::COND_NP; | 
 | 1546 |   case X86::JO:  return X86::COND_O; | 
 | 1547 |   case X86::JNO: return X86::COND_NO; | 
 | 1548 |   } | 
 | 1549 | } | 
 | 1550 |  | 
 | 1551 | unsigned X86::GetCondBranchFromCond(X86::CondCode CC) { | 
 | 1552 |   switch (CC) { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 1553 |   default: llvm_unreachable("Illegal condition code!"); | 
| Evan Cheng | e5f6204 | 2007-09-29 00:00:36 +0000 | [diff] [blame] | 1554 |   case X86::COND_E:  return X86::JE; | 
 | 1555 |   case X86::COND_NE: return X86::JNE; | 
 | 1556 |   case X86::COND_L:  return X86::JL; | 
 | 1557 |   case X86::COND_LE: return X86::JLE; | 
 | 1558 |   case X86::COND_G:  return X86::JG; | 
 | 1559 |   case X86::COND_GE: return X86::JGE; | 
 | 1560 |   case X86::COND_B:  return X86::JB; | 
 | 1561 |   case X86::COND_BE: return X86::JBE; | 
 | 1562 |   case X86::COND_A:  return X86::JA; | 
 | 1563 |   case X86::COND_AE: return X86::JAE; | 
 | 1564 |   case X86::COND_S:  return X86::JS; | 
 | 1565 |   case X86::COND_NS: return X86::JNS; | 
 | 1566 |   case X86::COND_P:  return X86::JP; | 
 | 1567 |   case X86::COND_NP: return X86::JNP; | 
 | 1568 |   case X86::COND_O:  return X86::JO; | 
 | 1569 |   case X86::COND_NO: return X86::JNO; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1570 |   } | 
 | 1571 | } | 
 | 1572 |  | 
| Chris Lattner | 9cd6875 | 2006-10-21 05:52:40 +0000 | [diff] [blame] | 1573 | /// GetOppositeBranchCondition - Return the inverse of the specified condition, | 
 | 1574 | /// e.g. turning COND_E to COND_NE. | 
 | 1575 | X86::CondCode X86::GetOppositeBranchCondition(X86::CondCode CC) { | 
 | 1576 |   switch (CC) { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 1577 |   default: llvm_unreachable("Illegal condition code!"); | 
| Chris Lattner | 9cd6875 | 2006-10-21 05:52:40 +0000 | [diff] [blame] | 1578 |   case X86::COND_E:  return X86::COND_NE; | 
 | 1579 |   case X86::COND_NE: return X86::COND_E; | 
 | 1580 |   case X86::COND_L:  return X86::COND_GE; | 
 | 1581 |   case X86::COND_LE: return X86::COND_G; | 
 | 1582 |   case X86::COND_G:  return X86::COND_LE; | 
 | 1583 |   case X86::COND_GE: return X86::COND_L; | 
 | 1584 |   case X86::COND_B:  return X86::COND_AE; | 
 | 1585 |   case X86::COND_BE: return X86::COND_A; | 
 | 1586 |   case X86::COND_A:  return X86::COND_BE; | 
 | 1587 |   case X86::COND_AE: return X86::COND_B; | 
 | 1588 |   case X86::COND_S:  return X86::COND_NS; | 
 | 1589 |   case X86::COND_NS: return X86::COND_S; | 
 | 1590 |   case X86::COND_P:  return X86::COND_NP; | 
 | 1591 |   case X86::COND_NP: return X86::COND_P; | 
 | 1592 |   case X86::COND_O:  return X86::COND_NO; | 
 | 1593 |   case X86::COND_NO: return X86::COND_O; | 
 | 1594 |   } | 
 | 1595 | } | 
 | 1596 |  | 
| Dale Johannesen | 318093b | 2007-06-14 22:03:45 +0000 | [diff] [blame] | 1597 | bool X86InstrInfo::isUnpredicatedTerminator(const MachineInstr *MI) const { | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 1598 |   const TargetInstrDesc &TID = MI->getDesc(); | 
 | 1599 |   if (!TID.isTerminator()) return false; | 
| Chris Lattner | 6924430 | 2008-01-07 01:56:04 +0000 | [diff] [blame] | 1600 |    | 
 | 1601 |   // Conditional branch is a special case. | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 1602 |   if (TID.isBranch() && !TID.isBarrier()) | 
| Chris Lattner | 6924430 | 2008-01-07 01:56:04 +0000 | [diff] [blame] | 1603 |     return true; | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 1604 |   if (!TID.isPredicable()) | 
| Chris Lattner | 6924430 | 2008-01-07 01:56:04 +0000 | [diff] [blame] | 1605 |     return true; | 
 | 1606 |   return !isPredicated(MI); | 
| Dale Johannesen | 318093b | 2007-06-14 22:03:45 +0000 | [diff] [blame] | 1607 | } | 
| Chris Lattner | 9cd6875 | 2006-10-21 05:52:40 +0000 | [diff] [blame] | 1608 |  | 
| Evan Cheng | 85dce6c | 2007-07-26 17:32:14 +0000 | [diff] [blame] | 1609 | // For purposes of branch analysis do not count FP_REG_KILL as a terminator. | 
 | 1610 | static bool isBrAnalysisUnpredicatedTerminator(const MachineInstr *MI, | 
 | 1611 |                                                const X86InstrInfo &TII) { | 
 | 1612 |   if (MI->getOpcode() == X86::FP_REG_KILL) | 
 | 1613 |     return false; | 
 | 1614 |   return TII.isUnpredicatedTerminator(MI); | 
 | 1615 | } | 
 | 1616 |  | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1617 | bool X86InstrInfo::AnalyzeBranch(MachineBasicBlock &MBB,  | 
 | 1618 |                                  MachineBasicBlock *&TBB, | 
 | 1619 |                                  MachineBasicBlock *&FBB, | 
| Evan Cheng | dc54d31 | 2009-02-09 07:14:22 +0000 | [diff] [blame] | 1620 |                                  SmallVectorImpl<MachineOperand> &Cond, | 
 | 1621 |                                  bool AllowModify) const { | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1622 |   // Start from the bottom of the block and work up, examining the | 
 | 1623 |   // terminator instructions. | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1624 |   MachineBasicBlock::iterator I = MBB.end(); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1625 |   while (I != MBB.begin()) { | 
 | 1626 |     --I; | 
 | 1627 |     // Working from the bottom, when we see a non-terminator | 
 | 1628 |     // instruction, we're done. | 
 | 1629 |     if (!isBrAnalysisUnpredicatedTerminator(I, *this)) | 
 | 1630 |       break; | 
 | 1631 |     // A terminator that isn't a branch can't easily be handled | 
 | 1632 |     // by this analysis. | 
 | 1633 |     if (!I->getDesc().isBranch()) | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1634 |       return true; | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1635 |     // Handle unconditional branches. | 
 | 1636 |     if (I->getOpcode() == X86::JMP) { | 
| Evan Cheng | dc54d31 | 2009-02-09 07:14:22 +0000 | [diff] [blame] | 1637 |       if (!AllowModify) { | 
 | 1638 |         TBB = I->getOperand(0).getMBB(); | 
| Evan Cheng | 45e0010 | 2009-05-08 06:34:09 +0000 | [diff] [blame] | 1639 |         continue; | 
| Evan Cheng | dc54d31 | 2009-02-09 07:14:22 +0000 | [diff] [blame] | 1640 |       } | 
 | 1641 |  | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1642 |       // If the block has any instructions after a JMP, delete them. | 
| Chris Lattner | 7896c9f | 2009-12-03 00:50:42 +0000 | [diff] [blame] | 1643 |       while (llvm::next(I) != MBB.end()) | 
 | 1644 |         llvm::next(I)->eraseFromParent(); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1645 |       Cond.clear(); | 
 | 1646 |       FBB = 0; | 
 | 1647 |       // Delete the JMP if it's equivalent to a fall-through. | 
 | 1648 |       if (MBB.isLayoutSuccessor(I->getOperand(0).getMBB())) { | 
 | 1649 |         TBB = 0; | 
 | 1650 |         I->eraseFromParent(); | 
 | 1651 |         I = MBB.end(); | 
 | 1652 |         continue; | 
 | 1653 |       } | 
 | 1654 |       // TBB is used to indicate the unconditinal destination. | 
 | 1655 |       TBB = I->getOperand(0).getMBB(); | 
 | 1656 |       continue; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1657 |     } | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1658 |     // Handle conditional branches. | 
 | 1659 |     X86::CondCode BranchCode = GetCondFromBranchOpc(I->getOpcode()); | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1660 |     if (BranchCode == X86::COND_INVALID) | 
 | 1661 |       return true;  // Can't handle indirect branch. | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1662 |     // Working from the bottom, handle the first conditional branch. | 
 | 1663 |     if (Cond.empty()) { | 
 | 1664 |       FBB = TBB; | 
 | 1665 |       TBB = I->getOperand(0).getMBB(); | 
 | 1666 |       Cond.push_back(MachineOperand::CreateImm(BranchCode)); | 
 | 1667 |       continue; | 
 | 1668 |     } | 
 | 1669 |     // Handle subsequent conditional branches. Only handle the case | 
 | 1670 |     // where all conditional branches branch to the same destination | 
 | 1671 |     // and their condition opcodes fit one of the special | 
 | 1672 |     // multi-branch idioms. | 
 | 1673 |     assert(Cond.size() == 1); | 
 | 1674 |     assert(TBB); | 
 | 1675 |     // Only handle the case where all conditional branches branch to | 
 | 1676 |     // the same destination. | 
 | 1677 |     if (TBB != I->getOperand(0).getMBB()) | 
 | 1678 |       return true; | 
 | 1679 |     X86::CondCode OldBranchCode = (X86::CondCode)Cond[0].getImm(); | 
 | 1680 |     // If the conditions are the same, we can leave them alone. | 
 | 1681 |     if (OldBranchCode == BranchCode) | 
 | 1682 |       continue; | 
 | 1683 |     // If they differ, see if they fit one of the known patterns. | 
 | 1684 |     // Theoretically we could handle more patterns here, but | 
 | 1685 |     // we shouldn't expect to see them if instruction selection | 
 | 1686 |     // has done a reasonable job. | 
 | 1687 |     if ((OldBranchCode == X86::COND_NP && | 
 | 1688 |          BranchCode == X86::COND_E) || | 
 | 1689 |         (OldBranchCode == X86::COND_E && | 
 | 1690 |          BranchCode == X86::COND_NP)) | 
 | 1691 |       BranchCode = X86::COND_NP_OR_E; | 
 | 1692 |     else if ((OldBranchCode == X86::COND_P && | 
 | 1693 |               BranchCode == X86::COND_NE) || | 
 | 1694 |              (OldBranchCode == X86::COND_NE && | 
 | 1695 |               BranchCode == X86::COND_P)) | 
 | 1696 |       BranchCode = X86::COND_NE_OR_P; | 
 | 1697 |     else | 
 | 1698 |       return true; | 
 | 1699 |     // Update the MachineOperand. | 
 | 1700 |     Cond[0].setImm(BranchCode); | 
| Chris Lattner | 6ce6443 | 2006-10-30 22:27:23 +0000 | [diff] [blame] | 1701 |   } | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1702 |  | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1703 |   return false; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1704 | } | 
 | 1705 |  | 
| Evan Cheng | 6ae3626 | 2007-05-18 00:18:17 +0000 | [diff] [blame] | 1706 | unsigned X86InstrInfo::RemoveBranch(MachineBasicBlock &MBB) const { | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1707 |   MachineBasicBlock::iterator I = MBB.end(); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1708 |   unsigned Count = 0; | 
 | 1709 |  | 
 | 1710 |   while (I != MBB.begin()) { | 
 | 1711 |     --I; | 
 | 1712 |     if (I->getOpcode() != X86::JMP && | 
 | 1713 |         GetCondFromBranchOpc(I->getOpcode()) == X86::COND_INVALID) | 
 | 1714 |       break; | 
 | 1715 |     // Remove the branch. | 
 | 1716 |     I->eraseFromParent(); | 
 | 1717 |     I = MBB.end(); | 
 | 1718 |     ++Count; | 
 | 1719 |   } | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1720 |    | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1721 |   return Count; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1722 | } | 
 | 1723 |  | 
| Evan Cheng | 6ae3626 | 2007-05-18 00:18:17 +0000 | [diff] [blame] | 1724 | unsigned | 
 | 1725 | X86InstrInfo::InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB, | 
 | 1726 |                            MachineBasicBlock *FBB, | 
| Owen Anderson | 44eb65c | 2008-08-14 22:49:33 +0000 | [diff] [blame] | 1727 |                            const SmallVectorImpl<MachineOperand> &Cond) const { | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1728 |   // FIXME this should probably have a DebugLoc operand | 
 | 1729 |   DebugLoc dl = DebugLoc::getUnknownLoc(); | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1730 |   // Shouldn't be a fall through. | 
 | 1731 |   assert(TBB && "InsertBranch must not be told to insert a fallthrough"); | 
| Chris Lattner | 34a84ac | 2006-10-21 05:34:23 +0000 | [diff] [blame] | 1732 |   assert((Cond.size() == 1 || Cond.size() == 0) && | 
 | 1733 |          "X86 branch conditions have one component!"); | 
 | 1734 |  | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1735 |   if (Cond.empty()) { | 
 | 1736 |     // Unconditional branch? | 
 | 1737 |     assert(!FBB && "Unconditional branch with multiple successors!"); | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1738 |     BuildMI(&MBB, dl, get(X86::JMP)).addMBB(TBB); | 
| Evan Cheng | 6ae3626 | 2007-05-18 00:18:17 +0000 | [diff] [blame] | 1739 |     return 1; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1740 |   } | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1741 |  | 
 | 1742 |   // Conditional branch. | 
 | 1743 |   unsigned Count = 0; | 
 | 1744 |   X86::CondCode CC = (X86::CondCode)Cond[0].getImm(); | 
 | 1745 |   switch (CC) { | 
 | 1746 |   case X86::COND_NP_OR_E: | 
 | 1747 |     // Synthesize NP_OR_E with two branches. | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1748 |     BuildMI(&MBB, dl, get(X86::JNP)).addMBB(TBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1749 |     ++Count; | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1750 |     BuildMI(&MBB, dl, get(X86::JE)).addMBB(TBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1751 |     ++Count; | 
 | 1752 |     break; | 
 | 1753 |   case X86::COND_NE_OR_P: | 
 | 1754 |     // Synthesize NE_OR_P with two branches. | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1755 |     BuildMI(&MBB, dl, get(X86::JNE)).addMBB(TBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1756 |     ++Count; | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1757 |     BuildMI(&MBB, dl, get(X86::JP)).addMBB(TBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1758 |     ++Count; | 
 | 1759 |     break; | 
 | 1760 |   default: { | 
 | 1761 |     unsigned Opc = GetCondBranchFromCond(CC); | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1762 |     BuildMI(&MBB, dl, get(Opc)).addMBB(TBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1763 |     ++Count; | 
 | 1764 |   } | 
 | 1765 |   } | 
 | 1766 |   if (FBB) { | 
 | 1767 |     // Two-way Conditional branch. Insert the second branch. | 
| Dale Johannesen | 8d13f8f | 2009-02-13 02:33:27 +0000 | [diff] [blame] | 1768 |     BuildMI(&MBB, dl, get(X86::JMP)).addMBB(FBB); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 1769 |     ++Count; | 
 | 1770 |   } | 
 | 1771 |   return Count; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 1772 | } | 
 | 1773 |  | 
| Dan Gohman | 6d9305c | 2009-04-15 00:04:23 +0000 | [diff] [blame] | 1774 | /// isHReg - Test if the given register is a physical h register. | 
 | 1775 | static bool isHReg(unsigned Reg) { | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1776 |   return X86::GR8_ABCD_HRegClass.contains(Reg); | 
| Dan Gohman | 6d9305c | 2009-04-15 00:04:23 +0000 | [diff] [blame] | 1777 | } | 
 | 1778 |  | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1779 | bool X86InstrInfo::copyRegToReg(MachineBasicBlock &MBB, | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1780 |                                 MachineBasicBlock::iterator MI, | 
 | 1781 |                                 unsigned DestReg, unsigned SrcReg, | 
 | 1782 |                                 const TargetRegisterClass *DestRC, | 
 | 1783 |                                 const TargetRegisterClass *SrcRC) const { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1784 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 1785 |   if (MI != MBB.end()) DL = MI->getDebugLoc(); | 
 | 1786 |  | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1787 |   // Determine if DstRC and SrcRC have a common superclass in common. | 
 | 1788 |   const TargetRegisterClass *CommonRC = DestRC; | 
 | 1789 |   if (DestRC == SrcRC) | 
 | 1790 |     /* Source and destination have the same register class. */; | 
 | 1791 |   else if (CommonRC->hasSuperClass(SrcRC)) | 
 | 1792 |     CommonRC = SrcRC; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1793 |   else if (!DestRC->hasSubClass(SrcRC)) { | 
 | 1794 |     // Neither of GR64_NOREX or GR64_NOSP is a superclass of the other, | 
| Dan Gohman | 59e3492 | 2009-08-05 22:18:26 +0000 | [diff] [blame] | 1795 |     // but we want to copy then as GR64. Similarly, for GR32_NOREX and | 
 | 1796 |     // GR32_NOSP, copy as GR32. | 
| Dan Gohman | 3108222 | 2009-08-11 15:59:48 +0000 | [diff] [blame] | 1797 |     if (SrcRC->hasSuperClass(&X86::GR64RegClass) && | 
 | 1798 |         DestRC->hasSuperClass(&X86::GR64RegClass)) | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1799 |       CommonRC = &X86::GR64RegClass; | 
| Dan Gohman | 3108222 | 2009-08-11 15:59:48 +0000 | [diff] [blame] | 1800 |     else if (SrcRC->hasSuperClass(&X86::GR32RegClass) && | 
 | 1801 |              DestRC->hasSuperClass(&X86::GR32RegClass)) | 
| Dan Gohman | 59e3492 | 2009-08-05 22:18:26 +0000 | [diff] [blame] | 1802 |       CommonRC = &X86::GR32RegClass; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1803 |     else | 
 | 1804 |       CommonRC = 0; | 
 | 1805 |   } | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1806 |  | 
 | 1807 |   if (CommonRC) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1808 |     unsigned Opc; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1809 |     if (CommonRC == &X86::GR64RegClass || CommonRC == &X86::GR64_NOSPRegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1810 |       Opc = X86::MOV64rr; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1811 |     } else if (CommonRC == &X86::GR32RegClass || | 
 | 1812 |                CommonRC == &X86::GR32_NOSPRegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1813 |       Opc = X86::MOV32rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1814 |     } else if (CommonRC == &X86::GR16RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1815 |       Opc = X86::MOV16rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1816 |     } else if (CommonRC == &X86::GR8RegClass) { | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1817 |       // Copying to or from a physical H register on x86-64 requires a NOREX | 
| Bill Wendling | 1824773 | 2009-04-17 22:40:38 +0000 | [diff] [blame] | 1818 |       // move.  Otherwise use a normal move. | 
 | 1819 |       if ((isHReg(DestReg) || isHReg(SrcReg)) && | 
 | 1820 |           TM.getSubtarget<X86Subtarget>().is64Bit()) | 
| Dan Gohman | 6d9305c | 2009-04-15 00:04:23 +0000 | [diff] [blame] | 1821 |         Opc = X86::MOV8rr_NOREX; | 
 | 1822 |       else | 
 | 1823 |         Opc = X86::MOV8rr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1824 |     } else if (CommonRC == &X86::GR64_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1825 |       Opc = X86::MOV64rr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1826 |     } else if (CommonRC == &X86::GR32_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1827 |       Opc = X86::MOV32rr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1828 |     } else if (CommonRC == &X86::GR16_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1829 |       Opc = X86::MOV16rr; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1830 |     } else if (CommonRC == &X86::GR8_ABCD_LRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1831 |       Opc = X86::MOV8rr; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1832 |     } else if (CommonRC == &X86::GR8_ABCD_HRegClass) { | 
 | 1833 |       if (TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 1834 |         Opc = X86::MOV8rr_NOREX; | 
 | 1835 |       else | 
 | 1836 |         Opc = X86::MOV8rr; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1837 |     } else if (CommonRC == &X86::GR64_NOREXRegClass || | 
 | 1838 |                CommonRC == &X86::GR64_NOREX_NOSPRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1839 |       Opc = X86::MOV64rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1840 |     } else if (CommonRC == &X86::GR32_NOREXRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1841 |       Opc = X86::MOV32rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1842 |     } else if (CommonRC == &X86::GR16_NOREXRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1843 |       Opc = X86::MOV16rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1844 |     } else if (CommonRC == &X86::GR8_NOREXRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1845 |       Opc = X86::MOV8rr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1846 |     } else if (CommonRC == &X86::RFP32RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1847 |       Opc = X86::MOV_Fp3232; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1848 |     } else if (CommonRC == &X86::RFP64RegClass || CommonRC == &X86::RSTRegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1849 |       Opc = X86::MOV_Fp6464; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1850 |     } else if (CommonRC == &X86::RFP80RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1851 |       Opc = X86::MOV_Fp8080; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1852 |     } else if (CommonRC == &X86::FR32RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1853 |       Opc = X86::FsMOVAPSrr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1854 |     } else if (CommonRC == &X86::FR64RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1855 |       Opc = X86::FsMOVAPDrr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1856 |     } else if (CommonRC == &X86::VR128RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1857 |       Opc = X86::MOVAPSrr; | 
| Dan Gohman | 70bc17d | 2009-04-20 22:54:34 +0000 | [diff] [blame] | 1858 |     } else if (CommonRC == &X86::VR64RegClass) { | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1859 |       Opc = X86::MMX_MOVQ64rr; | 
 | 1860 |     } else { | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1861 |       return false; | 
| Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 1862 |     } | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1863 |     BuildMI(MBB, MI, DL, get(Opc), DestReg).addReg(SrcReg); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1864 |     return true; | 
| Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 1865 |   } | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1866 |  | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1867 |   // Moving EFLAGS to / from another register requires a push and a pop. | 
 | 1868 |   if (SrcRC == &X86::CCRRegClass) { | 
| Owen Anderson | a317767 | 2008-08-26 18:50:40 +0000 | [diff] [blame] | 1869 |     if (SrcReg != X86::EFLAGS) | 
 | 1870 |       return false; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1871 |     if (DestRC == &X86::GR64RegClass || DestRC == &X86::GR64_NOSPRegClass) { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1872 |       BuildMI(MBB, MI, DL, get(X86::PUSHFQ)); | 
 | 1873 |       BuildMI(MBB, MI, DL, get(X86::POP64r), DestReg); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1874 |       return true; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1875 |     } else if (DestRC == &X86::GR32RegClass || | 
 | 1876 |                DestRC == &X86::GR32_NOSPRegClass) { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1877 |       BuildMI(MBB, MI, DL, get(X86::PUSHFD)); | 
 | 1878 |       BuildMI(MBB, MI, DL, get(X86::POP32r), DestReg); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1879 |       return true; | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1880 |     } | 
 | 1881 |   } else if (DestRC == &X86::CCRRegClass) { | 
| Owen Anderson | a317767 | 2008-08-26 18:50:40 +0000 | [diff] [blame] | 1882 |     if (DestReg != X86::EFLAGS) | 
 | 1883 |       return false; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1884 |     if (SrcRC == &X86::GR64RegClass || DestRC == &X86::GR64_NOSPRegClass) { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1885 |       BuildMI(MBB, MI, DL, get(X86::PUSH64r)).addReg(SrcReg); | 
 | 1886 |       BuildMI(MBB, MI, DL, get(X86::POPFQ)); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1887 |       return true; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1888 |     } else if (SrcRC == &X86::GR32RegClass || | 
 | 1889 |                DestRC == &X86::GR32_NOSPRegClass) { | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1890 |       BuildMI(MBB, MI, DL, get(X86::PUSH32r)).addReg(SrcReg); | 
 | 1891 |       BuildMI(MBB, MI, DL, get(X86::POPFD)); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1892 |       return true; | 
| Chris Lattner | 90b347d | 2008-03-09 07:58:04 +0000 | [diff] [blame] | 1893 |     } | 
| Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 1894 |   } | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1895 |  | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1896 |   // Moving from ST(0) turns into FpGET_ST0_32 etc. | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1897 |   if (SrcRC == &X86::RSTRegClass) { | 
| Chris Lattner | 24e0a54 | 2008-03-21 06:38:26 +0000 | [diff] [blame] | 1898 |     // Copying from ST(0)/ST(1). | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1899 |     if (SrcReg != X86::ST0 && SrcReg != X86::ST1) | 
 | 1900 |       // Can only copy from ST(0)/ST(1) right now | 
 | 1901 |       return false; | 
| Chris Lattner | 24e0a54 | 2008-03-21 06:38:26 +0000 | [diff] [blame] | 1902 |     bool isST0 = SrcReg == X86::ST0; | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1903 |     unsigned Opc; | 
 | 1904 |     if (DestRC == &X86::RFP32RegClass) | 
| Chris Lattner | 24e0a54 | 2008-03-21 06:38:26 +0000 | [diff] [blame] | 1905 |       Opc = isST0 ? X86::FpGET_ST0_32 : X86::FpGET_ST1_32; | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1906 |     else if (DestRC == &X86::RFP64RegClass) | 
| Chris Lattner | 24e0a54 | 2008-03-21 06:38:26 +0000 | [diff] [blame] | 1907 |       Opc = isST0 ? X86::FpGET_ST0_64 : X86::FpGET_ST1_64; | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1908 |     else { | 
| Owen Anderson | a317767 | 2008-08-26 18:50:40 +0000 | [diff] [blame] | 1909 |       if (DestRC != &X86::RFP80RegClass) | 
 | 1910 |         return false; | 
| Chris Lattner | 24e0a54 | 2008-03-21 06:38:26 +0000 | [diff] [blame] | 1911 |       Opc = isST0 ? X86::FpGET_ST0_80 : X86::FpGET_ST1_80; | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1912 |     } | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1913 |     BuildMI(MBB, MI, DL, get(Opc), DestReg); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1914 |     return true; | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1915 |   } | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1916 |  | 
 | 1917 |   // Moving to ST(0) turns into FpSET_ST0_32 etc. | 
 | 1918 |   if (DestRC == &X86::RSTRegClass) { | 
| Evan Cheng | a0eedac | 2009-02-09 23:32:07 +0000 | [diff] [blame] | 1919 |     // Copying to ST(0) / ST(1). | 
 | 1920 |     if (DestReg != X86::ST0 && DestReg != X86::ST1) | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1921 |       // Can only copy to TOS right now | 
 | 1922 |       return false; | 
| Evan Cheng | a0eedac | 2009-02-09 23:32:07 +0000 | [diff] [blame] | 1923 |     bool isST0 = DestReg == X86::ST0; | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1924 |     unsigned Opc; | 
 | 1925 |     if (SrcRC == &X86::RFP32RegClass) | 
| Evan Cheng | a0eedac | 2009-02-09 23:32:07 +0000 | [diff] [blame] | 1926 |       Opc = isST0 ? X86::FpSET_ST0_32 : X86::FpSET_ST1_32; | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1927 |     else if (SrcRC == &X86::RFP64RegClass) | 
| Evan Cheng | a0eedac | 2009-02-09 23:32:07 +0000 | [diff] [blame] | 1928 |       Opc = isST0 ? X86::FpSET_ST0_64 : X86::FpSET_ST1_64; | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1929 |     else { | 
| Owen Anderson | a317767 | 2008-08-26 18:50:40 +0000 | [diff] [blame] | 1930 |       if (SrcRC != &X86::RFP80RegClass) | 
 | 1931 |         return false; | 
| Evan Cheng | a0eedac | 2009-02-09 23:32:07 +0000 | [diff] [blame] | 1932 |       Opc = isST0 ? X86::FpSET_ST0_80 : X86::FpSET_ST1_80; | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1933 |     } | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 1934 |     BuildMI(MBB, MI, DL, get(Opc)).addReg(SrcReg); | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1935 |     return true; | 
| Chris Lattner | f30e1cf | 2008-03-09 09:15:31 +0000 | [diff] [blame] | 1936 |   } | 
| Chris Lattner | 5c92750 | 2008-03-09 08:46:19 +0000 | [diff] [blame] | 1937 |    | 
| Owen Anderson | 940f83e | 2008-08-26 18:03:31 +0000 | [diff] [blame] | 1938 |   // Not yet supported! | 
 | 1939 |   return false; | 
| Owen Anderson | d10fd97 | 2007-12-31 06:32:00 +0000 | [diff] [blame] | 1940 | } | 
 | 1941 |  | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1942 | static unsigned getStoreRegOpcode(unsigned SrcReg, | 
 | 1943 |                                   const TargetRegisterClass *RC, | 
 | 1944 |                                   bool isStackAligned, | 
 | 1945 |                                   TargetMachine &TM) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 1946 |   unsigned Opc = 0; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1947 |   if (RC == &X86::GR64RegClass || RC == &X86::GR64_NOSPRegClass) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 1948 |     Opc = X86::MOV64mr; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1949 |   } else if (RC == &X86::GR32RegClass || RC == &X86::GR32_NOSPRegClass) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 1950 |     Opc = X86::MOV32mr; | 
 | 1951 |   } else if (RC == &X86::GR16RegClass) { | 
 | 1952 |     Opc = X86::MOV16mr; | 
 | 1953 |   } else if (RC == &X86::GR8RegClass) { | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1954 |     // Copying to or from a physical H register on x86-64 requires a NOREX | 
 | 1955 |     // move.  Otherwise use a normal move. | 
 | 1956 |     if (isHReg(SrcReg) && | 
 | 1957 |         TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 1958 |       Opc = X86::MOV8mr_NOREX; | 
 | 1959 |     else | 
 | 1960 |       Opc = X86::MOV8mr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1961 |   } else if (RC == &X86::GR64_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1962 |     Opc = X86::MOV64mr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1963 |   } else if (RC == &X86::GR32_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1964 |     Opc = X86::MOV32mr; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 1965 |   } else if (RC == &X86::GR16_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1966 |     Opc = X86::MOV16mr; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1967 |   } else if (RC == &X86::GR8_ABCD_LRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1968 |     Opc = X86::MOV8mr; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 1969 |   } else if (RC == &X86::GR8_ABCD_HRegClass) { | 
 | 1970 |     if (TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 1971 |       Opc = X86::MOV8mr_NOREX; | 
 | 1972 |     else | 
 | 1973 |       Opc = X86::MOV8mr; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 1974 |   } else if (RC == &X86::GR64_NOREXRegClass || | 
 | 1975 |              RC == &X86::GR64_NOREX_NOSPRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 1976 |     Opc = X86::MOV64mr; | 
 | 1977 |   } else if (RC == &X86::GR32_NOREXRegClass) { | 
 | 1978 |     Opc = X86::MOV32mr; | 
 | 1979 |   } else if (RC == &X86::GR16_NOREXRegClass) { | 
 | 1980 |     Opc = X86::MOV16mr; | 
 | 1981 |   } else if (RC == &X86::GR8_NOREXRegClass) { | 
 | 1982 |     Opc = X86::MOV8mr; | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 1983 |   } else if (RC == &X86::RFP80RegClass) { | 
 | 1984 |     Opc = X86::ST_FpP80m;   // pops | 
 | 1985 |   } else if (RC == &X86::RFP64RegClass) { | 
 | 1986 |     Opc = X86::ST_Fp64m; | 
 | 1987 |   } else if (RC == &X86::RFP32RegClass) { | 
 | 1988 |     Opc = X86::ST_Fp32m; | 
 | 1989 |   } else if (RC == &X86::FR32RegClass) { | 
 | 1990 |     Opc = X86::MOVSSmr; | 
 | 1991 |   } else if (RC == &X86::FR64RegClass) { | 
 | 1992 |     Opc = X86::MOVSDmr; | 
 | 1993 |   } else if (RC == &X86::VR128RegClass) { | 
| Anton Korobeynikov | 88bbf69 | 2008-07-19 06:30:51 +0000 | [diff] [blame] | 1994 |     // If stack is realigned we can use aligned stores. | 
 | 1995 |     Opc = isStackAligned ? X86::MOVAPSmr : X86::MOVUPSmr; | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 1996 |   } else if (RC == &X86::VR64RegClass) { | 
 | 1997 |     Opc = X86::MMX_MOVQ64mr; | 
 | 1998 |   } else { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 1999 |     llvm_unreachable("Unknown regclass"); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2000 |   } | 
 | 2001 |  | 
 | 2002 |   return Opc; | 
 | 2003 | } | 
 | 2004 |  | 
 | 2005 | void X86InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB, | 
 | 2006 |                                        MachineBasicBlock::iterator MI, | 
 | 2007 |                                        unsigned SrcReg, bool isKill, int FrameIdx, | 
 | 2008 |                                        const TargetRegisterClass *RC) const { | 
| Anton Korobeynikov | 88bbf69 | 2008-07-19 06:30:51 +0000 | [diff] [blame] | 2009 |   const MachineFunction &MF = *MBB.getParent(); | 
| Evan Cheng | 41c0840 | 2008-07-21 06:34:17 +0000 | [diff] [blame] | 2010 |   bool isAligned = (RI.getStackAlignment() >= 16) || | 
 | 2011 |     RI.needsStackRealignment(MF); | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2012 |   unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, TM); | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2013 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2014 |   if (MI != MBB.end()) DL = MI->getDebugLoc(); | 
 | 2015 |   addFrameReference(BuildMI(MBB, MI, DL, get(Opc)), FrameIdx) | 
| Bill Wendling | 587daed | 2009-05-13 21:33:08 +0000 | [diff] [blame] | 2016 |     .addReg(SrcReg, getKillRegState(isKill)); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2017 | } | 
 | 2018 |  | 
 | 2019 | void X86InstrInfo::storeRegToAddr(MachineFunction &MF, unsigned SrcReg, | 
 | 2020 |                                   bool isKill, | 
 | 2021 |                                   SmallVectorImpl<MachineOperand> &Addr, | 
 | 2022 |                                   const TargetRegisterClass *RC, | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2023 |                                   MachineInstr::mmo_iterator MMOBegin, | 
 | 2024 |                                   MachineInstr::mmo_iterator MMOEnd, | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2025 |                                   SmallVectorImpl<MachineInstr*> &NewMIs) const { | 
| Evan Cheng | 600c043 | 2009-11-16 21:56:03 +0000 | [diff] [blame] | 2026 |   bool isAligned = (*MMOBegin)->getAlignment() >= 16; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2027 |   unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, TM); | 
| Dale Johannesen | 21b5541 | 2009-02-12 23:08:38 +0000 | [diff] [blame] | 2028 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2029 |   MachineInstrBuilder MIB = BuildMI(MF, DL, get(Opc)); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2030 |   for (unsigned i = 0, e = Addr.size(); i != e; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2031 |     MIB.addOperand(Addr[i]); | 
| Bill Wendling | 587daed | 2009-05-13 21:33:08 +0000 | [diff] [blame] | 2032 |   MIB.addReg(SrcReg, getKillRegState(isKill)); | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2033 |   (*MIB).setMemRefs(MMOBegin, MMOEnd); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2034 |   NewMIs.push_back(MIB); | 
 | 2035 | } | 
 | 2036 |  | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2037 | static unsigned getLoadRegOpcode(unsigned DestReg, | 
 | 2038 |                                  const TargetRegisterClass *RC, | 
 | 2039 |                                  bool isStackAligned, | 
 | 2040 |                                  const TargetMachine &TM) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2041 |   unsigned Opc = 0; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 2042 |   if (RC == &X86::GR64RegClass || RC == &X86::GR64_NOSPRegClass) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2043 |     Opc = X86::MOV64rm; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 2044 |   } else if (RC == &X86::GR32RegClass || RC == &X86::GR32_NOSPRegClass) { | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2045 |     Opc = X86::MOV32rm; | 
 | 2046 |   } else if (RC == &X86::GR16RegClass) { | 
 | 2047 |     Opc = X86::MOV16rm; | 
 | 2048 |   } else if (RC == &X86::GR8RegClass) { | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2049 |     // Copying to or from a physical H register on x86-64 requires a NOREX | 
 | 2050 |     // move.  Otherwise use a normal move. | 
 | 2051 |     if (isHReg(DestReg) && | 
 | 2052 |         TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 2053 |       Opc = X86::MOV8rm_NOREX; | 
 | 2054 |     else | 
 | 2055 |       Opc = X86::MOV8rm; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 2056 |   } else if (RC == &X86::GR64_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 2057 |     Opc = X86::MOV64rm; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 2058 |   } else if (RC == &X86::GR32_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 2059 |     Opc = X86::MOV32rm; | 
| Dan Gohman | 6241762 | 2009-04-27 16:33:14 +0000 | [diff] [blame] | 2060 |   } else if (RC == &X86::GR16_ABCDRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 2061 |     Opc = X86::MOV16rm; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2062 |   } else if (RC == &X86::GR8_ABCD_LRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 2063 |     Opc = X86::MOV8rm; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2064 |   } else if (RC == &X86::GR8_ABCD_HRegClass) { | 
 | 2065 |     if (TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 2066 |       Opc = X86::MOV8rm_NOREX; | 
 | 2067 |     else | 
 | 2068 |       Opc = X86::MOV8rm; | 
| Dan Gohman | a4714e0 | 2009-07-30 01:56:29 +0000 | [diff] [blame] | 2069 |   } else if (RC == &X86::GR64_NOREXRegClass || | 
 | 2070 |              RC == &X86::GR64_NOREX_NOSPRegClass) { | 
| Dan Gohman | 21e3dfb | 2009-04-13 16:09:41 +0000 | [diff] [blame] | 2071 |     Opc = X86::MOV64rm; | 
 | 2072 |   } else if (RC == &X86::GR32_NOREXRegClass) { | 
 | 2073 |     Opc = X86::MOV32rm; | 
 | 2074 |   } else if (RC == &X86::GR16_NOREXRegClass) { | 
 | 2075 |     Opc = X86::MOV16rm; | 
 | 2076 |   } else if (RC == &X86::GR8_NOREXRegClass) { | 
 | 2077 |     Opc = X86::MOV8rm; | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2078 |   } else if (RC == &X86::RFP80RegClass) { | 
 | 2079 |     Opc = X86::LD_Fp80m; | 
 | 2080 |   } else if (RC == &X86::RFP64RegClass) { | 
 | 2081 |     Opc = X86::LD_Fp64m; | 
 | 2082 |   } else if (RC == &X86::RFP32RegClass) { | 
 | 2083 |     Opc = X86::LD_Fp32m; | 
 | 2084 |   } else if (RC == &X86::FR32RegClass) { | 
 | 2085 |     Opc = X86::MOVSSrm; | 
 | 2086 |   } else if (RC == &X86::FR64RegClass) { | 
 | 2087 |     Opc = X86::MOVSDrm; | 
 | 2088 |   } else if (RC == &X86::VR128RegClass) { | 
| Anton Korobeynikov | 88bbf69 | 2008-07-19 06:30:51 +0000 | [diff] [blame] | 2089 |     // If stack is realigned we can use aligned loads. | 
 | 2090 |     Opc = isStackAligned ? X86::MOVAPSrm : X86::MOVUPSrm; | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2091 |   } else if (RC == &X86::VR64RegClass) { | 
 | 2092 |     Opc = X86::MMX_MOVQ64rm; | 
 | 2093 |   } else { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 2094 |     llvm_unreachable("Unknown regclass"); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2095 |   } | 
 | 2096 |  | 
 | 2097 |   return Opc; | 
 | 2098 | } | 
 | 2099 |  | 
 | 2100 | void X86InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB, | 
| Anton Korobeynikov | 88bbf69 | 2008-07-19 06:30:51 +0000 | [diff] [blame] | 2101 |                                         MachineBasicBlock::iterator MI, | 
 | 2102 |                                         unsigned DestReg, int FrameIdx, | 
 | 2103 |                                         const TargetRegisterClass *RC) const{ | 
 | 2104 |   const MachineFunction &MF = *MBB.getParent(); | 
| Evan Cheng | 41c0840 | 2008-07-21 06:34:17 +0000 | [diff] [blame] | 2105 |   bool isAligned = (RI.getStackAlignment() >= 16) || | 
 | 2106 |     RI.needsStackRealignment(MF); | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2107 |   unsigned Opc = getLoadRegOpcode(DestReg, RC, isAligned, TM); | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2108 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2109 |   if (MI != MBB.end()) DL = MI->getDebugLoc(); | 
 | 2110 |   addFrameReference(BuildMI(MBB, MI, DL, get(Opc), DestReg), FrameIdx); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2111 | } | 
 | 2112 |  | 
 | 2113 | void X86InstrInfo::loadRegFromAddr(MachineFunction &MF, unsigned DestReg, | 
| Evan Cheng | 9f1c831 | 2008-07-03 09:09:37 +0000 | [diff] [blame] | 2114 |                                  SmallVectorImpl<MachineOperand> &Addr, | 
 | 2115 |                                  const TargetRegisterClass *RC, | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2116 |                                  MachineInstr::mmo_iterator MMOBegin, | 
 | 2117 |                                  MachineInstr::mmo_iterator MMOEnd, | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2118 |                                  SmallVectorImpl<MachineInstr*> &NewMIs) const { | 
| Evan Cheng | 600c043 | 2009-11-16 21:56:03 +0000 | [diff] [blame] | 2119 |   bool isAligned = (*MMOBegin)->getAlignment() >= 16; | 
| Dan Gohman | 4af325d | 2009-04-27 16:41:36 +0000 | [diff] [blame] | 2120 |   unsigned Opc = getLoadRegOpcode(DestReg, RC, isAligned, TM); | 
| Dale Johannesen | 21b5541 | 2009-02-12 23:08:38 +0000 | [diff] [blame] | 2121 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2122 |   MachineInstrBuilder MIB = BuildMI(MF, DL, get(Opc), DestReg); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2123 |   for (unsigned i = 0, e = Addr.size(); i != e; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2124 |     MIB.addOperand(Addr[i]); | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2125 |   (*MIB).setMemRefs(MMOBegin, MMOEnd); | 
| Owen Anderson | f6372aa | 2008-01-01 21:11:32 +0000 | [diff] [blame] | 2126 |   NewMIs.push_back(MIB); | 
 | 2127 | } | 
 | 2128 |  | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2129 | bool X86InstrInfo::spillCalleeSavedRegisters(MachineBasicBlock &MBB, | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2130 |                                              MachineBasicBlock::iterator MI, | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2131 |                                 const std::vector<CalleeSavedInfo> &CSI) const { | 
 | 2132 |   if (CSI.empty()) | 
 | 2133 |     return false; | 
 | 2134 |  | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2135 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2136 |   if (MI != MBB.end()) DL = MI->getDebugLoc(); | 
 | 2137 |  | 
| Evan Cheng | a67f32a | 2008-09-26 19:14:21 +0000 | [diff] [blame] | 2138 |   bool is64Bit = TM.getSubtarget<X86Subtarget>().is64Bit(); | 
| Anton Korobeynikov | 6f9bb6f | 2009-08-28 16:06:41 +0000 | [diff] [blame] | 2139 |   bool isWin64 = TM.getSubtarget<X86Subtarget>().isTargetWin64(); | 
| Anton Korobeynikov | c4e8bec | 2008-10-04 11:09:36 +0000 | [diff] [blame] | 2140 |   unsigned SlotSize = is64Bit ? 8 : 4; | 
 | 2141 |  | 
 | 2142 |   MachineFunction &MF = *MBB.getParent(); | 
| Evan Cheng | 910139f | 2009-07-09 06:53:48 +0000 | [diff] [blame] | 2143 |   unsigned FPReg = RI.getFrameRegister(MF); | 
| Anton Korobeynikov | c4e8bec | 2008-10-04 11:09:36 +0000 | [diff] [blame] | 2144 |   X86MachineFunctionInfo *X86FI = MF.getInfo<X86MachineFunctionInfo>(); | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2145 |   unsigned CalleeFrameSize = 0; | 
| Anton Korobeynikov | c4e8bec | 2008-10-04 11:09:36 +0000 | [diff] [blame] | 2146 |    | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2147 |   unsigned Opc = is64Bit ? X86::PUSH64r : X86::PUSH32r; | 
 | 2148 |   for (unsigned i = CSI.size(); i != 0; --i) { | 
 | 2149 |     unsigned Reg = CSI[i-1].getReg(); | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2150 |     const TargetRegisterClass *RegClass = CSI[i-1].getRegClass(); | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2151 |     // Add the callee-saved register as live-in. It's killed at the spill. | 
 | 2152 |     MBB.addLiveIn(Reg); | 
| Evan Cheng | 910139f | 2009-07-09 06:53:48 +0000 | [diff] [blame] | 2153 |     if (Reg == FPReg) | 
 | 2154 |       // X86RegisterInfo::emitPrologue will handle spilling of frame register. | 
 | 2155 |       continue; | 
| Anton Korobeynikov | 6f9bb6f | 2009-08-28 16:06:41 +0000 | [diff] [blame] | 2156 |     if (RegClass != &X86::VR128RegClass && !isWin64) { | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2157 |       CalleeFrameSize += SlotSize; | 
| Evan Cheng | 910139f | 2009-07-09 06:53:48 +0000 | [diff] [blame] | 2158 |       BuildMI(MBB, MI, DL, get(Opc)).addReg(Reg, RegState::Kill); | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2159 |     } else { | 
 | 2160 |       storeRegToStackSlot(MBB, MI, Reg, true, CSI[i-1].getFrameIdx(), RegClass); | 
 | 2161 |     } | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2162 |   } | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2163 |  | 
 | 2164 |   X86FI->setCalleeSavedFrameSize(CalleeFrameSize); | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2165 |   return true; | 
 | 2166 | } | 
 | 2167 |  | 
 | 2168 | bool X86InstrInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB, | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2169 |                                                MachineBasicBlock::iterator MI, | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2170 |                                 const std::vector<CalleeSavedInfo> &CSI) const { | 
 | 2171 |   if (CSI.empty()) | 
 | 2172 |     return false; | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2173 |  | 
 | 2174 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 2175 |   if (MI != MBB.end()) DL = MI->getDebugLoc(); | 
 | 2176 |  | 
| Evan Cheng | 910139f | 2009-07-09 06:53:48 +0000 | [diff] [blame] | 2177 |   MachineFunction &MF = *MBB.getParent(); | 
 | 2178 |   unsigned FPReg = RI.getFrameRegister(MF); | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2179 |   bool is64Bit = TM.getSubtarget<X86Subtarget>().is64Bit(); | 
| Anton Korobeynikov | 6f9bb6f | 2009-08-28 16:06:41 +0000 | [diff] [blame] | 2180 |   bool isWin64 = TM.getSubtarget<X86Subtarget>().isTargetWin64(); | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2181 |   unsigned Opc = is64Bit ? X86::POP64r : X86::POP32r; | 
 | 2182 |   for (unsigned i = 0, e = CSI.size(); i != e; ++i) { | 
 | 2183 |     unsigned Reg = CSI[i].getReg(); | 
| Evan Cheng | 910139f | 2009-07-09 06:53:48 +0000 | [diff] [blame] | 2184 |     if (Reg == FPReg) | 
 | 2185 |       // X86RegisterInfo::emitEpilogue will handle restoring of frame register. | 
 | 2186 |       continue; | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2187 |     const TargetRegisterClass *RegClass = CSI[i].getRegClass(); | 
| Anton Korobeynikov | 6f9bb6f | 2009-08-28 16:06:41 +0000 | [diff] [blame] | 2188 |     if (RegClass != &X86::VR128RegClass && !isWin64) { | 
| Eli Friedman | bccf4b3 | 2009-06-04 02:32:04 +0000 | [diff] [blame] | 2189 |       BuildMI(MBB, MI, DL, get(Opc), Reg); | 
 | 2190 |     } else { | 
 | 2191 |       loadRegFromStackSlot(MBB, MI, Reg, CSI[i].getFrameIdx(), RegClass); | 
 | 2192 |     } | 
| Owen Anderson | d94b6a1 | 2008-01-04 23:57:37 +0000 | [diff] [blame] | 2193 |   } | 
 | 2194 |   return true; | 
 | 2195 | } | 
 | 2196 |  | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 2197 | static MachineInstr *FuseTwoAddrInst(MachineFunction &MF, unsigned Opcode, | 
| Dan Gohman | d68a076 | 2009-01-05 17:59:02 +0000 | [diff] [blame] | 2198 |                                      const SmallVectorImpl<MachineOperand> &MOs, | 
| Bill Wendling | 9bc96a5 | 2009-02-03 00:55:04 +0000 | [diff] [blame] | 2199 |                                      MachineInstr *MI, | 
 | 2200 |                                      const TargetInstrInfo &TII) { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2201 |   // Create the base instruction with the memory operand as the first part. | 
| Bill Wendling | 9bc96a5 | 2009-02-03 00:55:04 +0000 | [diff] [blame] | 2202 |   MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), | 
 | 2203 |                                               MI->getDebugLoc(), true); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2204 |   MachineInstrBuilder MIB(NewMI); | 
 | 2205 |   unsigned NumAddrOps = MOs.size(); | 
 | 2206 |   for (unsigned i = 0; i != NumAddrOps; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2207 |     MIB.addOperand(MOs[i]); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2208 |   if (NumAddrOps < 4)  // FrameIndex only | 
| Rafael Espindola | 094fad3 | 2009-04-08 21:14:34 +0000 | [diff] [blame] | 2209 |     addOffset(MIB, 0); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2210 |    | 
 | 2211 |   // Loop over the rest of the ri operands, converting them over. | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 2212 |   unsigned NumOps = MI->getDesc().getNumOperands()-2; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2213 |   for (unsigned i = 0; i != NumOps; ++i) { | 
 | 2214 |     MachineOperand &MO = MI->getOperand(i+2); | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2215 |     MIB.addOperand(MO); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2216 |   } | 
 | 2217 |   for (unsigned i = NumOps+2, e = MI->getNumOperands(); i != e; ++i) { | 
 | 2218 |     MachineOperand &MO = MI->getOperand(i); | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2219 |     MIB.addOperand(MO); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2220 |   } | 
 | 2221 |   return MIB; | 
 | 2222 | } | 
 | 2223 |  | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 2224 | static MachineInstr *FuseInst(MachineFunction &MF, | 
 | 2225 |                               unsigned Opcode, unsigned OpNo, | 
| Dan Gohman | d68a076 | 2009-01-05 17:59:02 +0000 | [diff] [blame] | 2226 |                               const SmallVectorImpl<MachineOperand> &MOs, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2227 |                               MachineInstr *MI, const TargetInstrInfo &TII) { | 
| Bill Wendling | 9bc96a5 | 2009-02-03 00:55:04 +0000 | [diff] [blame] | 2228 |   MachineInstr *NewMI = MF.CreateMachineInstr(TII.get(Opcode), | 
 | 2229 |                                               MI->getDebugLoc(), true); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2230 |   MachineInstrBuilder MIB(NewMI); | 
 | 2231 |    | 
 | 2232 |   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { | 
 | 2233 |     MachineOperand &MO = MI->getOperand(i); | 
 | 2234 |     if (i == OpNo) { | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2235 |       assert(MO.isReg() && "Expected to fold into reg operand!"); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2236 |       unsigned NumAddrOps = MOs.size(); | 
 | 2237 |       for (unsigned i = 0; i != NumAddrOps; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2238 |         MIB.addOperand(MOs[i]); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2239 |       if (NumAddrOps < 4)  // FrameIndex only | 
| Rafael Espindola | 094fad3 | 2009-04-08 21:14:34 +0000 | [diff] [blame] | 2240 |         addOffset(MIB, 0); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2241 |     } else { | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2242 |       MIB.addOperand(MO); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2243 |     } | 
 | 2244 |   } | 
 | 2245 |   return MIB; | 
 | 2246 | } | 
 | 2247 |  | 
 | 2248 | static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode, | 
| Dan Gohman | d68a076 | 2009-01-05 17:59:02 +0000 | [diff] [blame] | 2249 |                                 const SmallVectorImpl<MachineOperand> &MOs, | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2250 |                                 MachineInstr *MI) { | 
| Dan Gohman | 8e5f2c6 | 2008-07-07 23:14:23 +0000 | [diff] [blame] | 2251 |   MachineFunction &MF = *MI->getParent()->getParent(); | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2252 |   MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), TII.get(Opcode)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2253 |  | 
 | 2254 |   unsigned NumAddrOps = MOs.size(); | 
 | 2255 |   for (unsigned i = 0; i != NumAddrOps; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2256 |     MIB.addOperand(MOs[i]); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2257 |   if (NumAddrOps < 4)  // FrameIndex only | 
| Rafael Espindola | 094fad3 | 2009-04-08 21:14:34 +0000 | [diff] [blame] | 2258 |     addOffset(MIB, 0); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2259 |   return MIB.addImm(0); | 
 | 2260 | } | 
 | 2261 |  | 
/// foldMemoryOperandImpl - Core folding routine: try to replace register
/// operand 'i' of MI with the memory reference described by MOs, producing a
/// new (unfolded-register-free) instruction.  Size/Align describe the memory
/// object being folded (0 Size means "unknown/don't check").  Returns the new
/// instruction, or NULL if no fold is possible.
MachineInstr*
X86InstrInfo::foldMemoryOperandImpl(MachineFunction &MF,
                                    MachineInstr *MI, unsigned i,
                                    const SmallVectorImpl<MachineOperand> &MOs,
                                    unsigned Size, unsigned Align) const {
  const DenseMap<unsigned*, std::pair<unsigned,unsigned> > *OpcodeTablePtr=NULL;
  bool isTwoAddrFold = false;
  unsigned NumOps = MI->getDesc().getNumOperands();
  bool isTwoAddr = NumOps > 1 &&
    MI->getDesc().getOperandConstraint(1, TOI::TIED_TO) != -1;

  MachineInstr *NewMI = NULL;
  // Folding a memory location into the two-address part of a two-address
  // instruction is different than folding it other places.  It requires
  // replacing the *two* registers with the memory location.
  if (isTwoAddr && NumOps >= 2 && i < 2 &&
      MI->getOperand(0).isReg() &&
      MI->getOperand(1).isReg() &&
      MI->getOperand(0).getReg() == MI->getOperand(1).getReg()) {
    OpcodeTablePtr = &RegOp2MemOpTable2Addr;
    isTwoAddrFold = true;
  } else if (i == 0) { // If operand 0
    // MOVxxr0 pseudos fold directly into a store of immediate zero; no table
    // lookup is needed.
    if (MI->getOpcode() == X86::MOV16r0)
      NewMI = MakeM0Inst(*this, X86::MOV16mi, MOs, MI);
    else if (MI->getOpcode() == X86::MOV32r0)
      NewMI = MakeM0Inst(*this, X86::MOV32mi, MOs, MI);
    else if (MI->getOpcode() == X86::MOV8r0)
      NewMI = MakeM0Inst(*this, X86::MOV8mi, MOs, MI);
    if (NewMI)
      return NewMI;
     
    OpcodeTablePtr = &RegOp2MemOpTable0;
  } else if (i == 1) {
    OpcodeTablePtr = &RegOp2MemOpTable1;
  } else if (i == 2) {
    OpcodeTablePtr = &RegOp2MemOpTable2;
  }
  
  // If table selected...
  if (OpcodeTablePtr) {
    // Find the Opcode to fuse
    DenseMap<unsigned*, std::pair<unsigned,unsigned> >::const_iterator I =
      OpcodeTablePtr->find((unsigned*)MI->getOpcode());
    if (I != OpcodeTablePtr->end()) {
      // Table entry: (memory opcode, minimum required alignment).
      unsigned Opcode = I->second.first;
      unsigned MinAlign = I->second.second;
      if (Align < MinAlign)
        return NULL;
      bool NarrowToMOV32rm = false;
      if (Size) {
        unsigned RCSize =  MI->getDesc().OpInfo[i].getRegClass(&RI)->getSize();
        if (Size < RCSize) {
          // Check if it's safe to fold the load. If the size of the object is
          // narrower than the load width, then it's not.
          if (Opcode != X86::MOV64rm || RCSize != 8 || Size != 4)
            return NULL;
          // If this is a 64-bit load, but the spill slot is 32, then we can do
          // a 32-bit load which is implicitly zero-extended. This likely is due
          // to liveintervalanalysis remat'ing a load from stack slot.
          // Operands with subregs can't be narrowed this way.
          if (MI->getOperand(0).getSubReg() || MI->getOperand(1).getSubReg())
            return NULL;
          Opcode = X86::MOV32rm;
          NarrowToMOV32rm = true;
        }
      }

      if (isTwoAddrFold)
        NewMI = FuseTwoAddrInst(MF, Opcode, MOs, MI, *this);
      else
        NewMI = FuseInst(MF, Opcode, i, MOs, MI, *this);

      if (NarrowToMOV32rm) {
        // If this is the special case where we use a MOV32rm to load a 32-bit
        // value and zero-extend the top bits. Change the destination register
        // to a 32-bit one.
        unsigned DstReg = NewMI->getOperand(0).getReg();
        if (TargetRegisterInfo::isPhysicalRegister(DstReg))
          // Physical register: rewrite to its 32-bit sub-register.
          NewMI->getOperand(0).setReg(RI.getSubReg(DstReg,
                                                   4/*x86_subreg_32bit*/));
        else
          // Virtual register: mark the def as the 32-bit sub-register index.
          NewMI->getOperand(0).setSubReg(4/*x86_subreg_32bit*/);
      }
      return NewMI;
    }
  }
  
  // No fusion 
  if (PrintFailedFusing)
    errs() << "We failed to fuse operand " << i << " in " << *MI;
  return NULL;
}
 | 2353 |  | 
 | 2354 |  | 
| Dan Gohman | c54baa2 | 2008-12-03 18:43:12 +0000 | [diff] [blame] | 2355 | MachineInstr* X86InstrInfo::foldMemoryOperandImpl(MachineFunction &MF, | 
 | 2356 |                                                   MachineInstr *MI, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 2357 |                                            const SmallVectorImpl<unsigned> &Ops, | 
| Dan Gohman | c54baa2 | 2008-12-03 18:43:12 +0000 | [diff] [blame] | 2358 |                                                   int FrameIndex) const { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2359 |   // Check switch flag  | 
 | 2360 |   if (NoFusing) return NULL; | 
 | 2361 |  | 
| Evan Cheng | 5fd79d0 | 2008-02-08 21:20:40 +0000 | [diff] [blame] | 2362 |   const MachineFrameInfo *MFI = MF.getFrameInfo(); | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2363 |   unsigned Size = MFI->getObjectSize(FrameIndex); | 
| Evan Cheng | 5fd79d0 | 2008-02-08 21:20:40 +0000 | [diff] [blame] | 2364 |   unsigned Alignment = MFI->getObjectAlignment(FrameIndex); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2365 |   if (Ops.size() == 2 && Ops[0] == 0 && Ops[1] == 1) { | 
 | 2366 |     unsigned NewOpc = 0; | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2367 |     unsigned RCSize = 0; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2368 |     switch (MI->getOpcode()) { | 
 | 2369 |     default: return NULL; | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2370 |     case X86::TEST8rr:  NewOpc = X86::CMP8ri; RCSize = 1; break; | 
 | 2371 |     case X86::TEST16rr: NewOpc = X86::CMP16ri; RCSize = 2; break; | 
 | 2372 |     case X86::TEST32rr: NewOpc = X86::CMP32ri; RCSize = 4; break; | 
 | 2373 |     case X86::TEST64rr: NewOpc = X86::CMP64ri32; RCSize = 8; break; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2374 |     } | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2375 |     // Check if it's safe to fold the load. If the size of the object is | 
 | 2376 |     // narrower than the load width, then it's not. | 
 | 2377 |     if (Size < RCSize) | 
 | 2378 |       return NULL; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2379 |     // Change to CMPXXri r, 0 first. | 
| Chris Lattner | 5080f4d | 2008-01-11 18:10:50 +0000 | [diff] [blame] | 2380 |     MI->setDesc(get(NewOpc)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2381 |     MI->getOperand(1).ChangeToImmediate(0); | 
 | 2382 |   } else if (Ops.size() != 1) | 
 | 2383 |     return NULL; | 
 | 2384 |  | 
 | 2385 |   SmallVector<MachineOperand,4> MOs; | 
 | 2386 |   MOs.push_back(MachineOperand::CreateFI(FrameIndex)); | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2387 |   return foldMemoryOperandImpl(MF, MI, Ops[0], MOs, Size, Alignment); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2388 | } | 
 | 2389 |  | 
| Dan Gohman | c54baa2 | 2008-12-03 18:43:12 +0000 | [diff] [blame] | 2390 | MachineInstr* X86InstrInfo::foldMemoryOperandImpl(MachineFunction &MF, | 
 | 2391 |                                                   MachineInstr *MI, | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 2392 |                                            const SmallVectorImpl<unsigned> &Ops, | 
| Dan Gohman | c54baa2 | 2008-12-03 18:43:12 +0000 | [diff] [blame] | 2393 |                                                   MachineInstr *LoadMI) const { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2394 |   // Check switch flag  | 
 | 2395 |   if (NoFusing) return NULL; | 
 | 2396 |  | 
| Dan Gohman | cddc11e | 2008-07-12 00:10:52 +0000 | [diff] [blame] | 2397 |   // Determine the alignment of the load. | 
| Evan Cheng | 5fd79d0 | 2008-02-08 21:20:40 +0000 | [diff] [blame] | 2398 |   unsigned Alignment = 0; | 
| Dan Gohman | cddc11e | 2008-07-12 00:10:52 +0000 | [diff] [blame] | 2399 |   if (LoadMI->hasOneMemOperand()) | 
| Dan Gohman | c76909a | 2009-09-25 20:36:54 +0000 | [diff] [blame] | 2400 |     Alignment = (*LoadMI->memoperands_begin())->getAlignment(); | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2401 |   else | 
 | 2402 |     switch (LoadMI->getOpcode()) { | 
 | 2403 |     case X86::V_SET0: | 
 | 2404 |     case X86::V_SETALLONES: | 
 | 2405 |       Alignment = 16; | 
 | 2406 |       break; | 
 | 2407 |     case X86::FsFLD0SD: | 
 | 2408 |       Alignment = 8; | 
 | 2409 |       break; | 
 | 2410 |     case X86::FsFLD0SS: | 
 | 2411 |       Alignment = 4; | 
 | 2412 |       break; | 
 | 2413 |     default: | 
 | 2414 |       llvm_unreachable("Don't know how to fold this instruction!"); | 
 | 2415 |     } | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2416 |   if (Ops.size() == 2 && Ops[0] == 0 && Ops[1] == 1) { | 
 | 2417 |     unsigned NewOpc = 0; | 
 | 2418 |     switch (MI->getOpcode()) { | 
 | 2419 |     default: return NULL; | 
 | 2420 |     case X86::TEST8rr:  NewOpc = X86::CMP8ri; break; | 
 | 2421 |     case X86::TEST16rr: NewOpc = X86::CMP16ri; break; | 
 | 2422 |     case X86::TEST32rr: NewOpc = X86::CMP32ri; break; | 
 | 2423 |     case X86::TEST64rr: NewOpc = X86::CMP64ri32; break; | 
 | 2424 |     } | 
 | 2425 |     // Change to CMPXXri r, 0 first. | 
| Chris Lattner | 5080f4d | 2008-01-11 18:10:50 +0000 | [diff] [blame] | 2426 |     MI->setDesc(get(NewOpc)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2427 |     MI->getOperand(1).ChangeToImmediate(0); | 
 | 2428 |   } else if (Ops.size() != 1) | 
 | 2429 |     return NULL; | 
 | 2430 |  | 
| Rafael Espindola | 094fad3 | 2009-04-08 21:14:34 +0000 | [diff] [blame] | 2431 |   SmallVector<MachineOperand,X86AddrNumOperands> MOs; | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2432 |   switch (LoadMI->getOpcode()) { | 
 | 2433 |   case X86::V_SET0: | 
 | 2434 |   case X86::V_SETALLONES: | 
 | 2435 |   case X86::FsFLD0SD: | 
 | 2436 |   case X86::FsFLD0SS: { | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2437 |     // Folding a V_SET0 or V_SETALLONES as a load, to ease register pressure. | 
 | 2438 |     // Create a constant-pool entry and operands to load from it. | 
 | 2439 |  | 
 | 2440 |     // x86-32 PIC requires a PIC base register for constant pools. | 
 | 2441 |     unsigned PICBase = 0; | 
| Jakob Stoklund Olesen | 93e55de | 2009-07-16 21:24:13 +0000 | [diff] [blame] | 2442 |     if (TM.getRelocationModel() == Reloc::PIC_) { | 
| Evan Cheng | 2b48ab9 | 2009-07-16 18:44:05 +0000 | [diff] [blame] | 2443 |       if (TM.getSubtarget<X86Subtarget>().is64Bit()) | 
 | 2444 |         PICBase = X86::RIP; | 
| Jakob Stoklund Olesen | 93e55de | 2009-07-16 21:24:13 +0000 | [diff] [blame] | 2445 |       else | 
| Evan Cheng | 2b48ab9 | 2009-07-16 18:44:05 +0000 | [diff] [blame] | 2446 |         // FIXME: PICBase = TM.getInstrInfo()->getGlobalBaseReg(&MF); | 
 | 2447 |         // This doesn't work for several reasons. | 
 | 2448 |         // 1. GlobalBaseReg may have been spilled. | 
 | 2449 |         // 2. It may not be live at MI. | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2450 |         return NULL; | 
| Jakob Stoklund Olesen | 93e55de | 2009-07-16 21:24:13 +0000 | [diff] [blame] | 2451 |     } | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2452 |  | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2453 |     // Create a constant-pool entry. | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2454 |     MachineConstantPool &MCP = *MF.getConstantPool(); | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2455 |     const Type *Ty; | 
 | 2456 |     if (LoadMI->getOpcode() == X86::FsFLD0SS) | 
 | 2457 |       Ty = Type::getFloatTy(MF.getFunction()->getContext()); | 
 | 2458 |     else if (LoadMI->getOpcode() == X86::FsFLD0SD) | 
 | 2459 |       Ty = Type::getDoubleTy(MF.getFunction()->getContext()); | 
 | 2460 |     else | 
 | 2461 |       Ty = VectorType::get(Type::getInt32Ty(MF.getFunction()->getContext()), 4); | 
 | 2462 |     Constant *C = LoadMI->getOpcode() == X86::V_SETALLONES ? | 
 | 2463 |                     Constant::getAllOnesValue(Ty) : | 
 | 2464 |                     Constant::getNullValue(Ty); | 
 | 2465 |     unsigned CPI = MCP.getConstantPoolIndex(C, Alignment); | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2466 |  | 
 | 2467 |     // Create operands to load from the constant pool entry. | 
 | 2468 |     MOs.push_back(MachineOperand::CreateReg(PICBase, false)); | 
 | 2469 |     MOs.push_back(MachineOperand::CreateImm(1)); | 
 | 2470 |     MOs.push_back(MachineOperand::CreateReg(0, false)); | 
 | 2471 |     MOs.push_back(MachineOperand::CreateCPI(CPI, 0)); | 
| Rafael Espindola | 094fad3 | 2009-04-08 21:14:34 +0000 | [diff] [blame] | 2472 |     MOs.push_back(MachineOperand::CreateReg(0, false)); | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2473 |     break; | 
 | 2474 |   } | 
 | 2475 |   default: { | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2476 |     // Folding a normal load. Just copy the load's address operands. | 
 | 2477 |     unsigned NumOps = LoadMI->getDesc().getNumOperands(); | 
| Rafael Espindola | 705d800 | 2009-03-27 15:57:50 +0000 | [diff] [blame] | 2478 |     for (unsigned i = NumOps - X86AddrNumOperands; i != NumOps; ++i) | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2479 |       MOs.push_back(LoadMI->getOperand(i)); | 
| Dan Gohman | 4a0b3e1 | 2009-09-21 18:30:38 +0000 | [diff] [blame] | 2480 |     break; | 
 | 2481 |   } | 
| Dan Gohman | 62c939d | 2008-12-03 05:21:24 +0000 | [diff] [blame] | 2482 |   } | 
| Evan Cheng | 9cef48e | 2009-09-11 00:39:26 +0000 | [diff] [blame] | 2483 |   return foldMemoryOperandImpl(MF, MI, Ops[0], MOs, 0, Alignment); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2484 | } | 
 | 2485 |  | 
 | 2486 |  | 
| Dan Gohman | 8e8b8a2 | 2008-10-16 01:49:15 +0000 | [diff] [blame] | 2487 | bool X86InstrInfo::canFoldMemoryOperand(const MachineInstr *MI, | 
 | 2488 |                                   const SmallVectorImpl<unsigned> &Ops) const { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2489 |   // Check switch flag  | 
 | 2490 |   if (NoFusing) return 0; | 
 | 2491 |  | 
 | 2492 |   if (Ops.size() == 2 && Ops[0] == 0 && Ops[1] == 1) { | 
 | 2493 |     switch (MI->getOpcode()) { | 
 | 2494 |     default: return false; | 
 | 2495 |     case X86::TEST8rr:  | 
 | 2496 |     case X86::TEST16rr: | 
 | 2497 |     case X86::TEST32rr: | 
 | 2498 |     case X86::TEST64rr: | 
 | 2499 |       return true; | 
 | 2500 |     } | 
 | 2501 |   } | 
 | 2502 |  | 
 | 2503 |   if (Ops.size() != 1) | 
 | 2504 |     return false; | 
 | 2505 |  | 
 | 2506 |   unsigned OpNum = Ops[0]; | 
 | 2507 |   unsigned Opc = MI->getOpcode(); | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 2508 |   unsigned NumOps = MI->getDesc().getNumOperands(); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2509 |   bool isTwoAddr = NumOps > 1 && | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 2510 |     MI->getDesc().getOperandConstraint(1, TOI::TIED_TO) != -1; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2511 |  | 
 | 2512 |   // Folding a memory location into the two-address part of a two-address | 
 | 2513 |   // instruction is different than folding it other places.  It requires | 
 | 2514 |   // replacing the *two* registers with the memory location. | 
| Evan Cheng | f9b36f0 | 2009-07-15 06:10:07 +0000 | [diff] [blame] | 2515 |   const DenseMap<unsigned*, std::pair<unsigned,unsigned> > *OpcodeTablePtr=NULL; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2516 |   if (isTwoAddr && NumOps >= 2 && OpNum < 2) {  | 
 | 2517 |     OpcodeTablePtr = &RegOp2MemOpTable2Addr; | 
 | 2518 |   } else if (OpNum == 0) { // If operand 0 | 
 | 2519 |     switch (Opc) { | 
| Chris Lattner | 9ac7542 | 2009-07-14 20:19:57 +0000 | [diff] [blame] | 2520 |     case X86::MOV8r0: | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2521 |     case X86::MOV16r0: | 
 | 2522 |     case X86::MOV32r0: | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2523 |       return true; | 
 | 2524 |     default: break; | 
 | 2525 |     } | 
 | 2526 |     OpcodeTablePtr = &RegOp2MemOpTable0; | 
 | 2527 |   } else if (OpNum == 1) { | 
 | 2528 |     OpcodeTablePtr = &RegOp2MemOpTable1; | 
 | 2529 |   } else if (OpNum == 2) { | 
 | 2530 |     OpcodeTablePtr = &RegOp2MemOpTable2; | 
 | 2531 |   } | 
 | 2532 |    | 
 | 2533 |   if (OpcodeTablePtr) { | 
 | 2534 |     // Find the Opcode to fuse | 
| Jeffrey Yasskin | 81cf432 | 2009-11-10 01:02:17 +0000 | [diff] [blame] | 2535 |     DenseMap<unsigned*, std::pair<unsigned,unsigned> >::const_iterator I = | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2536 |       OpcodeTablePtr->find((unsigned*)Opc); | 
 | 2537 |     if (I != OpcodeTablePtr->end()) | 
 | 2538 |       return true; | 
 | 2539 |   } | 
 | 2540 |   return false; | 
 | 2541 | } | 
 | 2542 |  | 
 | 2543 | bool X86InstrInfo::unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI, | 
 | 2544 |                                 unsigned Reg, bool UnfoldLoad, bool UnfoldStore, | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2545 |                                 SmallVectorImpl<MachineInstr*> &NewMIs) const { | 
| Jeffrey Yasskin | 81cf432 | 2009-11-10 01:02:17 +0000 | [diff] [blame] | 2546 |   DenseMap<unsigned*, std::pair<unsigned,unsigned> >::const_iterator I = | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2547 |     MemOp2RegOpTable.find((unsigned*)MI->getOpcode()); | 
 | 2548 |   if (I == MemOp2RegOpTable.end()) | 
 | 2549 |     return false; | 
| Dale Johannesen | 21b5541 | 2009-02-12 23:08:38 +0000 | [diff] [blame] | 2550 |   DebugLoc dl = MI->getDebugLoc(); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2551 |   unsigned Opc = I->second.first; | 
 | 2552 |   unsigned Index = I->second.second & 0xf; | 
 | 2553 |   bool FoldedLoad = I->second.second & (1 << 4); | 
 | 2554 |   bool FoldedStore = I->second.second & (1 << 5); | 
 | 2555 |   if (UnfoldLoad && !FoldedLoad) | 
 | 2556 |     return false; | 
 | 2557 |   UnfoldLoad &= FoldedLoad; | 
 | 2558 |   if (UnfoldStore && !FoldedStore) | 
 | 2559 |     return false; | 
 | 2560 |   UnfoldStore &= FoldedStore; | 
 | 2561 |  | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 2562 |   const TargetInstrDesc &TID = get(Opc); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2563 |   const TargetOperandInfo &TOI = TID.OpInfo[Index]; | 
| Chris Lattner | cb778a8 | 2009-07-29 21:10:12 +0000 | [diff] [blame] | 2564 |   const TargetRegisterClass *RC = TOI.getRegClass(&RI); | 
| Rafael Espindola | 705d800 | 2009-03-27 15:57:50 +0000 | [diff] [blame] | 2565 |   SmallVector<MachineOperand, X86AddrNumOperands> AddrOps; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2566 |   SmallVector<MachineOperand,2> BeforeOps; | 
 | 2567 |   SmallVector<MachineOperand,2> AfterOps; | 
 | 2568 |   SmallVector<MachineOperand,4> ImpOps; | 
 | 2569 |   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { | 
 | 2570 |     MachineOperand &Op = MI->getOperand(i); | 
| Rafael Espindola | 705d800 | 2009-03-27 15:57:50 +0000 | [diff] [blame] | 2571 |     if (i >= Index && i < Index + X86AddrNumOperands) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2572 |       AddrOps.push_back(Op); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2573 |     else if (Op.isReg() && Op.isImplicit()) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2574 |       ImpOps.push_back(Op); | 
 | 2575 |     else if (i < Index) | 
 | 2576 |       BeforeOps.push_back(Op); | 
 | 2577 |     else if (i > Index) | 
 | 2578 |       AfterOps.push_back(Op); | 
 | 2579 |   } | 
 | 2580 |  | 
 | 2581 |   // Emit the load instruction. | 
 | 2582 |   if (UnfoldLoad) { | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2583 |     std::pair<MachineInstr::mmo_iterator, | 
 | 2584 |               MachineInstr::mmo_iterator> MMOs = | 
 | 2585 |       MF.extractLoadMemRefs(MI->memoperands_begin(), | 
 | 2586 |                             MI->memoperands_end()); | 
 | 2587 |     loadRegFromAddr(MF, Reg, AddrOps, RC, MMOs.first, MMOs.second, NewMIs); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2588 |     if (UnfoldStore) { | 
 | 2589 |       // Address operands cannot be marked isKill. | 
| Rafael Espindola | 705d800 | 2009-03-27 15:57:50 +0000 | [diff] [blame] | 2590 |       for (unsigned i = 1; i != 1 + X86AddrNumOperands; ++i) { | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2591 |         MachineOperand &MO = NewMIs[0]->getOperand(i); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2592 |         if (MO.isReg()) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2593 |           MO.setIsKill(false); | 
 | 2594 |       } | 
 | 2595 |     } | 
 | 2596 |   } | 
 | 2597 |  | 
 | 2598 |   // Emit the data processing instruction. | 
| Bill Wendling | 9bc96a5 | 2009-02-03 00:55:04 +0000 | [diff] [blame] | 2599 |   MachineInstr *DataMI = MF.CreateMachineInstr(TID, MI->getDebugLoc(), true); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2600 |   MachineInstrBuilder MIB(DataMI); | 
 | 2601 |    | 
 | 2602 |   if (FoldedStore) | 
| Bill Wendling | 587daed | 2009-05-13 21:33:08 +0000 | [diff] [blame] | 2603 |     MIB.addReg(Reg, RegState::Define); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2604 |   for (unsigned i = 0, e = BeforeOps.size(); i != e; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2605 |     MIB.addOperand(BeforeOps[i]); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2606 |   if (FoldedLoad) | 
 | 2607 |     MIB.addReg(Reg); | 
 | 2608 |   for (unsigned i = 0, e = AfterOps.size(); i != e; ++i) | 
| Dan Gohman | 9735761 | 2009-02-18 05:45:50 +0000 | [diff] [blame] | 2609 |     MIB.addOperand(AfterOps[i]); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2610 |   for (unsigned i = 0, e = ImpOps.size(); i != e; ++i) { | 
 | 2611 |     MachineOperand &MO = ImpOps[i]; | 
| Bill Wendling | 587daed | 2009-05-13 21:33:08 +0000 | [diff] [blame] | 2612 |     MIB.addReg(MO.getReg(), | 
 | 2613 |                getDefRegState(MO.isDef()) | | 
 | 2614 |                RegState::Implicit | | 
 | 2615 |                getKillRegState(MO.isKill()) | | 
| Evan Cheng | 4784f1f | 2009-06-30 08:49:04 +0000 | [diff] [blame] | 2616 |                getDeadRegState(MO.isDead()) | | 
 | 2617 |                getUndefRegState(MO.isUndef())); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2618 |   } | 
 | 2619 |   // Change CMP32ri r, 0 back to TEST32rr r, r, etc. | 
 | 2620 |   unsigned NewOpc = 0; | 
 | 2621 |   switch (DataMI->getOpcode()) { | 
 | 2622 |   default: break; | 
 | 2623 |   case X86::CMP64ri32: | 
 | 2624 |   case X86::CMP32ri: | 
 | 2625 |   case X86::CMP16ri: | 
 | 2626 |   case X86::CMP8ri: { | 
 | 2627 |     MachineOperand &MO0 = DataMI->getOperand(0); | 
 | 2628 |     MachineOperand &MO1 = DataMI->getOperand(1); | 
 | 2629 |     if (MO1.getImm() == 0) { | 
 | 2630 |       switch (DataMI->getOpcode()) { | 
 | 2631 |       default: break; | 
 | 2632 |       case X86::CMP64ri32: NewOpc = X86::TEST64rr; break; | 
 | 2633 |       case X86::CMP32ri:   NewOpc = X86::TEST32rr; break; | 
 | 2634 |       case X86::CMP16ri:   NewOpc = X86::TEST16rr; break; | 
 | 2635 |       case X86::CMP8ri:    NewOpc = X86::TEST8rr; break; | 
 | 2636 |       } | 
| Chris Lattner | 5080f4d | 2008-01-11 18:10:50 +0000 | [diff] [blame] | 2637 |       DataMI->setDesc(get(NewOpc)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2638 |       MO1.ChangeToRegister(MO0.getReg(), false); | 
 | 2639 |     } | 
 | 2640 |   } | 
 | 2641 |   } | 
 | 2642 |   NewMIs.push_back(DataMI); | 
 | 2643 |  | 
 | 2644 |   // Emit the store instruction. | 
 | 2645 |   if (UnfoldStore) { | 
| Chris Lattner | cb778a8 | 2009-07-29 21:10:12 +0000 | [diff] [blame] | 2646 |     const TargetRegisterClass *DstRC = TID.OpInfo[0].getRegClass(&RI); | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2647 |     std::pair<MachineInstr::mmo_iterator, | 
 | 2648 |               MachineInstr::mmo_iterator> MMOs = | 
 | 2649 |       MF.extractStoreMemRefs(MI->memoperands_begin(), | 
 | 2650 |                              MI->memoperands_end()); | 
 | 2651 |     storeRegToAddr(MF, Reg, true, AddrOps, DstRC, MMOs.first, MMOs.second, NewMIs); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2652 |   } | 
 | 2653 |  | 
 | 2654 |   return true; | 
 | 2655 | } | 
 | 2656 |  | 
 | 2657 | bool | 
 | 2658 | X86InstrInfo::unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N, | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 2659 |                                   SmallVectorImpl<SDNode*> &NewNodes) const { | 
| Dan Gohman | e8be6c6 | 2008-07-17 19:10:17 +0000 | [diff] [blame] | 2660 |   if (!N->isMachineOpcode()) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2661 |     return false; | 
 | 2662 |  | 
| Jeffrey Yasskin | 81cf432 | 2009-11-10 01:02:17 +0000 | [diff] [blame] | 2663 |   DenseMap<unsigned*, std::pair<unsigned,unsigned> >::const_iterator I = | 
| Dan Gohman | e8be6c6 | 2008-07-17 19:10:17 +0000 | [diff] [blame] | 2664 |     MemOp2RegOpTable.find((unsigned*)N->getMachineOpcode()); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2665 |   if (I == MemOp2RegOpTable.end()) | 
 | 2666 |     return false; | 
 | 2667 |   unsigned Opc = I->second.first; | 
 | 2668 |   unsigned Index = I->second.second & 0xf; | 
 | 2669 |   bool FoldedLoad = I->second.second & (1 << 4); | 
 | 2670 |   bool FoldedStore = I->second.second & (1 << 5); | 
| Chris Lattner | 749c6f6 | 2008-01-07 07:27:27 +0000 | [diff] [blame] | 2671 |   const TargetInstrDesc &TID = get(Opc); | 
| Chris Lattner | cb778a8 | 2009-07-29 21:10:12 +0000 | [diff] [blame] | 2672 |   const TargetRegisterClass *RC = TID.OpInfo[Index].getRegClass(&RI); | 
| Dan Gohman | b37a820 | 2009-03-04 19:23:38 +0000 | [diff] [blame] | 2673 |   unsigned NumDefs = TID.NumDefs; | 
| Dan Gohman | 475871a | 2008-07-27 21:46:04 +0000 | [diff] [blame] | 2674 |   std::vector<SDValue> AddrOps; | 
 | 2675 |   std::vector<SDValue> BeforeOps; | 
 | 2676 |   std::vector<SDValue> AfterOps; | 
| Dale Johannesen | ed2eee6 | 2009-02-06 01:31:28 +0000 | [diff] [blame] | 2677 |   DebugLoc dl = N->getDebugLoc(); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2678 |   unsigned NumOps = N->getNumOperands(); | 
| Dan Gohman | c76909a | 2009-09-25 20:36:54 +0000 | [diff] [blame] | 2679 |   for (unsigned i = 0; i != NumOps-1; ++i) { | 
| Dan Gohman | 475871a | 2008-07-27 21:46:04 +0000 | [diff] [blame] | 2680 |     SDValue Op = N->getOperand(i); | 
| Rafael Espindola | 705d800 | 2009-03-27 15:57:50 +0000 | [diff] [blame] | 2681 |     if (i >= Index-NumDefs && i < Index-NumDefs + X86AddrNumOperands) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2682 |       AddrOps.push_back(Op); | 
| Dan Gohman | b37a820 | 2009-03-04 19:23:38 +0000 | [diff] [blame] | 2683 |     else if (i < Index-NumDefs) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2684 |       BeforeOps.push_back(Op); | 
| Dan Gohman | b37a820 | 2009-03-04 19:23:38 +0000 | [diff] [blame] | 2685 |     else if (i > Index-NumDefs) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2686 |       AfterOps.push_back(Op); | 
 | 2687 |   } | 
| Dan Gohman | 475871a | 2008-07-27 21:46:04 +0000 | [diff] [blame] | 2688 |   SDValue Chain = N->getOperand(NumOps-1); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2689 |   AddrOps.push_back(Chain); | 
 | 2690 |  | 
 | 2691 |   // Emit the load instruction. | 
 | 2692 |   SDNode *Load = 0; | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2693 |   MachineFunction &MF = DAG.getMachineFunction(); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2694 |   if (FoldedLoad) { | 
| Owen Anderson | e50ed30 | 2009-08-10 22:56:29 +0000 | [diff] [blame] | 2695 |     EVT VT = *RC->vt_begin(); | 
| Evan Cheng | 600c043 | 2009-11-16 21:56:03 +0000 | [diff] [blame] | 2696 |     std::pair<MachineInstr::mmo_iterator, | 
 | 2697 |               MachineInstr::mmo_iterator> MMOs = | 
 | 2698 |       MF.extractLoadMemRefs(cast<MachineSDNode>(N)->memoperands_begin(), | 
 | 2699 |                             cast<MachineSDNode>(N)->memoperands_end()); | 
 | 2700 |     bool isAligned = (*MMOs.first)->getAlignment() >= 16; | 
| Dan Gohman | 602b0c8 | 2009-09-25 18:54:59 +0000 | [diff] [blame] | 2701 |     Load = DAG.getMachineNode(getLoadRegOpcode(0, RC, isAligned, TM), dl, | 
 | 2702 |                               VT, MVT::Other, &AddrOps[0], AddrOps.size()); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2703 |     NewNodes.push_back(Load); | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2704 |  | 
 | 2705 |     // Preserve memory reference information. | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2706 |     cast<MachineSDNode>(Load)->setMemRefs(MMOs.first, MMOs.second); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2707 |   } | 
 | 2708 |  | 
 | 2709 |   // Emit the data processing instruction. | 
| Owen Anderson | e50ed30 | 2009-08-10 22:56:29 +0000 | [diff] [blame] | 2710 |   std::vector<EVT> VTs; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2711 |   const TargetRegisterClass *DstRC = 0; | 
| Chris Lattner | 349c495 | 2008-01-07 03:13:06 +0000 | [diff] [blame] | 2712 |   if (TID.getNumDefs() > 0) { | 
| Chris Lattner | cb778a8 | 2009-07-29 21:10:12 +0000 | [diff] [blame] | 2713 |     DstRC = TID.OpInfo[0].getRegClass(&RI); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2714 |     VTs.push_back(*DstRC->vt_begin()); | 
 | 2715 |   } | 
 | 2716 |   for (unsigned i = 0, e = N->getNumValues(); i != e; ++i) { | 
| Owen Anderson | e50ed30 | 2009-08-10 22:56:29 +0000 | [diff] [blame] | 2717 |     EVT VT = N->getValueType(i); | 
| Owen Anderson | 825b72b | 2009-08-11 20:47:22 +0000 | [diff] [blame] | 2718 |     if (VT != MVT::Other && i >= (unsigned)TID.getNumDefs()) | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2719 |       VTs.push_back(VT); | 
 | 2720 |   } | 
 | 2721 |   if (Load) | 
| Dan Gohman | 475871a | 2008-07-27 21:46:04 +0000 | [diff] [blame] | 2722 |     BeforeOps.push_back(SDValue(Load, 0)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2723 |   std::copy(AfterOps.begin(), AfterOps.end(), std::back_inserter(BeforeOps)); | 
| Dan Gohman | 602b0c8 | 2009-09-25 18:54:59 +0000 | [diff] [blame] | 2724 |   SDNode *NewNode= DAG.getMachineNode(Opc, dl, VTs, &BeforeOps[0], | 
 | 2725 |                                       BeforeOps.size()); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2726 |   NewNodes.push_back(NewNode); | 
 | 2727 |  | 
 | 2728 |   // Emit the store instruction. | 
 | 2729 |   if (FoldedStore) { | 
 | 2730 |     AddrOps.pop_back(); | 
| Dan Gohman | 475871a | 2008-07-27 21:46:04 +0000 | [diff] [blame] | 2731 |     AddrOps.push_back(SDValue(NewNode, 0)); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2732 |     AddrOps.push_back(Chain); | 
| Evan Cheng | 600c043 | 2009-11-16 21:56:03 +0000 | [diff] [blame] | 2733 |     std::pair<MachineInstr::mmo_iterator, | 
 | 2734 |               MachineInstr::mmo_iterator> MMOs = | 
 | 2735 |       MF.extractStoreMemRefs(cast<MachineSDNode>(N)->memoperands_begin(), | 
 | 2736 |                              cast<MachineSDNode>(N)->memoperands_end()); | 
 | 2737 |     bool isAligned = (*MMOs.first)->getAlignment() >= 16; | 
| Dan Gohman | 602b0c8 | 2009-09-25 18:54:59 +0000 | [diff] [blame] | 2738 |     SDNode *Store = DAG.getMachineNode(getStoreRegOpcode(0, DstRC, | 
 | 2739 |                                                          isAligned, TM), | 
 | 2740 |                                        dl, MVT::Other, | 
 | 2741 |                                        &AddrOps[0], AddrOps.size()); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2742 |     NewNodes.push_back(Store); | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2743 |  | 
 | 2744 |     // Preserve memory reference information. | 
| Dan Gohman | 91e69c3 | 2009-10-09 18:10:05 +0000 | [diff] [blame] | 2745 |     cast<MachineSDNode>(Load)->setMemRefs(MMOs.first, MMOs.second); | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2746 |   } | 
 | 2747 |  | 
 | 2748 |   return true; | 
 | 2749 | } | 
 | 2750 |  | 
 | 2751 | unsigned X86InstrInfo::getOpcodeAfterMemoryUnfold(unsigned Opc, | 
| Dan Gohman | 0115e16 | 2009-10-30 22:18:41 +0000 | [diff] [blame] | 2752 |                                       bool UnfoldLoad, bool UnfoldStore, | 
 | 2753 |                                       unsigned *LoadRegIndex) const { | 
| Jeffrey Yasskin | 81cf432 | 2009-11-10 01:02:17 +0000 | [diff] [blame] | 2754 |   DenseMap<unsigned*, std::pair<unsigned,unsigned> >::const_iterator I = | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2755 |     MemOp2RegOpTable.find((unsigned*)Opc); | 
 | 2756 |   if (I == MemOp2RegOpTable.end()) | 
 | 2757 |     return 0; | 
 | 2758 |   bool FoldedLoad = I->second.second & (1 << 4); | 
 | 2759 |   bool FoldedStore = I->second.second & (1 << 5); | 
 | 2760 |   if (UnfoldLoad && !FoldedLoad) | 
 | 2761 |     return 0; | 
 | 2762 |   if (UnfoldStore && !FoldedStore) | 
 | 2763 |     return 0; | 
| Dan Gohman | 0115e16 | 2009-10-30 22:18:41 +0000 | [diff] [blame] | 2764 |   if (LoadRegIndex) | 
 | 2765 |     *LoadRegIndex = I->second.second & 0xf; | 
| Owen Anderson | 43dbe05 | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 2766 |   return I->second.first; | 
 | 2767 | } | 
 | 2768 |  | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 2769 | bool X86InstrInfo:: | 
| Owen Anderson | 44eb65c | 2008-08-14 22:49:33 +0000 | [diff] [blame] | 2770 | ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const { | 
| Chris Lattner | 9cd6875 | 2006-10-21 05:52:40 +0000 | [diff] [blame] | 2771 |   assert(Cond.size() == 1 && "Invalid X86 branch condition!"); | 
| Evan Cheng | 97af60b | 2008-08-29 23:21:31 +0000 | [diff] [blame] | 2772 |   X86::CondCode CC = static_cast<X86::CondCode>(Cond[0].getImm()); | 
| Dan Gohman | 279c22e | 2008-10-21 03:29:32 +0000 | [diff] [blame] | 2773 |   if (CC == X86::COND_NE_OR_P || CC == X86::COND_NP_OR_E) | 
 | 2774 |     return true; | 
| Evan Cheng | 97af60b | 2008-08-29 23:21:31 +0000 | [diff] [blame] | 2775 |   Cond[0].setImm(GetOppositeBranchCondition(CC)); | 
| Chris Lattner | 9cd6875 | 2006-10-21 05:52:40 +0000 | [diff] [blame] | 2776 |   return false; | 
| Chris Lattner | 7fbe972 | 2006-10-20 17:42:20 +0000 | [diff] [blame] | 2777 | } | 
 | 2778 |  | 
| Evan Cheng | 2306628 | 2008-10-27 07:14:50 +0000 | [diff] [blame] | 2779 | bool X86InstrInfo:: | 
| Evan Cheng | 4350eb8 | 2009-02-06 17:17:30 +0000 | [diff] [blame] | 2780 | isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const { | 
 | 2781 |   // FIXME: Return false for x87 stack register classes for now. We can't | 
| Evan Cheng | 2306628 | 2008-10-27 07:14:50 +0000 | [diff] [blame] | 2782 |   // allow any loads of these registers before FpGet_ST0_80. | 
| Evan Cheng | 4350eb8 | 2009-02-06 17:17:30 +0000 | [diff] [blame] | 2783 |   return !(RC == &X86::CCRRegClass || RC == &X86::RFP32RegClass || | 
 | 2784 |            RC == &X86::RFP64RegClass || RC == &X86::RFP80RegClass); | 
| Evan Cheng | 2306628 | 2008-10-27 07:14:50 +0000 | [diff] [blame] | 2785 | } | 
 | 2786 |  | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2787 | unsigned X86InstrInfo::sizeOfImm(const TargetInstrDesc *Desc) { | 
 | 2788 |   switch (Desc->TSFlags & X86II::ImmMask) { | 
 | 2789 |   case X86II::Imm8:   return 1; | 
 | 2790 |   case X86II::Imm16:  return 2; | 
 | 2791 |   case X86II::Imm32:  return 4; | 
 | 2792 |   case X86II::Imm64:  return 8; | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 2793 |   default: llvm_unreachable("Immediate size not set!"); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2794 |     return 0; | 
 | 2795 |   } | 
 | 2796 | } | 
 | 2797 |  | 
 | 2798 | /// isX86_64ExtendedReg - Is the MachineOperand a x86-64 extended register? | 
 | 2799 | /// e.g. r8, xmm8, etc. | 
 | 2800 | bool X86InstrInfo::isX86_64ExtendedReg(const MachineOperand &MO) { | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2801 |   if (!MO.isReg()) return false; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2802 |   switch (MO.getReg()) { | 
 | 2803 |   default: break; | 
 | 2804 |   case X86::R8:    case X86::R9:    case X86::R10:   case X86::R11: | 
 | 2805 |   case X86::R12:   case X86::R13:   case X86::R14:   case X86::R15: | 
 | 2806 |   case X86::R8D:   case X86::R9D:   case X86::R10D:  case X86::R11D: | 
 | 2807 |   case X86::R12D:  case X86::R13D:  case X86::R14D:  case X86::R15D: | 
 | 2808 |   case X86::R8W:   case X86::R9W:   case X86::R10W:  case X86::R11W: | 
 | 2809 |   case X86::R12W:  case X86::R13W:  case X86::R14W:  case X86::R15W: | 
 | 2810 |   case X86::R8B:   case X86::R9B:   case X86::R10B:  case X86::R11B: | 
 | 2811 |   case X86::R12B:  case X86::R13B:  case X86::R14B:  case X86::R15B: | 
 | 2812 |   case X86::XMM8:  case X86::XMM9:  case X86::XMM10: case X86::XMM11: | 
 | 2813 |   case X86::XMM12: case X86::XMM13: case X86::XMM14: case X86::XMM15: | 
 | 2814 |     return true; | 
 | 2815 |   } | 
 | 2816 |   return false; | 
 | 2817 | } | 
 | 2818 |  | 
 | 2819 |  | 
/// determineREX - Determine if the MachineInstr has to be encoded with a X86-64
/// REX prefix which specifies 1) 64-bit instructions, 2) non-default operand
/// size, and 3) use of X86-64 extended registers.
unsigned X86InstrInfo::determineREX(const MachineInstr &MI) {
  // REX bit layout (low nibble of the 0x40..0x4F prefix byte):
  //   bit 0 = REX.B (extends ModRM r/m, SIB base, or opcode reg field)
  //   bit 1 = REX.X (extends SIB index field)
  //   bit 2 = REX.R (extends ModRM reg field)
  //   bit 3 = REX.W (64-bit operand size)
  unsigned REX = 0;
  const TargetInstrDesc &Desc = MI.getDesc();

  // Pseudo instructions do not need REX prefix byte.
  if ((Desc.TSFlags & X86II::FormMask) == X86II::Pseudo)
    return 0;
  if (Desc.TSFlags & X86II::REX_W)
    REX |= 1 << 3;

  unsigned NumOps = Desc.getNumOperands();
  if (NumOps) {
    // With a two-address instruction, operand 0 is tied to operand 1 and
    // shares its encoding, so operand scans start at 1.
    bool isTwoAddr = NumOps > 1 &&
      Desc.getOperandConstraint(1, TOI::TIED_TO) != -1;

    // If it accesses SPL, BPL, SIL, or DIL, then it requires a 0x40 REX prefix.
    unsigned i = isTwoAddr ? 1 : 0;
    for (unsigned e = NumOps; i != e; ++i) {
      const MachineOperand& MO = MI.getOperand(i);
      if (MO.isReg()) {
        unsigned Reg = MO.getReg();
        if (isX86_64NonExtLowByteReg(Reg))
          REX |= 0x40;
      }
    }

    // Set the extension bits according to where each operand lands in the
    // encoding for this instruction form.
    switch (Desc.TSFlags & X86II::FormMask) {
    case X86II::MRMInitReg:
      // The single register appears in both the reg and r/m fields.
      if (isX86_64ExtendedReg(MI.getOperand(0)))
        REX |= (1 << 0) | (1 << 2);
      break;
    case X86II::MRMSrcReg: {
      // Operand 0 goes in the reg field (REX.R); sources go in r/m (REX.B).
      if (isX86_64ExtendedReg(MI.getOperand(0)))
        REX |= 1 << 2;
      i = isTwoAddr ? 2 : 1;
      for (unsigned e = NumOps; i != e; ++i) {
        const MachineOperand& MO = MI.getOperand(i);
        if (isX86_64ExtendedReg(MO))
          REX |= 1 << 0;
      }
      break;
    }
    case X86II::MRMSrcMem: {
      if (isX86_64ExtendedReg(MI.getOperand(0)))
        REX |= 1 << 2;
      // Successive register operands of the address map to REX.B (bit 0,
      // base) then REX.X (bit 1, index).
      unsigned Bit = 0;
      i = isTwoAddr ? 2 : 1;
      for (; i != NumOps; ++i) {
        const MachineOperand& MO = MI.getOperand(i);
        if (MO.isReg()) {
          if (isX86_64ExtendedReg(MO))
            REX |= 1 << Bit;
          Bit++;
        }
      }
      break;
    }
    case X86II::MRM0m: case X86II::MRM1m:
    case X86II::MRM2m: case X86II::MRM3m:
    case X86II::MRM4m: case X86II::MRM5m:
    case X86II::MRM6m: case X86II::MRM7m:
    case X86II::MRMDestMem: {
      // Memory forms: the address occupies the first X86AddrNumOperands
      // operands (plus the tied operand, if any); a register source may
      // follow at index e and lands in the reg field (REX.R).
      unsigned e = (isTwoAddr ? X86AddrNumOperands+1 : X86AddrNumOperands);
      i = isTwoAddr ? 1 : 0;
      if (NumOps > e && isX86_64ExtendedReg(MI.getOperand(e)))
        REX |= 1 << 2;
      // Base then index registers map to REX.B then REX.X, as above.
      unsigned Bit = 0;
      for (; i != e; ++i) {
        const MachineOperand& MO = MI.getOperand(i);
        if (MO.isReg()) {
          if (isX86_64ExtendedReg(MO))
            REX |= 1 << Bit;
          Bit++;
        }
      }
      break;
    }
    default: {
      // Remaining forms: operand 0 is in the r/m field (REX.B); later
      // register operands land in the reg field (REX.R).
      if (isX86_64ExtendedReg(MI.getOperand(0)))
        REX |= 1 << 0;
      i = isTwoAddr ? 2 : 1;
      for (unsigned e = NumOps; i != e; ++i) {
        const MachineOperand& MO = MI.getOperand(i);
        if (isX86_64ExtendedReg(MO))
          REX |= 1 << 2;
      }
      break;
    }
    }
  }
  return REX;
}
 | 2915 |  | 
 | 2916 | /// sizePCRelativeBlockAddress - This method returns the size of a PC | 
 | 2917 | /// relative block address instruction | 
 | 2918 | /// | 
 | 2919 | static unsigned sizePCRelativeBlockAddress() { | 
 | 2920 |   return 4; | 
 | 2921 | } | 
 | 2922 |  | 
 | 2923 | /// sizeGlobalAddress - Give the size of the emission of this global address | 
 | 2924 | /// | 
 | 2925 | static unsigned sizeGlobalAddress(bool dword) { | 
 | 2926 |   return dword ? 8 : 4; | 
 | 2927 | } | 
 | 2928 |  | 
 | 2929 | /// sizeConstPoolAddress - Give the size of the emission of this constant | 
 | 2930 | /// pool address | 
 | 2931 | /// | 
 | 2932 | static unsigned sizeConstPoolAddress(bool dword) { | 
 | 2933 |   return dword ? 8 : 4; | 
 | 2934 | } | 
 | 2935 |  | 
 | 2936 | /// sizeExternalSymbolAddress - Give the size of the emission of this external | 
 | 2937 | /// symbol | 
 | 2938 | /// | 
 | 2939 | static unsigned sizeExternalSymbolAddress(bool dword) { | 
 | 2940 |   return dword ? 8 : 4; | 
 | 2941 | } | 
 | 2942 |  | 
 | 2943 | /// sizeJumpTableAddress - Give the size of the emission of this jump | 
 | 2944 | /// table address | 
 | 2945 | /// | 
 | 2946 | static unsigned sizeJumpTableAddress(bool dword) { | 
 | 2947 |   return dword ? 8 : 4; | 
 | 2948 | } | 
 | 2949 |  | 
 | 2950 | static unsigned sizeConstant(unsigned Size) { | 
 | 2951 |   return Size; | 
 | 2952 | } | 
 | 2953 |  | 
 | 2954 | static unsigned sizeRegModRMByte(){ | 
 | 2955 |   return 1; | 
 | 2956 | } | 
 | 2957 |  | 
 | 2958 | static unsigned sizeSIBByte(){ | 
 | 2959 |   return 1; | 
 | 2960 | } | 
 | 2961 |  | 
 | 2962 | static unsigned getDisplacementFieldSize(const MachineOperand *RelocOp) { | 
 | 2963 |   unsigned FinalSize = 0; | 
 | 2964 |   // If this is a simple integer displacement that doesn't require a relocation. | 
 | 2965 |   if (!RelocOp) { | 
 | 2966 |     FinalSize += sizeConstant(4); | 
 | 2967 |     return FinalSize; | 
 | 2968 |   } | 
 | 2969 |    | 
 | 2970 |   // Otherwise, this is something that requires a relocation. | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2971 |   if (RelocOp->isGlobal()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2972 |     FinalSize += sizeGlobalAddress(false); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2973 |   } else if (RelocOp->isCPI()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2974 |     FinalSize += sizeConstPoolAddress(false); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 2975 |   } else if (RelocOp->isJTI()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2976 |     FinalSize += sizeJumpTableAddress(false); | 
 | 2977 |   } else { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 2978 |     llvm_unreachable("Unknown value to relocate!"); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 2979 |   } | 
 | 2980 |   return FinalSize; | 
 | 2981 | } | 
 | 2982 |  | 
// getMemModRMByteSize - Return the number of bytes needed to encode the
// ModR/M byte, optional SIB byte, and displacement for the x86 memory
// operand that begins at operand index Op of MI.
static unsigned getMemModRMByteSize(const MachineInstr &MI, unsigned Op,
                                    bool IsPIC, bool Is64BitMode) {
  // Op+3 is the displacement component of the 5-operand x86 address.
  const MachineOperand &Op3 = MI.getOperand(Op+3);
  // DispVal is only tested for zero/nonzero below; 1 just marks "present".
  int DispVal = 0;
  const MachineOperand *DispForReloc = 0;
  unsigned FinalSize = 0;
  
  // Figure out what sort of displacement we have to handle here.
  if (Op3.isGlobal()) {
    // Global addresses always take a relocation.
    DispForReloc = &Op3;
  } else if (Op3.isCPI()) {
    // Constant-pool references are relocated in 64-bit or PIC mode only.
    if (Is64BitMode || IsPIC) {
      DispForReloc = &Op3;
    } else {
      DispVal = 1;
    }
  } else if (Op3.isJTI()) {
    // Jump-table references get the same treatment as constant-pool ones.
    if (Is64BitMode || IsPIC) {
      DispForReloc = &Op3;
    } else {
      DispVal = 1; 
    }
  } else {
    // Plain immediate displacement.
    DispVal = 1;
  }

  const MachineOperand &Base     = MI.getOperand(Op);
  const MachineOperand &IndexReg = MI.getOperand(Op+2);

  unsigned BaseReg = Base.getReg();

  // Is a SIB byte needed?
  if ((!Is64BitMode || DispForReloc || BaseReg != 0) &&
      IndexReg.getReg() == 0 &&
      (BaseReg == 0 || X86RegisterInfo::getX86RegNum(BaseReg) != N86::ESP)) {      
    if (BaseReg == 0) {  // Just a displacement?
      // Emit special case [disp32] encoding
      ++FinalSize; 
      FinalSize += getDisplacementFieldSize(DispForReloc);
    } else {
      unsigned BaseRegNo = X86RegisterInfo::getX86RegNum(BaseReg);
      if (!DispForReloc && DispVal == 0 && BaseRegNo != N86::EBP) {
        // Emit simple indirect register encoding... [EAX] f.e.
        ++FinalSize;
      // Be pessimistic and assume it's a disp32, not a disp8
      } else {
        // Emit the most general non-SIB encoding: [REG+disp32]
        ++FinalSize;
        FinalSize += getDisplacementFieldSize(DispForReloc);
      }
    }

  } else {  // We need a SIB byte, so start by outputting the ModR/M byte first
    assert(IndexReg.getReg() != X86::ESP &&
           IndexReg.getReg() != X86::RSP && "Cannot use ESP as index reg!");

    bool ForceDisp32 = false;
    if (BaseReg == 0 || DispForReloc) {
      // Emit the normal disp32 encoding.
      ++FinalSize;
      ForceDisp32 = true;
    } else {
      // ModR/M byte with a SIB escape but no forced disp32.
      ++FinalSize;
    }

    FinalSize += sizeSIBByte();

    // Do we need to output a displacement?
    if (DispVal != 0 || ForceDisp32) {
      FinalSize += getDisplacementFieldSize(DispForReloc);
    }
  }
  return FinalSize;
}
 | 3057 |  | 
 | 3058 |  | 
 | 3059 | static unsigned GetInstSizeWithDesc(const MachineInstr &MI, | 
 | 3060 |                                     const TargetInstrDesc *Desc, | 
 | 3061 |                                     bool IsPIC, bool Is64BitMode) { | 
 | 3062 |    | 
 | 3063 |   unsigned Opcode = Desc->Opcode; | 
 | 3064 |   unsigned FinalSize = 0; | 
 | 3065 |  | 
 | 3066 |   // Emit the lock opcode prefix as needed. | 
 | 3067 |   if (Desc->TSFlags & X86II::LOCK) ++FinalSize; | 
 | 3068 |  | 
| Bill Wendling | 2265ba0 | 2009-05-28 23:40:46 +0000 | [diff] [blame] | 3069 |   // Emit segment override opcode prefix as needed. | 
| Anton Korobeynikov | d21a630 | 2008-10-12 10:30:11 +0000 | [diff] [blame] | 3070 |   switch (Desc->TSFlags & X86II::SegOvrMask) { | 
 | 3071 |   case X86II::FS: | 
 | 3072 |   case X86II::GS: | 
 | 3073 |    ++FinalSize; | 
 | 3074 |    break; | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 3075 |   default: llvm_unreachable("Invalid segment!"); | 
| Anton Korobeynikov | d21a630 | 2008-10-12 10:30:11 +0000 | [diff] [blame] | 3076 |   case 0: break;  // No segment override! | 
 | 3077 |   } | 
 | 3078 |  | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3079 |   // Emit the repeat opcode prefix as needed. | 
 | 3080 |   if ((Desc->TSFlags & X86II::Op0Mask) == X86II::REP) ++FinalSize; | 
 | 3081 |  | 
 | 3082 |   // Emit the operand size opcode prefix as needed. | 
 | 3083 |   if (Desc->TSFlags & X86II::OpSize) ++FinalSize; | 
 | 3084 |  | 
 | 3085 |   // Emit the address size opcode prefix as needed. | 
 | 3086 |   if (Desc->TSFlags & X86II::AdSize) ++FinalSize; | 
 | 3087 |  | 
 | 3088 |   bool Need0FPrefix = false; | 
 | 3089 |   switch (Desc->TSFlags & X86II::Op0Mask) { | 
 | 3090 |   case X86II::TB:  // Two-byte opcode prefix | 
 | 3091 |   case X86II::T8:  // 0F 38 | 
 | 3092 |   case X86II::TA:  // 0F 3A | 
 | 3093 |     Need0FPrefix = true; | 
 | 3094 |     break; | 
| Eric Christopher | b4dc13c | 2009-08-08 21:55:08 +0000 | [diff] [blame] | 3095 |   case X86II::TF: // F2 0F 38 | 
 | 3096 |     ++FinalSize; | 
 | 3097 |     Need0FPrefix = true; | 
 | 3098 |     break; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3099 |   case X86II::REP: break; // already handled. | 
 | 3100 |   case X86II::XS:   // F3 0F | 
 | 3101 |     ++FinalSize; | 
 | 3102 |     Need0FPrefix = true; | 
 | 3103 |     break; | 
 | 3104 |   case X86II::XD:   // F2 0F | 
 | 3105 |     ++FinalSize; | 
 | 3106 |     Need0FPrefix = true; | 
 | 3107 |     break; | 
 | 3108 |   case X86II::D8: case X86II::D9: case X86II::DA: case X86II::DB: | 
 | 3109 |   case X86II::DC: case X86II::DD: case X86II::DE: case X86II::DF: | 
 | 3110 |     ++FinalSize; | 
 | 3111 |     break; // Two-byte opcode prefix | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 3112 |   default: llvm_unreachable("Invalid prefix!"); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3113 |   case 0: break;  // No prefix! | 
 | 3114 |   } | 
 | 3115 |  | 
 | 3116 |   if (Is64BitMode) { | 
 | 3117 |     // REX prefix | 
 | 3118 |     unsigned REX = X86InstrInfo::determineREX(MI); | 
 | 3119 |     if (REX) | 
 | 3120 |       ++FinalSize; | 
 | 3121 |   } | 
 | 3122 |  | 
 | 3123 |   // 0x0F escape code must be emitted just before the opcode. | 
 | 3124 |   if (Need0FPrefix) | 
 | 3125 |     ++FinalSize; | 
 | 3126 |  | 
 | 3127 |   switch (Desc->TSFlags & X86II::Op0Mask) { | 
 | 3128 |   case X86II::T8:  // 0F 38 | 
 | 3129 |     ++FinalSize; | 
 | 3130 |     break; | 
| Bill Wendling | 2265ba0 | 2009-05-28 23:40:46 +0000 | [diff] [blame] | 3131 |   case X86II::TA:  // 0F 3A | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3132 |     ++FinalSize; | 
 | 3133 |     break; | 
| Eric Christopher | b4dc13c | 2009-08-08 21:55:08 +0000 | [diff] [blame] | 3134 |   case X86II::TF: // F2 0F 38 | 
 | 3135 |     ++FinalSize; | 
 | 3136 |     break; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3137 |   } | 
 | 3138 |  | 
 | 3139 |   // If this is a two-address instruction, skip one of the register operands. | 
 | 3140 |   unsigned NumOps = Desc->getNumOperands(); | 
 | 3141 |   unsigned CurOp = 0; | 
 | 3142 |   if (NumOps > 1 && Desc->getOperandConstraint(1, TOI::TIED_TO) != -1) | 
 | 3143 |     CurOp++; | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3144 |   else if (NumOps > 2 && Desc->getOperandConstraint(NumOps-1, TOI::TIED_TO)== 0) | 
 | 3145 |     // Skip the last source operand that is tied_to the dest reg. e.g. LXADD32 | 
 | 3146 |     --NumOps; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3147 |  | 
 | 3148 |   switch (Desc->TSFlags & X86II::FormMask) { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 3149 |   default: llvm_unreachable("Unknown FormMask value in X86 MachineCodeEmitter!"); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3150 |   case X86II::Pseudo: | 
 | 3151 |     // Remember the current PC offset, this is the PIC relocation | 
 | 3152 |     // base address. | 
 | 3153 |     switch (Opcode) { | 
 | 3154 |     default:  | 
 | 3155 |       break; | 
 | 3156 |     case TargetInstrInfo::INLINEASM: { | 
 | 3157 |       const MachineFunction *MF = MI.getParent()->getParent(); | 
| Chris Lattner | d90183d | 2009-08-02 05:20:37 +0000 | [diff] [blame] | 3158 |       const TargetInstrInfo &TII = *MF->getTarget().getInstrInfo(); | 
 | 3159 |       FinalSize += TII.getInlineAsmLength(MI.getOperand(0).getSymbolName(), | 
| Chris Lattner | af76e59 | 2009-08-22 20:48:53 +0000 | [diff] [blame] | 3160 |                                           *MF->getTarget().getMCAsmInfo()); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3161 |       break; | 
 | 3162 |     } | 
| Dan Gohman | 4406604 | 2008-07-01 00:05:16 +0000 | [diff] [blame] | 3163 |     case TargetInstrInfo::DBG_LABEL: | 
 | 3164 |     case TargetInstrInfo::EH_LABEL: | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3165 |       break; | 
 | 3166 |     case TargetInstrInfo::IMPLICIT_DEF: | 
| Jakob Stoklund Olesen | 26207e5 | 2009-09-28 20:32:26 +0000 | [diff] [blame] | 3167 |     case TargetInstrInfo::KILL: | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3168 |     case X86::FP_REG_KILL: | 
 | 3169 |       break; | 
 | 3170 |     case X86::MOVPC32r: { | 
 | 3171 |       // This emits the "call" portion of this pseudo instruction. | 
 | 3172 |       ++FinalSize; | 
 | 3173 |       FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
 | 3174 |       break; | 
 | 3175 |     } | 
 | 3176 |     } | 
 | 3177 |     CurOp = NumOps; | 
 | 3178 |     break; | 
 | 3179 |   case X86II::RawFrm: | 
 | 3180 |     ++FinalSize; | 
 | 3181 |  | 
 | 3182 |     if (CurOp != NumOps) { | 
 | 3183 |       const MachineOperand &MO = MI.getOperand(CurOp++); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3184 |       if (MO.isMBB()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3185 |         FinalSize += sizePCRelativeBlockAddress(); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3186 |       } else if (MO.isGlobal()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3187 |         FinalSize += sizeGlobalAddress(false); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3188 |       } else if (MO.isSymbol()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3189 |         FinalSize += sizeExternalSymbolAddress(false); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3190 |       } else if (MO.isImm()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3191 |         FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
 | 3192 |       } else { | 
| Torok Edwin | c23197a | 2009-07-14 16:55:14 +0000 | [diff] [blame] | 3193 |         llvm_unreachable("Unknown RawFrm operand!"); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3194 |       } | 
 | 3195 |     } | 
 | 3196 |     break; | 
 | 3197 |  | 
 | 3198 |   case X86II::AddRegFrm: | 
 | 3199 |     ++FinalSize; | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3200 |     ++CurOp; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3201 |      | 
 | 3202 |     if (CurOp != NumOps) { | 
 | 3203 |       const MachineOperand &MO1 = MI.getOperand(CurOp++); | 
 | 3204 |       unsigned Size = X86InstrInfo::sizeOfImm(Desc); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3205 |       if (MO1.isImm()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3206 |         FinalSize += sizeConstant(Size); | 
 | 3207 |       else { | 
 | 3208 |         bool dword = false; | 
 | 3209 |         if (Opcode == X86::MOV64ri) | 
 | 3210 |           dword = true;  | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3211 |         if (MO1.isGlobal()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3212 |           FinalSize += sizeGlobalAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3213 |         } else if (MO1.isSymbol()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3214 |           FinalSize += sizeExternalSymbolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3215 |         else if (MO1.isCPI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3216 |           FinalSize += sizeConstPoolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3217 |         else if (MO1.isJTI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3218 |           FinalSize += sizeJumpTableAddress(dword); | 
 | 3219 |       } | 
 | 3220 |     } | 
 | 3221 |     break; | 
 | 3222 |  | 
 | 3223 |   case X86II::MRMDestReg: { | 
 | 3224 |     ++FinalSize;  | 
 | 3225 |     FinalSize += sizeRegModRMByte(); | 
 | 3226 |     CurOp += 2; | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3227 |     if (CurOp != NumOps) { | 
 | 3228 |       ++CurOp; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3229 |       FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3230 |     } | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3231 |     break; | 
 | 3232 |   } | 
 | 3233 |   case X86II::MRMDestMem: { | 
 | 3234 |     ++FinalSize; | 
 | 3235 |     FinalSize += getMemModRMByteSize(MI, CurOp, IsPIC, Is64BitMode); | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3236 |     CurOp +=  X86AddrNumOperands + 1; | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3237 |     if (CurOp != NumOps) { | 
 | 3238 |       ++CurOp; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3239 |       FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3240 |     } | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3241 |     break; | 
 | 3242 |   } | 
 | 3243 |  | 
 | 3244 |   case X86II::MRMSrcReg: | 
 | 3245 |     ++FinalSize; | 
 | 3246 |     FinalSize += sizeRegModRMByte(); | 
 | 3247 |     CurOp += 2; | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3248 |     if (CurOp != NumOps) { | 
 | 3249 |       ++CurOp; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3250 |       FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3251 |     } | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3252 |     break; | 
 | 3253 |  | 
 | 3254 |   case X86II::MRMSrcMem: { | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3255 |     int AddrOperands; | 
 | 3256 |     if (Opcode == X86::LEA64r || Opcode == X86::LEA64_32r || | 
 | 3257 |         Opcode == X86::LEA16r || Opcode == X86::LEA32r) | 
 | 3258 |       AddrOperands = X86AddrNumOperands - 1; // No segment register | 
 | 3259 |     else | 
 | 3260 |       AddrOperands = X86AddrNumOperands; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3261 |  | 
 | 3262 |     ++FinalSize; | 
 | 3263 |     FinalSize += getMemModRMByteSize(MI, CurOp+1, IsPIC, Is64BitMode); | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3264 |     CurOp += AddrOperands + 1; | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3265 |     if (CurOp != NumOps) { | 
 | 3266 |       ++CurOp; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3267 |       FinalSize += sizeConstant(X86InstrInfo::sizeOfImm(Desc)); | 
| Nicolas Geoffray | 546e36a | 2008-04-20 23:36:47 +0000 | [diff] [blame] | 3268 |     } | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3269 |     break; | 
 | 3270 |   } | 
 | 3271 |  | 
 | 3272 |   case X86II::MRM0r: case X86II::MRM1r: | 
 | 3273 |   case X86II::MRM2r: case X86II::MRM3r: | 
 | 3274 |   case X86II::MRM4r: case X86II::MRM5r: | 
 | 3275 |   case X86II::MRM6r: case X86II::MRM7r: | 
 | 3276 |     ++FinalSize; | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3277 |     if (Desc->getOpcode() == X86::LFENCE || | 
| Bill Wendling | 2265ba0 | 2009-05-28 23:40:46 +0000 | [diff] [blame] | 3278 |         Desc->getOpcode() == X86::MFENCE) { | 
 | 3279 |       // Special handling of lfence and mfence; | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3280 |       FinalSize += sizeRegModRMByte(); | 
| Bill Wendling | 2265ba0 | 2009-05-28 23:40:46 +0000 | [diff] [blame] | 3281 |     } else if (Desc->getOpcode() == X86::MONITOR || | 
 | 3282 |                Desc->getOpcode() == X86::MWAIT) { | 
 | 3283 |       // Special handling of monitor and mwait. | 
 | 3284 |       FinalSize += sizeRegModRMByte() + 1; // +1 for the opcode. | 
 | 3285 |     } else { | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3286 |       ++CurOp; | 
 | 3287 |       FinalSize += sizeRegModRMByte(); | 
 | 3288 |     } | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3289 |  | 
 | 3290 |     if (CurOp != NumOps) { | 
 | 3291 |       const MachineOperand &MO1 = MI.getOperand(CurOp++); | 
 | 3292 |       unsigned Size = X86InstrInfo::sizeOfImm(Desc); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3293 |       if (MO1.isImm()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3294 |         FinalSize += sizeConstant(Size); | 
 | 3295 |       else { | 
 | 3296 |         bool dword = false; | 
 | 3297 |         if (Opcode == X86::MOV64ri32) | 
 | 3298 |           dword = true; | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3299 |         if (MO1.isGlobal()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3300 |           FinalSize += sizeGlobalAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3301 |         } else if (MO1.isSymbol()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3302 |           FinalSize += sizeExternalSymbolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3303 |         else if (MO1.isCPI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3304 |           FinalSize += sizeConstPoolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3305 |         else if (MO1.isJTI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3306 |           FinalSize += sizeJumpTableAddress(dword); | 
 | 3307 |       } | 
 | 3308 |     } | 
 | 3309 |     break; | 
 | 3310 |  | 
 | 3311 |   case X86II::MRM0m: case X86II::MRM1m: | 
 | 3312 |   case X86II::MRM2m: case X86II::MRM3m: | 
 | 3313 |   case X86II::MRM4m: case X86II::MRM5m: | 
 | 3314 |   case X86II::MRM6m: case X86II::MRM7m: { | 
 | 3315 |      | 
 | 3316 |     ++FinalSize; | 
 | 3317 |     FinalSize += getMemModRMByteSize(MI, CurOp, IsPIC, Is64BitMode); | 
| Evan Cheng | b0030dd | 2009-05-04 22:49:16 +0000 | [diff] [blame] | 3318 |     CurOp += X86AddrNumOperands; | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3319 |  | 
 | 3320 |     if (CurOp != NumOps) { | 
 | 3321 |       const MachineOperand &MO = MI.getOperand(CurOp++); | 
 | 3322 |       unsigned Size = X86InstrInfo::sizeOfImm(Desc); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3323 |       if (MO.isImm()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3324 |         FinalSize += sizeConstant(Size); | 
 | 3325 |       else { | 
 | 3326 |         bool dword = false; | 
 | 3327 |         if (Opcode == X86::MOV64mi32) | 
 | 3328 |           dword = true; | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3329 |         if (MO.isGlobal()) { | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3330 |           FinalSize += sizeGlobalAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3331 |         } else if (MO.isSymbol()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3332 |           FinalSize += sizeExternalSymbolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3333 |         else if (MO.isCPI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3334 |           FinalSize += sizeConstPoolAddress(dword); | 
| Dan Gohman | d735b80 | 2008-10-03 15:45:36 +0000 | [diff] [blame] | 3335 |         else if (MO.isJTI()) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3336 |           FinalSize += sizeJumpTableAddress(dword); | 
 | 3337 |       } | 
 | 3338 |     } | 
 | 3339 |     break; | 
 | 3340 |   } | 
 | 3341 |  | 
 | 3342 |   case X86II::MRMInitReg: | 
 | 3343 |     ++FinalSize; | 
 | 3344 |     // Duplicate register, used by things like MOV8r0 (aka xor reg,reg). | 
 | 3345 |     FinalSize += sizeRegModRMByte(); | 
 | 3346 |     ++CurOp; | 
 | 3347 |     break; | 
 | 3348 |   } | 
 | 3349 |  | 
 | 3350 |   if (!Desc->isVariadic() && CurOp != NumOps) { | 
| Torok Edwin | ab7c09b | 2009-07-08 18:01:40 +0000 | [diff] [blame] | 3351 |     std::string msg; | 
 | 3352 |     raw_string_ostream Msg(msg); | 
 | 3353 |     Msg << "Cannot determine size: " << MI; | 
 | 3354 |     llvm_report_error(Msg.str()); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3355 |   } | 
 | 3356 |    | 
 | 3357 |  | 
 | 3358 |   return FinalSize; | 
 | 3359 | } | 
 | 3360 |  | 
 | 3361 |  | 
 | 3362 | unsigned X86InstrInfo::GetInstSizeInBytes(const MachineInstr *MI) const { | 
 | 3363 |   const TargetInstrDesc &Desc = MI->getDesc(); | 
| Chris Lattner | 84853a1 | 2009-07-10 20:53:38 +0000 | [diff] [blame] | 3364 |   bool IsPIC = TM.getRelocationModel() == Reloc::PIC_; | 
| Dan Gohman | c9f5f3f | 2008-05-14 01:58:56 +0000 | [diff] [blame] | 3365 |   bool Is64BitMode = TM.getSubtargetImpl()->is64Bit(); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3366 |   unsigned Size = GetInstSizeWithDesc(*MI, &Desc, IsPIC, Is64BitMode); | 
| Chris Lattner | b1fb84d | 2009-06-25 17:28:07 +0000 | [diff] [blame] | 3367 |   if (Desc.getOpcode() == X86::MOVPC32r) | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3368 |     Size += GetInstSizeWithDesc(*MI, &get(X86::POP32r), IsPIC, Is64BitMode); | 
| Nicolas Geoffray | 52e724a | 2008-04-16 20:10:13 +0000 | [diff] [blame] | 3369 |   return Size; | 
 | 3370 | } | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3371 |  | 
| Dan Gohman | 57c3dac | 2008-09-30 00:58:23 +0000 | [diff] [blame] | 3372 | /// getGlobalBaseReg - Return a virtual register initialized with the | 
 | 3373 | /// the global base register value. Output instructions required to | 
 | 3374 | /// initialize the register in the function entry block, if necessary. | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3375 | /// | 
| Dan Gohman | 57c3dac | 2008-09-30 00:58:23 +0000 | [diff] [blame] | 3376 | unsigned X86InstrInfo::getGlobalBaseReg(MachineFunction *MF) const { | 
 | 3377 |   assert(!TM.getSubtarget<X86Subtarget>().is64Bit() && | 
 | 3378 |          "X86-64 PIC uses RIP relative addressing"); | 
 | 3379 |  | 
 | 3380 |   X86MachineFunctionInfo *X86FI = MF->getInfo<X86MachineFunctionInfo>(); | 
 | 3381 |   unsigned GlobalBaseReg = X86FI->getGlobalBaseReg(); | 
 | 3382 |   if (GlobalBaseReg != 0) | 
 | 3383 |     return GlobalBaseReg; | 
 | 3384 |  | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3385 |   // Insert the set of GlobalBaseReg into the first MBB of the function | 
 | 3386 |   MachineBasicBlock &FirstMBB = MF->front(); | 
 | 3387 |   MachineBasicBlock::iterator MBBI = FirstMBB.begin(); | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 3388 |   DebugLoc DL = DebugLoc::getUnknownLoc(); | 
 | 3389 |   if (MBBI != FirstMBB.end()) DL = MBBI->getDebugLoc(); | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3390 |   MachineRegisterInfo &RegInfo = MF->getRegInfo(); | 
 | 3391 |   unsigned PC = RegInfo.createVirtualRegister(X86::GR32RegisterClass); | 
 | 3392 |    | 
 | 3393 |   const TargetInstrInfo *TII = TM.getInstrInfo(); | 
 | 3394 |   // Operand of MovePCtoStack is completely ignored by asm printer. It's | 
 | 3395 |   // only used in JIT code emission as displacement to pc. | 
| Chris Lattner | ac5e887 | 2009-06-25 17:38:33 +0000 | [diff] [blame] | 3396 |   BuildMI(FirstMBB, MBBI, DL, TII->get(X86::MOVPC32r), PC).addImm(0); | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3397 |    | 
 | 3398 |   // If we're using vanilla 'GOT' PIC style, we should use relative addressing | 
| Chris Lattner | ac5e887 | 2009-06-25 17:38:33 +0000 | [diff] [blame] | 3399 |   // not to pc, but to _GLOBAL_OFFSET_TABLE_ external. | 
| Chris Lattner | 15a380a | 2009-07-09 04:39:06 +0000 | [diff] [blame] | 3400 |   if (TM.getSubtarget<X86Subtarget>().isPICStyleGOT()) { | 
| Chris Lattner | ac5e887 | 2009-06-25 17:38:33 +0000 | [diff] [blame] | 3401 |     GlobalBaseReg = RegInfo.createVirtualRegister(X86::GR32RegisterClass); | 
 | 3402 |     // Generate addl $__GLOBAL_OFFSET_TABLE_ + [.-piclabel], %some_register | 
| Bill Wendling | fbef310 | 2009-02-11 21:51:19 +0000 | [diff] [blame] | 3403 |     BuildMI(FirstMBB, MBBI, DL, TII->get(X86::ADD32ri), GlobalBaseReg) | 
| Daniel Dunbar | 31e2c7b | 2009-09-01 22:06:46 +0000 | [diff] [blame] | 3404 |       .addReg(PC).addExternalSymbol("_GLOBAL_OFFSET_TABLE_", | 
| Chris Lattner | ac5e887 | 2009-06-25 17:38:33 +0000 | [diff] [blame] | 3405 |                                     X86II::MO_GOT_ABSOLUTE_ADDRESS); | 
| Dan Gohman | 57c3dac | 2008-09-30 00:58:23 +0000 | [diff] [blame] | 3406 |   } else { | 
 | 3407 |     GlobalBaseReg = PC; | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3408 |   } | 
 | 3409 |  | 
| Dan Gohman | 57c3dac | 2008-09-30 00:58:23 +0000 | [diff] [blame] | 3410 |   X86FI->setGlobalBaseReg(GlobalBaseReg); | 
 | 3411 |   return GlobalBaseReg; | 
| Dan Gohman | 8b74696 | 2008-09-23 18:22:58 +0000 | [diff] [blame] | 3412 | } |