Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1 | //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// |
| 2 | // |
| 3 | // The Subzero Code Generator |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 9 | /// |
| 10 | /// \file |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 11 | /// This file implements the skeleton of the TargetLowering class, specifically |
| 12 | /// invoking the appropriate lowering method for a given instruction kind and |
| 13 | /// driving global register allocation. It also implements the non-deleted |
| 14 | /// instruction iteration in LoweringContext. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 15 | /// |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 16 | //===----------------------------------------------------------------------===// |
| 17 | |
John Porto | 67f8de9 | 2015-06-25 10:14:17 -0700 | [diff] [blame] | 18 | #include "IceTargetLowering.h" |
| 19 | |
John Porto | aff4ccf | 2015-06-10 16:35:06 -0700 | [diff] [blame] | 20 | #include "IceAssemblerARM32.h" |
John Porto | 2da710c | 2015-06-29 07:57:02 -0700 | [diff] [blame] | 21 | #include "IceAssemblerMIPS32.h" |
John Porto | aff4ccf | 2015-06-10 16:35:06 -0700 | [diff] [blame] | 22 | #include "IceAssemblerX8632.h" |
John Porto | 7e93c62 | 2015-06-23 10:58:57 -0700 | [diff] [blame] | 23 | #include "IceAssemblerX8664.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 24 | #include "IceCfg.h" // setError() |
| 25 | #include "IceCfgNode.h" |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 26 | #include "IceGlobalInits.h" |
John Porto | ec3f565 | 2015-08-31 15:07:09 -0700 | [diff] [blame] | 27 | #include "IceInstVarIter.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 28 | #include "IceOperand.h" |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 29 | #include "IceRegAlloc.h" |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 30 | #include "IceTargetLoweringARM32.h" |
Jim Stichnoth | 6da4cef | 2015-06-11 13:26:33 -0700 | [diff] [blame] | 31 | #include "IceTargetLoweringMIPS32.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 32 | #include "IceTargetLoweringX8632.h" |
John Porto | 7e93c62 | 2015-06-23 10:58:57 -0700 | [diff] [blame] | 33 | #include "IceTargetLoweringX8664.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 34 | |
| 35 | namespace Ice { |
| 36 | |
// Binds this context to CfgNode N and positions the Cur/Next cursors.
// NOTE: End must be captured before rewind() runs, because rewind() calls
// skipDeleted(), which compares against End.
void LoweringContext::init(CfgNode *N) {
  Node = N;
  End = getNode()->getInsts().end();
  rewind();
  // Next starts one (non-deleted) instruction past Cur.
  advanceForward(Next);
}
| 43 | |
// Resets the cursors to the first non-deleted instruction of the node and
// clears the assignment-availability cache (LastDest/LastSrc).
void LoweringContext::rewind() {
  Begin = getNode()->getInsts().begin();
  Cur = Begin;
  skipDeleted(Cur);
  // Next is left equal to Cur here; init() advances it separately.
  Next = Cur;
  availabilityReset();
}
| 51 | |
| 52 | void LoweringContext::insert(Inst *Inst) { |
| 53 | getNode()->getInsts().insert(Next, Inst); |
Jim Stichnoth | 98712a3 | 2014-10-24 10:59:02 -0700 | [diff] [blame] | 54 | LastInserted = Inst; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 55 | } |
| 56 | |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 57 | void LoweringContext::skipDeleted(InstList::iterator &I) const { |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 58 | while (I != End && I->isDeleted()) |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 59 | ++I; |
| 60 | } |
| 61 | |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 62 | void LoweringContext::advanceForward(InstList::iterator &I) const { |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 63 | if (I != End) { |
| 64 | ++I; |
| 65 | skipDeleted(I); |
| 66 | } |
| 67 | } |
| 68 | |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 69 | Inst *LoweringContext::getLastInserted() const { |
Jim Stichnoth | 98712a3 | 2014-10-24 10:59:02 -0700 | [diff] [blame] | 70 | assert(LastInserted); |
| 71 | return LastInserted; |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 72 | } |
| 73 | |
// Invalidates the one-entry assignment-availability cache. After this call,
// availabilityGet() returns nullptr for every operand.
void LoweringContext::availabilityReset() {
  LastDest = nullptr;
  LastSrc = nullptr;
}
| 78 | |
| 79 | void LoweringContext::availabilityUpdate() { |
| 80 | availabilityReset(); |
| 81 | Inst *Instr = LastInserted; |
| 82 | if (Instr == nullptr) |
| 83 | return; |
Jim Stichnoth | 28b71be | 2015-10-12 15:24:46 -0700 | [diff] [blame] | 84 | if (!Instr->isVarAssign()) |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 85 | return; |
Jim Stichnoth | 28b71be | 2015-10-12 15:24:46 -0700 | [diff] [blame] | 86 | // Since isVarAssign() is true, the source operand must be a Variable. |
| 87 | LastDest = Instr->getDest(); |
| 88 | LastSrc = llvm::cast<Variable>(Instr->getSrc(0)); |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 89 | } |
| 90 | |
| 91 | Variable *LoweringContext::availabilityGet(Operand *Src) const { |
| 92 | assert(Src); |
| 93 | if (Src == LastDest) |
| 94 | return LastSrc; |
| 95 | return nullptr; |
| 96 | } |
| 97 | |
// Factory: creates the target-specific lowering object for Target, or flags
// an error on Func and returns nullptr if the target is not compiled in.
TargetLowering *TargetLowering::createLowering(TargetArch Target, Cfg *Func) {
// The SZTargets.def X-macro expands to one "if" per enabled target, each
// dispatching to that target's create() factory (e.g. TargetX8632::create).
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return Target##X::create(Func);
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target");
  return nullptr;
}
| 107 | |
// Runs the selected target's one-time static initializer, at most once per
// target (guarded by a per-target function-local flag).
// NOTE(review): the InitGuard read/update is not synchronized — presumably
// this is only called before any worker threads start; confirm.
void TargetLowering::staticInit(TargetArch Target) {
  // Call the specified target's static initializer.
  switch (Target) {
  default:
    llvm::report_fatal_error("Unsupported target");
    break;
// One case per enabled target, via the SZTargets.def X-macro.
#define SUBZERO_TARGET(X)                                                      \
  case Target_##X: {                                                           \
    static bool InitGuard##X = false;                                          \
    if (InitGuard##X)                                                          \
      return;                                                                  \
    InitGuard##X = true;                                                       \
    Target##X::staticInit();                                                   \
  } break;
#include "llvm/Config/SZTargets.def"
  }
}
| 125 | |
// Base-class constructor: caches the owning Cfg and its GlobalContext, and
// default-constructs the per-node LoweringContext.
TargetLowering::TargetLowering(Cfg *Func)
    : Func(Func), Ctx(Func->getContext()), Context() {}
Jim Stichnoth | e6d2478 | 2014-12-19 05:42:24 -0800 | [diff] [blame] | 128 | |
// Factory: creates the target-specific Assembler for Target, or flags an
// error on Func and returns nullptr if the target is not compiled in.
std::unique_ptr<Assembler> TargetLowering::createAssembler(TargetArch Target,
                                                           Cfg *Func) {
// One "if" per enabled target, via the SZTargets.def X-macro; each target's
// assembler lives in its own namespace X (e.g. X8632::AssemblerX8632).
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return std::unique_ptr<Assembler>(new X::Assembler##X());
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target assembler");
  return nullptr;
}
| 139 | |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 140 | void TargetLowering::doAddressOpt() { |
| 141 | if (llvm::isa<InstLoad>(*Context.getCur())) |
| 142 | doAddressOptLoad(); |
| 143 | else if (llvm::isa<InstStore>(*Context.getCur())) |
| 144 | doAddressOptStore(); |
| 145 | Context.advanceCur(); |
| 146 | Context.advanceNext(); |
| 147 | } |
| 148 | |
Qining Lu | aee5fa8 | 2015-08-20 14:59:03 -0700 | [diff] [blame] | 149 | void TargetLowering::doNopInsertion(RandomNumberGenerator &RNG) { |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 150 | Inst *I = Context.getCur(); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 151 | bool ShouldSkip = llvm::isa<InstFakeUse>(I) || llvm::isa<InstFakeDef>(I) || |
| 152 | llvm::isa<InstFakeKill>(I) || I->isRedundantAssign() || |
| 153 | I->isDeleted(); |
| 154 | if (!ShouldSkip) { |
Jan Voung | 1f47ad0 | 2015-03-20 15:01:26 -0700 | [diff] [blame] | 155 | int Probability = Ctx->getFlags().getNopProbabilityAsPercentage(); |
| 156 | for (int I = 0; I < Ctx->getFlags().getMaxNopsPerInstruction(); ++I) { |
Qining Lu | aee5fa8 | 2015-08-20 14:59:03 -0700 | [diff] [blame] | 157 | randomlyInsertNop(Probability / 100.0, RNG); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 158 | } |
| 159 | } |
| 160 | } |
| 161 | |
// Lowers a single instruction according to the information in Context, by
// checking the Context.Cur instruction kind and calling the appropriate
// lowering method. The lowering method should insert target instructions at
// the Cur.Next insertion point, and should not delete the Context.Cur
// instruction or advance Context.Cur.
//
// The lowering method may look ahead in the instruction stream as desired, and
// lower additional instructions in conjunction with the current one, for
// example fusing a compare and branch. If it does, it should advance
// Context.Cur to point to the next non-deleted instruction to process, and it
// should delete any additional instructions it consumes.
void TargetLowering::lower() {
  assert(!Context.atEnd());
  Inst *Inst = Context.getCur();
  // Drop instructions whose results are provably unused before dispatching.
  Inst->deleteIfDead();
  if (!Inst->isDeleted() && !llvm::isa<InstFakeDef>(Inst) &&
      !llvm::isa<InstFakeUse>(Inst)) {
    // Mark the current instruction as deleted before lowering, otherwise the
    // Dest variable will likely get marked as non-SSA. See
    // Variable::setDefinition(). However, just pass-through FakeDef and
    // FakeUse instructions that might have been inserted prior to lowering.
    Inst->setDeleted();
    // Dispatch on the high-level instruction kind; each case delegates to a
    // target-overridable lowerXxx() method.
    switch (Inst->getKind()) {
    case Inst::Alloca:
      lowerAlloca(llvm::cast<InstAlloca>(Inst));
      break;
    case Inst::Arithmetic:
      lowerArithmetic(llvm::cast<InstArithmetic>(Inst));
      break;
    case Inst::Assign:
      lowerAssign(llvm::cast<InstAssign>(Inst));
      break;
    case Inst::Br:
      lowerBr(llvm::cast<InstBr>(Inst));
      break;
    case Inst::Call:
      lowerCall(llvm::cast<InstCall>(Inst));
      break;
    case Inst::Cast:
      lowerCast(llvm::cast<InstCast>(Inst));
      break;
    case Inst::ExtractElement:
      lowerExtractElement(llvm::cast<InstExtractElement>(Inst));
      break;
    case Inst::Fcmp:
      lowerFcmp(llvm::cast<InstFcmp>(Inst));
      break;
    case Inst::Icmp:
      lowerIcmp(llvm::cast<InstIcmp>(Inst));
      break;
    case Inst::InsertElement:
      lowerInsertElement(llvm::cast<InstInsertElement>(Inst));
      break;
    case Inst::IntrinsicCall: {
      InstIntrinsicCall *Call = llvm::cast<InstIntrinsicCall>(Inst);
      // A returns-twice callee (e.g. setjmp-like) disables stack slot
      // coalescing elsewhere; record that fact on the function.
      if (Call->getIntrinsicInfo().ReturnsTwice)
        setCallsReturnsTwice(true);
      lowerIntrinsicCall(Call);
      break;
    }
    case Inst::Load:
      lowerLoad(llvm::cast<InstLoad>(Inst));
      break;
    case Inst::Phi:
      lowerPhi(llvm::cast<InstPhi>(Inst));
      break;
    case Inst::Ret:
      lowerRet(llvm::cast<InstRet>(Inst));
      break;
    case Inst::Select:
      lowerSelect(llvm::cast<InstSelect>(Inst));
      break;
    case Inst::Store:
      lowerStore(llvm::cast<InstStore>(Inst));
      break;
    case Inst::Switch:
      lowerSwitch(llvm::cast<InstSwitch>(Inst));
      break;
    case Inst::Unreachable:
      lowerUnreachable(llvm::cast<InstUnreachable>(Inst));
      break;
    default:
      // Target-specific or otherwise unhandled kinds fall through to
      // lowerOther(), which targets may override.
      lowerOther(Inst);
      break;
    }

    postLower();
  }

  Context.advanceCur();
  Context.advanceNext();
}
| 254 | |
// Lowers a single high-level instruction that is inserted (out of band) into
// Node just before the Next position, by pointing the context at it and
// invoking lower().
void TargetLowering::lowerInst(CfgNode *Node, InstList::iterator Next,
                               InstHighLevel *Instr) {
  // TODO(stichnot): Consider modifying the design/implementation to avoid
  // multiple init() calls when using lowerInst() to lower several instructions
  // in the same node.
  Context.init(Node);
  Context.setNext(Next);
  Context.insert(Instr);
  // Context.insert() placed Instr immediately before Next, so backing up one
  // position lands exactly on the newly inserted instruction.
  --Next;
  assert(&*Next == Instr);
  Context.setCur(Next);
  lower();
}
| 268 | |
// Default handler for instruction kinds without a dedicated lowering method.
// Targets override this to lower target-specific instruction kinds; the base
// implementation just flags an error on the function.
void TargetLowering::lowerOther(const Inst *Instr) {
  (void)Instr;
  Func->setError("Can't lower unsupported instruction type");
}
| 273 | |
// Drives register allocation, allowing all physical registers (except perhaps
// for the frame pointer) to be allocated. This set of registers could
// potentially be parameterized if we want to restrict registers e.g. for
// performance testing.
void TargetLowering::regAlloc(RegAllocKind Kind) {
  TimerMarker T(TimerStack::TT_regAlloc, Func);
  LinearScan LinearScan(Func);
  // Build the register mask: include all caller- and callee-save registers,
  // minus the frame pointer when the target reserves one.
  RegSetMask RegInclude = RegSet_None;
  RegSetMask RegExclude = RegSet_None;
  RegInclude |= RegSet_CallerSave;
  RegInclude |= RegSet_CalleeSave;
  if (hasFramePointer())
    RegExclude |= RegSet_FramePointer;
  llvm::SmallBitVector RegMask = getRegisterSet(RegInclude, RegExclude);
  // Repeat the allocation ("second chance") only for the initial global pass
  // and only when the flag requests it; iterate until a pass finishes with no
  // evictions.
  bool Repeat = (Kind == RAK_Global && Ctx->getFlags().shouldRepeatRegAlloc());
  do {
    LinearScan.init(Kind);
    LinearScan.scan(RegMask, Ctx->getFlags().shouldRandomizeRegAlloc());
    if (!LinearScan.hasEvictions())
      Repeat = false;
    Kind = RAK_SecondChance;
  } while (Repeat);
  // TODO(stichnot): Run the register allocator one more time to do stack slot
  // coalescing. The idea would be to initialize the Unhandled list with the
  // set of Variables that have no register and a non-empty live range, and
  // model an infinite number of registers. Maybe use the register aliasing
  // mechanism to get better packing of narrower slots.
}
| 302 | |
void TargetLowering::markRedefinitions() {
  // Find (non-SSA) instructions where the Dest variable appears in some source
  // operand, and set the IsDestRedefined flag to keep liveness analysis
  // consistent.
  // Scans only the [Cur, Next) window of instructions just produced by the
  // current lowering step.
  for (auto Inst = Context.getCur(), E = Context.getNext(); Inst != E; ++Inst) {
    if (Inst->isDeleted())
      continue;
    Variable *Dest = Inst->getDest();
    if (Dest == nullptr)
      continue;
    // FOREACH_VAR_IN_INST visits every Variable among the source operands.
    FOREACH_VAR_IN_INST(Var, *Inst) {
      if (Var == Dest) {
        Inst->setDestRedefined();
        break;
      }
    }
  }
}
| 321 | |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 322 | void TargetLowering::sortVarsByAlignment(VarList &Dest, |
| 323 | const VarList &Source) const { |
| 324 | Dest = Source; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 325 | // Instead of std::sort, we could do a bucket sort with log2(alignment) as |
| 326 | // the buckets, if performance is an issue. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 327 | std::sort(Dest.begin(), Dest.end(), |
| 328 | [this](const Variable *V1, const Variable *V2) { |
Jim Stichnoth | 8e6bf6e | 2015-06-03 15:58:12 -0700 | [diff] [blame] | 329 | return typeWidthInBytesOnStack(V1->getType()) > |
| 330 | typeWidthInBytesOnStack(V2->getType()); |
| 331 | }); |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 332 | } |
| 333 | |
// Computes the stack-frame layout parameters for spilled variables:
// - SortedSpilledVariables: the spill candidates, widest-first;
// - RegsUsed: bitmask of physical registers actually assigned;
// - GlobalsSize: bytes needed for multi-block ("global") variables;
// - SpillAreaSizeBytes: total spill area size (incl. coalesced locals);
// - SpillAreaAlignmentBytes / LocalsSlotsAlignmentBytes: area alignments;
// - TargetVarHook: lets the target claim variables that get their own
//   target-specific slot handling (those are excluded from the accounting).
void TargetLowering::getVarStackSlotParams(
    VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed,
    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
    std::function<bool(Variable *)> TargetVarHook) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // First pass: mark every variable referenced (as dest or source) by a
  // live instruction, so unreferenced ones get no slot.
  llvm::BitVector IsVarReferenced(Func->getNumVariables());
  for (CfgNode *Node : Func->getNodes()) {
    for (Inst &Inst : Node->getInsts()) {
      if (Inst.isDeleted())
        continue;
      if (const Variable *Var = Inst.getDest())
        IsVarReferenced[Var->getIndex()] = true;
      FOREACH_VAR_IN_INST(Var, Inst) {
        IsVarReferenced[Var->getIndex()] = true;
      }
    }
  }

  // If SimpleCoalescing is false, each variable without a register gets its
  // own unique stack slot, which leads to large stack frames. If
  // SimpleCoalescing is true, then each "global" variable without a register
  // gets its own slot, but "local" variable slots are reused across basic
  // blocks. E.g., if A and B are local to block 1 and C is local to block 2,
  // then C may share a slot with A or B.
  //
  // We cannot coalesce stack slots if this function calls a "returns twice"
  // function. In that case, basic blocks may be revisited, and variables local
  // to those basic blocks are actually live until after the called function
  // returns a second time.
  const bool SimpleCoalescing = !callsReturnsTwice();

  // Collect the spill candidates: variables with no register that need a
  // stack slot and are not handled by the target hook.
  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const VarList &Variables = Func->getVariables();
  VarList SpilledVariables;
  for (Variable *Var : Variables) {
    if (Var->hasReg()) {
      RegsUsed[Var->getRegNum()] = true;
      continue;
    }
    // An argument either does not need a stack slot (if passed in a register)
    // or already has one (if passed on the stack).
    if (Var->getIsArg())
      continue;
    // An unreferenced variable doesn't need a stack slot.
    if (!IsVarReferenced[Var->getIndex()])
      continue;
    // Check a target-specific variable (it may end up sharing stack slots) and
    // not need accounting here.
    if (TargetVarHook(Var))
      continue;
    SpilledVariables.push_back(Var);
  }

  SortedSpilledVariables.reserve(SpilledVariables.size());
  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);

  // Accumulate sizes: global variables each get unique space; locals are
  // coalesced per-node, so the locals area is the max over all nodes.
  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    // We have sorted by alignment, so the first variable we encounter that is
    // located in each area determines the max alignment for the area.
    if (!*SpillAreaAlignmentBytes)
      *SpillAreaAlignmentBytes = Increment;
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        *GlobalsSize += Increment;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
        if (!*LocalsSlotsAlignmentBytes)
          *LocalsSlotsAlignmentBytes = Increment;
      }
    } else {
      *SpillAreaSizeBytes += Increment;
    }
  }
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding.
  *SpillAreaSizeBytes += Ctx->getFlags().getTestStackExtra();
}
| 416 | |
| 417 | void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset, |
| 418 | uint32_t SpillAreaAlignmentBytes, |
| 419 | size_t GlobalsSize, |
| 420 | uint32_t LocalsSlotsAlignmentBytes, |
| 421 | uint32_t *SpillAreaPaddingBytes, |
| 422 | uint32_t *LocalsSlotsPaddingBytes) { |
| 423 | if (SpillAreaAlignmentBytes) { |
| 424 | uint32_t PaddingStart = SpillAreaStartOffset; |
| 425 | uint32_t SpillAreaStart = |
| 426 | Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes); |
| 427 | *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart; |
| 428 | } |
| 429 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 430 | // If there are separate globals and locals areas, make sure the locals area |
| 431 | // is aligned by padding the end of the globals area. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 432 | if (LocalsSlotsAlignmentBytes) { |
| 433 | uint32_t GlobalsAndSubsequentPaddingSize = GlobalsSize; |
| 434 | GlobalsAndSubsequentPaddingSize = |
| 435 | Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes); |
| 436 | *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize; |
| 437 | } |
| 438 | } |
| 439 | |
// Assigns a concrete stack offset to each spilled variable, mirroring the
// size accounting done in getVarStackSlotParams(): globals each get unique
// space at the front, locals are coalesced per basic block behind the
// globals (when coalescing is allowed). Offsets are negative (frame-pointer
// relative) when UsesFramePointer, otherwise measured from the stack pointer
// end of the spill area.
void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
                                         size_t SpillAreaPaddingBytes,
                                         size_t SpillAreaSizeBytes,
                                         size_t GlobalsAndSubsequentPaddingSize,
                                         bool UsesFramePointer) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding. This assumes that
  // SpillAreaSizeBytes has accounted for the extra test padding. When
  // UseFramePointer is true, the offset depends on the padding, not just the
  // SpillAreaSizeBytes. On the other hand, when UseFramePointer is false, the
  // offsets depend on the gap between SpillAreaSizeBytes and
  // SpillAreaPaddingBytes, so we don't increment that.
  size_t TestPadding = Ctx->getFlags().getTestStackExtra();
  if (UsesFramePointer)
    SpillAreaPaddingBytes += TestPadding;
  size_t GlobalsSpaceUsed = SpillAreaPaddingBytes;
  size_t NextStackOffset = SpillAreaPaddingBytes;
  // Per-node running size of the (coalesced) locals area.
  std::vector<size_t> LocalsSize(Func->getNumNodes());
  // Coalescing must be disabled for returns-twice callees; see
  // getVarStackSlotParams() for the rationale.
  const bool SimpleCoalescing = !callsReturnsTwice();

  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        // Multi-block variable: unique slot in the globals area.
        GlobalsSpaceUsed += Increment;
        NextStackOffset = GlobalsSpaceUsed;
      } else {
        // Single-block variable: slot within this node's locals region,
        // which starts after the (padded) globals area.
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        NextStackOffset = SpillAreaPaddingBytes +
                          GlobalsAndSubsequentPaddingSize +
                          LocalsSize[NodeIndex];
      }
    } else {
      NextStackOffset += Increment;
    }
    if (UsesFramePointer)
      Var->setStackOffset(-NextStackOffset);
    else
      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
  }
}
| 483 | |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 484 | InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, |
| 485 | SizeT MaxSrcs) { |
Jim Stichnoth | 5bff61c | 2015-10-28 09:26:00 -0700 | [diff] [blame] | 486 | constexpr bool HasTailCall = false; |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 487 | Constant *CallTarget = Ctx->getConstantExternSym(Name); |
| 488 | InstCall *Call = |
| 489 | InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); |
| 490 | return Call; |
| 491 | } |
| 492 | |
Andrew Scull | cfa628b | 2015-08-20 14:23:05 -0700 | [diff] [blame] | 493 | bool TargetLowering::shouldOptimizeMemIntrins() { |
| 494 | return Ctx->getFlags().getOptLevel() >= Opt_1 || |
| 495 | Ctx->getFlags().getForceMemIntrinOpt(); |
| 496 | } |
| 497 | |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 498 | void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { |
Jim Stichnoth | 20b71f5 | 2015-06-24 15:52:24 -0700 | [diff] [blame] | 499 | if (!BuildDefs::dump()) |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 500 | return; |
| 501 | Ostream &Str = Ctx->getStrEmit(); |
| 502 | if (C->getSuppressMangling()) |
| 503 | Str << C->getName(); |
| 504 | else |
| 505 | Str << Ctx->mangleName(C->getName()); |
| 506 | RelocOffsetT Offset = C->getOffset(); |
| 507 | if (Offset) { |
| 508 | if (Offset > 0) |
| 509 | Str << "+"; |
| 510 | Str << Offset; |
| 511 | } |
| 512 | } |
| 513 | |
| 514 | void TargetLowering::emit(const ConstantRelocatable *C) const { |
Jim Stichnoth | 20b71f5 | 2015-06-24 15:52:24 -0700 | [diff] [blame] | 515 | if (!BuildDefs::dump()) |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 516 | return; |
| 517 | Ostream &Str = Ctx->getStrEmit(); |
| 518 | Str << getConstantPrefix(); |
| 519 | emitWithoutPrefix(C); |
| 520 | } |
| 521 | |
// Factory: creates the data (globals/constant-pool) lowering object for the
// target selected in the flags; fatal error if that target is not compiled
// in (unlike TargetLowering::createLowering, there is no Cfg to flag).
std::unique_ptr<TargetDataLowering>
TargetDataLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
// One "if" per enabled target, via the SZTargets.def X-macro.
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return TargetData##X::create(Ctx);
#include "llvm/Config/SZTargets.def"

  llvm::report_fatal_error("Unsupported target data lowering");
}
| 532 | |
// Out-of-line defaulted destructor anchors the vtable in this TU.
TargetDataLowering::~TargetDataLowering() = default;
Jan Voung | 839c4ce | 2014-07-28 15:19:43 -0700 | [diff] [blame] | 534 | |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 535 | namespace { |
| 536 | |
| 537 | // dataSectionSuffix decides whether to use SectionSuffix or MangledVarName as |
| 538 | // data section suffix. Essentially, when using separate data sections for |
| 539 | // globals SectionSuffix is not necessary. |
| 540 | IceString dataSectionSuffix(const IceString &SectionSuffix, |
| 541 | const IceString &MangledVarName, |
| 542 | const bool DataSections) { |
| 543 | if (SectionSuffix.empty() && !DataSections) { |
| 544 | return ""; |
| 545 | } |
| 546 | |
| 547 | if (DataSections) { |
| 548 | // With data sections we don't need to use the SectionSuffix. |
| 549 | return "." + MangledVarName; |
| 550 | } |
| 551 | |
| 552 | assert(!SectionSuffix.empty()); |
| 553 | return "." + SectionSuffix; |
| 554 | } |
| 555 | |
| 556 | } // end of anonymous namespace |
| 557 | |
// Emits the textual assembly (.s) for one global variable: .type, .section,
// optional .globl and alignment directives, the label, the initializer bytes
// (or .zero for zero-initialized data), and finally the .size directive.
void TargetDataLowering::emitGlobal(const VariableDeclaration &Var,
                                    const IceString &SectionSuffix) {
  // Textual emission is compiled out of non-dump builds.
  if (!BuildDefs::dump())
    return;

  // If external and not initialized, this must be a cross test. Don't generate
  // a declaration for such cases.
  const bool IsExternal =
      Var.isExternal() || Ctx->getFlags().getDisableInternal();
  if (IsExternal && !Var.hasInitializer())
    return;

  Ostream &Str = Ctx->getStrEmit();
  const bool HasNonzeroInitializer = Var.hasNonzeroInitializer();
  const bool IsConstant = Var.getIsConstant();
  const SizeT Size = Var.getNumBytes();
  const IceString MangledName = Var.mangleName(Ctx);

  Str << "\t.type\t" << MangledName << ",%object\n";

  // Pick the target section: .rodata for constants, .data for variables with
  // a nonzero initializer, and .bss otherwise (zero-initialized).
  const bool UseDataSections = Ctx->getFlags().getDataSections();
  const IceString Suffix =
      dataSectionSuffix(SectionSuffix, MangledName, UseDataSections);
  if (IsConstant)
    Str << "\t.section\t.rodata" << Suffix << ",\"a\",%progbits\n";
  else if (HasNonzeroInitializer)
    Str << "\t.section\t.data" << Suffix << ",\"aw\",%progbits\n";
  else
    Str << "\t.section\t.bss" << Suffix << ",\"aw\",%nobits\n";

  if (IsExternal)
    Str << "\t.globl\t" << MangledName << "\n";

  const uint32_t Align = Var.getAlignment();
  if (Align > 1) {
    assert(llvm::isPowerOf2_32(Align));
    // Use the .p2align directive, since the .align N directive can either
    // interpret N as bytes, or power of 2 bytes, depending on the target.
    Str << "\t.p2align\t" << llvm::Log2_32(Align) << "\n";
  }

  Str << MangledName << ":\n";

  if (HasNonzeroInitializer) {
    // Emit each initializer chunk in order: raw data bytes, runs of zeros,
    // or a pointer-sized relocation against another declaration.
    for (const std::unique_ptr<VariableDeclaration::Initializer> &Init :
         Var.getInitializers()) {
      switch (Init->getKind()) {
      case VariableDeclaration::Initializer::DataInitializerKind: {
        const auto &Data =
            llvm::cast<VariableDeclaration::DataInitializer>(Init.get())
                ->getContents();
        for (SizeT i = 0; i < Init->getNumBytes(); ++i) {
          Str << "\t.byte\t" << (((unsigned)Data[i]) & 0xff) << "\n";
        }
        break;
      }
      case VariableDeclaration::Initializer::ZeroInitializerKind:
        Str << "\t.zero\t" << Init->getNumBytes() << "\n";
        break;
      case VariableDeclaration::Initializer::RelocInitializerKind: {
        const auto *Reloc =
            llvm::cast<VariableDeclaration::RelocInitializer>(Init.get());
        Str << "\t" << getEmit32Directive() << "\t";
        Str << Reloc->getDeclaration()->mangleName(Ctx);
        if (RelocOffsetT Offset = Reloc->getOffset()) {
          // Emit "sym + N" for non-negative offsets and "sym - N" for
          // negative ones. INT32_MIN is special-cased: it cannot be negated
          // without overflow, so emit it via the "+" form (the stream prints
          // its own minus sign).
          if (Offset >= 0 || (Offset == INT32_MIN))
            Str << " + " << Offset;
          else
            Str << " - " << -Offset;
        }
        Str << "\n";
        break;
      }
      }
    }
  } else {
    // NOTE: for non-constant zero initializers, this is BSS (no bits), so an
    // ELF writer would not write to the file, and only track virtual offsets,
    // but the .s writer still needs this .zero and cannot simply use the .size
    // to advance offsets.
    Str << "\t.zero\t" << Size << "\n";
  }

  Str << "\t.size\t" << MangledName << ", " << Size << "\n";
}
| 643 | |
Jan Voung | fb79284 | 2015-06-11 15:27:50 -0700 | [diff] [blame] | 644 | std::unique_ptr<TargetHeaderLowering> |
| 645 | TargetHeaderLowering::createLowering(GlobalContext *Ctx) { |
| 646 | TargetArch Target = Ctx->getFlags().getTargetArch(); |
| 647 | #define SUBZERO_TARGET(X) \ |
| 648 | if (Target == Target_##X) \ |
| 649 | return TargetHeader##X::create(Ctx); |
| 650 | #include "llvm/Config/SZTargets.def" |
| 651 | |
| 652 | llvm::report_fatal_error("Unsupported target header lowering"); |
| 653 | } |
| 654 | |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 655 | TargetHeaderLowering::~TargetHeaderLowering() = default; |
Jan Voung | fb79284 | 2015-06-11 15:27:50 -0700 | [diff] [blame] | 656 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 657 | } // end of namespace Ice |