Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1 | //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// |
| 2 | // |
| 3 | // The Subzero Code Generator |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 9 | /// |
| 10 | /// \file |
Jim Stichnoth | 92a6e5b | 2015-12-02 16:52:44 -0800 | [diff] [blame] | 11 | /// \brief Implements the skeleton of the TargetLowering class. |
| 12 | /// |
| 13 | /// Specifically this invokes the appropriate lowering method for a given |
| 14 | /// instruction kind and driving global register allocation. It also implements |
| 15 | /// the non-deleted instruction iteration in LoweringContext. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 16 | /// |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 17 | //===----------------------------------------------------------------------===// |
| 18 | |
John Porto | 67f8de9 | 2015-06-25 10:14:17 -0700 | [diff] [blame] | 19 | #include "IceTargetLowering.h" |
| 20 | |
John Porto | 36d6aa6 | 2016-02-26 07:19:59 -0800 | [diff] [blame] | 21 | #include "IceBitVector.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 22 | #include "IceCfg.h" // setError() |
| 23 | #include "IceCfgNode.h" |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 24 | #include "IceGlobalContext.h" |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 25 | #include "IceGlobalInits.h" |
John Porto | ec3f565 | 2015-08-31 15:07:09 -0700 | [diff] [blame] | 26 | #include "IceInstVarIter.h" |
Manasij Mukherjee | 7cd926d | 2016-08-04 12:33:23 -0700 | [diff] [blame] | 27 | #include "IceLiveness.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 28 | #include "IceOperand.h" |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 29 | #include "IceRegAlloc.h" |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 30 | |
John Porto | c5bc5cb | 2016-03-21 11:18:02 -0700 | [diff] [blame] | 31 | #include <string> |
| 32 | #include <vector> |
| 33 | |
Jim Stichnoth | 999a22f | 2016-03-12 10:22:53 -0800 | [diff] [blame] | 34 | #define TARGET_LOWERING_CLASS_FOR(t) Target_##t |
| 35 | |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 36 | // We prevent target-specific implementation details from leaking outside their |
| 37 | // implementations by forbidding #include of target-specific header files |
| 38 | // anywhere outside their own files. To create target-specific objects |
| 39 | // (TargetLowering, TargetDataLowering, and TargetHeaderLowering) we use the |
| 40 | // following named constructors. For reference, each target Foo needs to |
| 41 | // implement the following named constructors and initializer: |
| 42 | // |
| 43 | // namespace Foo { |
| 44 | // unique_ptr<Ice::TargetLowering> createTargetLowering(Ice::Cfg *); |
| 45 | // unique_ptr<Ice::TargetDataLowering> |
| 46 | // createTargetDataLowering(Ice::GlobalContext*); |
| 47 | // unique_ptr<Ice::TargetHeaderLowering> |
| 48 | // createTargetHeaderLowering(Ice::GlobalContext *); |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 49 | // void staticInit(::Ice::GlobalContext *); |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 50 | // } |
| 51 | #define SUBZERO_TARGET(X) \ |
| 52 | namespace X { \ |
| 53 | std::unique_ptr<::Ice::TargetLowering> \ |
| 54 | createTargetLowering(::Ice::Cfg *Func); \ |
| 55 | std::unique_ptr<::Ice::TargetDataLowering> \ |
| 56 | createTargetDataLowering(::Ice::GlobalContext *Ctx); \ |
| 57 | std::unique_ptr<::Ice::TargetHeaderLowering> \ |
| 58 | createTargetHeaderLowering(::Ice::GlobalContext *Ctx); \ |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 59 | void staticInit(::Ice::GlobalContext *Ctx); \ |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 60 | bool shouldBePooled(const ::Ice::Constant *C); \ |
Nicolas Capens | 32f9cce | 2016-10-19 01:24:27 -0400 | [diff] [blame] | 61 | ::Ice::Type getPointerType(); \ |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 62 | } // end of namespace X |
Jim Stichnoth | 999a22f | 2016-03-12 10:22:53 -0800 | [diff] [blame] | 63 | #include "SZTargets.def" |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 64 | #undef SUBZERO_TARGET |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 65 | |
| 66 | namespace Ice { |
// Binds the context to node N and positions the cursor at the first
// non-deleted instruction. End must be captured before rewind() runs, since
// skipDeleted() compares iterators against it.
void LoweringContext::init(CfgNode *N) {
  Node = N;
  End = getNode()->getInsts().end();
  rewind();
  // rewind() leaves Next == Cur; advance Next so it points one non-deleted
  // instruction past Cur, establishing the Cur/Next insertion-point pair.
  advanceForward(Next);
}
| 73 | |
| 74 | void LoweringContext::rewind() { |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 75 | Begin = getNode()->getInsts().begin(); |
| 76 | Cur = Begin; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 77 | skipDeleted(Cur); |
| 78 | Next = Cur; |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 79 | availabilityReset(); |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 80 | } |
| 81 | |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 82 | void LoweringContext::insert(Inst *Instr) { |
| 83 | getNode()->getInsts().insert(Next, Instr); |
| 84 | LastInserted = Instr; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 85 | } |
| 86 | |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 87 | void LoweringContext::skipDeleted(InstList::iterator &I) const { |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 88 | while (I != End && I->isDeleted()) |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 89 | ++I; |
| 90 | } |
| 91 | |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 92 | void LoweringContext::advanceForward(InstList::iterator &I) const { |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 93 | if (I != End) { |
| 94 | ++I; |
| 95 | skipDeleted(I); |
| 96 | } |
| 97 | } |
| 98 | |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 99 | Inst *LoweringContext::getLastInserted() const { |
Jim Stichnoth | 98712a3 | 2014-10-24 10:59:02 -0700 | [diff] [blame] | 100 | assert(LastInserted); |
| 101 | return LastInserted; |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 102 | } |
| 103 | |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 104 | void LoweringContext::availabilityReset() { |
| 105 | LastDest = nullptr; |
| 106 | LastSrc = nullptr; |
| 107 | } |
| 108 | |
| 109 | void LoweringContext::availabilityUpdate() { |
| 110 | availabilityReset(); |
| 111 | Inst *Instr = LastInserted; |
| 112 | if (Instr == nullptr) |
| 113 | return; |
Jim Stichnoth | 28b71be | 2015-10-12 15:24:46 -0700 | [diff] [blame] | 114 | if (!Instr->isVarAssign()) |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 115 | return; |
Jim Stichnoth | 28b71be | 2015-10-12 15:24:46 -0700 | [diff] [blame] | 116 | // Since isVarAssign() is true, the source operand must be a Variable. |
| 117 | LastDest = Instr->getDest(); |
| 118 | LastSrc = llvm::cast<Variable>(Instr->getSrc(0)); |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 119 | } |
| 120 | |
| 121 | Variable *LoweringContext::availabilityGet(Operand *Src) const { |
| 122 | assert(Src); |
| 123 | if (Src == LastDest) |
| 124 | return LastSrc; |
| 125 | return nullptr; |
| 126 | } |
| 127 | |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 128 | namespace { |
| 129 | |
John Porto | e82b560 | 2016-02-24 15:58:55 -0800 | [diff] [blame] | 130 | void printRegisterSet(Ostream &Str, const SmallBitVector &Bitset, |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 131 | std::function<std::string(RegNumT)> getRegName, |
| 132 | const std::string &LineIndentString) { |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 133 | constexpr size_t RegistersPerLine = 16; |
| 134 | size_t Count = 0; |
Jim Stichnoth | 8aa3966 | 2016-02-10 11:20:30 -0800 | [diff] [blame] | 135 | for (RegNumT RegNum : RegNumBVIter(Bitset)) { |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 136 | if (Count == 0) { |
| 137 | Str << LineIndentString; |
| 138 | } else { |
| 139 | Str << ","; |
| 140 | } |
| 141 | if (Count > 0 && Count % RegistersPerLine == 0) |
| 142 | Str << "\n" << LineIndentString; |
| 143 | ++Count; |
Jim Stichnoth | 8aa3966 | 2016-02-10 11:20:30 -0800 | [diff] [blame] | 144 | Str << getRegName(RegNum); |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 145 | } |
| 146 | if (Count) |
| 147 | Str << "\n"; |
| 148 | } |
| 149 | |
Jim Stichnoth | 2544d4d | 2016-01-22 13:07:46 -0800 | [diff] [blame] | 150 | // Splits "<class>:<reg>" into "<class>" plus "<reg>". If there is no <class> |
| 151 | // component, the result is "" plus "<reg>". |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 152 | void splitToClassAndName(const std::string &RegName, std::string *SplitRegClass, |
| 153 | std::string *SplitRegName) { |
Jim Stichnoth | 2544d4d | 2016-01-22 13:07:46 -0800 | [diff] [blame] | 154 | constexpr const char Separator[] = ":"; |
| 155 | constexpr size_t SeparatorWidth = llvm::array_lengthof(Separator) - 1; |
| 156 | size_t Pos = RegName.find(Separator); |
| 157 | if (Pos == std::string::npos) { |
| 158 | *SplitRegClass = ""; |
| 159 | *SplitRegName = RegName; |
| 160 | } else { |
| 161 | *SplitRegClass = RegName.substr(0, Pos); |
| 162 | *SplitRegName = RegName.substr(Pos + SeparatorWidth); |
| 163 | } |
| 164 | } |
| 165 | |
Jim Stichnoth | 816fd68 | 2016-02-16 05:47:32 -0800 | [diff] [blame] | 166 | LLVM_ATTRIBUTE_NORETURN void badTargetFatalError(TargetArch Target) { |
| 167 | llvm::report_fatal_error("Unsupported target: " + |
| 168 | std::string(targetArchString(Target))); |
| 169 | } |
| 170 | |
Karl Schimpf | 5403f5d | 2016-01-15 11:07:46 -0800 | [diff] [blame] | 171 | } // end of anonymous namespace |
| 172 | |
// Filters the per-type register-allocation sets according to the
// -use-restricted-registers / -exclude-registers command-line flags. Each flag
// entry is "<reg>" or "<class>:<reg>"; unrecognized names are collected and
// reported as a single fatal error. TypeToRegisterSet[] is modified in place.
void TargetLowering::filterTypeToRegisterSet(
    GlobalContext *Ctx, int32_t NumRegs, SmallBitVector TypeToRegisterSet[],
    size_t TypeToRegisterSetSize,
    std::function<std::string(RegNumT)> getRegName,
    std::function<const char *(RegClass)> getRegClassName) {
  // One bitvector per register class, all initially empty.
  std::vector<SmallBitVector> UseSet(TypeToRegisterSetSize,
                                     SmallBitVector(NumRegs));
  std::vector<SmallBitVector> ExcludeSet(TypeToRegisterSetSize,
                                         SmallBitVector(NumRegs));

  // Build a reverse map from printable register name to register number.
  std::unordered_map<std::string, RegNumT> RegNameToIndex;
  for (int32_t RegIndex = 0; RegIndex < NumRegs; ++RegIndex) {
    const auto RegNum = RegNumT::fromInt(RegIndex);
    RegNameToIndex[getRegName(RegNum)] = RegNum;
  }

  std::vector<std::string> BadRegNames;

  // The processRegList function iterates across the RegNames vector. Each
  // entry in the vector is a string of the form "<reg>" or "<class>:<reg>".
  // The register class and register number are computed, and the corresponding
  // bit is set in RegSet[][]. If "<class>:" is missing, then the bit is set
  // for all classes.
  auto processRegList = [&](const std::vector<std::string> &RegNames,
                            std::vector<SmallBitVector> &RegSet) {
    for (const std::string &RegClassAndName : RegNames) {
      std::string RClass;
      std::string RName;
      splitToClassAndName(RegClassAndName, &RClass, &RName);
      if (!RegNameToIndex.count(RName)) {
        BadRegNames.push_back(RName);
        continue;
      }
      const int32_t RegIndex = RegNameToIndex.at(RName);
      for (SizeT TypeIndex = 0; TypeIndex < TypeToRegisterSetSize;
           ++TypeIndex) {
        if (RClass.empty() ||
            RClass == getRegClassName(static_cast<RegClass>(TypeIndex))) {
          // Only copy through bits that were already allocatable for this
          // class; a use/exclude flag never adds new registers to a class.
          RegSet[TypeIndex][RegIndex] = TypeToRegisterSet[TypeIndex][RegIndex];
        }
      }
    }
  };

  processRegList(getFlags().getUseRestrictedRegisters(), UseSet);
  processRegList(getFlags().getExcludedRegisters(), ExcludeSet);

  if (!BadRegNames.empty()) {
    std::string Buffer;
    llvm::raw_string_ostream StrBuf(Buffer);
    StrBuf << "Unrecognized use/exclude registers:";
    for (const auto &RegName : BadRegNames)
      StrBuf << " " << RegName;
    llvm::report_fatal_error(StrBuf.str());
  }

  // Apply filters: a non-empty use-set replaces the allocatable set outright,
  // and excluded registers are then removed unconditionally.
  for (size_t TypeIndex = 0; TypeIndex < TypeToRegisterSetSize; ++TypeIndex) {
    SmallBitVector *TypeBitSet = &TypeToRegisterSet[TypeIndex];
    SmallBitVector *UseBitSet = &UseSet[TypeIndex];
    SmallBitVector *ExcludeBitSet = &ExcludeSet[TypeIndex];
    if (UseBitSet->any())
      *TypeBitSet = *UseBitSet;
    (*TypeBitSet).reset(*ExcludeBitSet);
  }

  // Display filtered register sets, if requested.
  if (BuildDefs::dump() && NumRegs &&
      (getFlags().getVerbose() & IceV_AvailableRegs)) {
    Ostream &Str = Ctx->getStrDump();
    const std::string Indent = "  ";
    const std::string IndentTwice = Indent + Indent;
    Str << "Registers available for register allocation:\n";
    for (size_t TypeIndex = 0; TypeIndex < TypeToRegisterSetSize; ++TypeIndex) {
      Str << Indent << getRegClassName(static_cast<RegClass>(TypeIndex))
          << ":\n";
      printRegisterSet(Str, TypeToRegisterSet[TypeIndex], getRegName,
                       IndentTwice);
    }
    Str << "\n";
  }
}
| 255 | |
// Named-constructor dispatch: forwards to the per-target createTargetLowering
// declared by the SUBZERO_TARGET macro above. The case labels are expanded
// from SZTargets.def; an unknown architecture is a fatal error.
std::unique_ptr<TargetLowering>
TargetLowering::createLowering(TargetArch Target, Cfg *Func) {
  switch (Target) {
  default:
    badTargetFatalError(Target);
#define SUBZERO_TARGET(X)                                                      \
  case TARGET_LOWERING_CLASS_FOR(X):                                          \
    return ::X::createTargetLowering(Func);
#include "SZTargets.def"
#undef SUBZERO_TARGET
  }
}
| 268 | |
// Runs the selected target's one-time static initializer. The per-target
// InitGuard flag makes repeated calls no-ops. NOTE(review): the guard is a
// plain function-local bool, so this looks single-threaded-init only —
// confirm callers serialize staticInit.
void TargetLowering::staticInit(GlobalContext *Ctx) {
  const TargetArch Target = getFlags().getTargetArch();
  // Call the specified target's static initializer.
  switch (Target) {
  default:
    badTargetFatalError(Target);
#define SUBZERO_TARGET(X)                                                      \
  case TARGET_LOWERING_CLASS_FOR(X): {                                        \
    static bool InitGuard##X = false;                                         \
    if (InitGuard##X) {                                                       \
      return;                                                                 \
    }                                                                         \
    InitGuard##X = true;                                                      \
    ::X::staticInit(Ctx);                                                     \
  } break;
#include "SZTargets.def"
#undef SUBZERO_TARGET
  }
}
| 288 | |
// Asks the selected target whether constant C should be placed in a constant
// pool. Unknown targets conservatively answer false rather than aborting.
bool TargetLowering::shouldBePooled(const Constant *C) {
  const TargetArch Target = getFlags().getTargetArch();
  switch (Target) {
  default:
    return false;
#define SUBZERO_TARGET(X)                                                      \
  case TARGET_LOWERING_CLASS_FOR(X):                                          \
    return ::X::shouldBePooled(C);
#include "SZTargets.def"
#undef SUBZERO_TARGET
  }
}
| 301 | |
// Returns the selected target's native pointer type. Unknown targets fall
// back to IceType_void rather than aborting.
::Ice::Type TargetLowering::getPointerType() {
  const TargetArch Target = getFlags().getTargetArch();
  switch (Target) {
  default:
    return ::Ice::IceType_void;
#define SUBZERO_TARGET(X)                                                      \
  case TARGET_LOWERING_CLASS_FOR(X):                                          \
    return ::X::getPointerType();
#include "SZTargets.def"
#undef SUBZERO_TARGET
  }
}
| 314 | |
John Porto | ac2388c | 2016-01-22 07:10:56 -0800 | [diff] [blame] | 315 | TargetLowering::SandboxType |
| 316 | TargetLowering::determineSandboxTypeFromFlags(const ClFlags &Flags) { |
| 317 | assert(!Flags.getUseSandboxing() || !Flags.getUseNonsfi()); |
| 318 | if (Flags.getUseNonsfi()) { |
| 319 | return TargetLowering::ST_Nonsfi; |
| 320 | } |
| 321 | if (Flags.getUseSandboxing()) { |
| 322 | return TargetLowering::ST_NaCl; |
| 323 | } |
| 324 | return TargetLowering::ST_None; |
| 325 | } |
| 326 | |
// Captures the function being lowered, its global context, and the sandboxing
// mode derived from the command-line flags.
TargetLowering::TargetLowering(Cfg *Func)
    : Func(Func), Ctx(Func->getContext()),
      SandboxingType(determineSandboxTypeFromFlags(getFlags())) {}
Jim Stichnoth | e6d2478 | 2014-12-19 05:42:24 -0800 | [diff] [blame] | 330 | |
// RAII bundle-lock: emits a _bundle_lock when sandboxing is enabled and marks
// the target as inside an auto bundle. Nesting is not allowed (asserted).
TargetLowering::AutoBundle::AutoBundle(TargetLowering *Target,
                                       InstBundleLock::Option Option)
    : Target(Target), NeedSandboxing(getFlags().getUseSandboxing()) {
  assert(!Target->AutoBundling);
  Target->AutoBundling = true;
  if (NeedSandboxing) {
    Target->_bundle_lock(Option);
  }
}
| 340 | |
| 341 | TargetLowering::AutoBundle::~AutoBundle() { |
| 342 | assert(Target->AutoBundling); |
| 343 | Target->AutoBundling = false; |
| 344 | if (NeedSandboxing) { |
| 345 | Target->_bundle_unlock(); |
| 346 | } |
| 347 | } |
| 348 | |
// Walks every instruction of every node, giving the target a chance to replace
// instructions with helper calls before lowering proper. The flag saver marks
// the phase so other code can tell helper generation is in progress.
void TargetLowering::genTargetHelperCalls() {
  TimerMarker T(TimerStack::TT_genHelpers, Func);
  Utils::BoolFlagSaver _(GeneratingTargetHelpers, true);
  for (CfgNode *Node : Func->getNodes()) {
    Context.init(Node);
    while (!Context.atEnd()) {
      // Advances Cur/Next on scope exit, even if the callee rewrites the
      // instruction stream.
      PostIncrLoweringContext _(Context);
      genTargetHelperCallFor(iteratorToInst(Context.getCur()));
    }
  }
}
| 360 | |
// Performs target address-mode optimization on the current instruction,
// dispatching on its kind (load, store, or the Load/StoreSubVector
// intrinsics), then advances the context cursor.
void TargetLowering::doAddressOpt() {
  // Target-specific catch-all runs first, regardless of instruction kind.
  doAddressOptOther();
  if (llvm::isa<InstLoad>(*Context.getCur()))
    doAddressOptLoad();
  else if (llvm::isa<InstStore>(*Context.getCur()))
    doAddressOptStore();
  else if (auto *Intrinsic =
               llvm::dyn_cast<InstIntrinsicCall>(&*Context.getCur())) {
    if (Intrinsic->getIntrinsicInfo().ID == Intrinsics::LoadSubVector)
      doAddressOptLoadSubVector();
    else if (Intrinsic->getIntrinsicInfo().ID == Intrinsics::StoreSubVector)
      doAddressOptStoreSubVector();
  }
  Context.advanceCur();
  Context.advanceNext();
}
| 377 | |
Qining Lu | aee5fa8 | 2015-08-20 14:59:03 -0700 | [diff] [blame] | 378 | void TargetLowering::doNopInsertion(RandomNumberGenerator &RNG) { |
Jim Stichnoth | f5fdd23 | 2016-05-09 12:24:36 -0700 | [diff] [blame] | 379 | Inst *I = iteratorToInst(Context.getCur()); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 380 | bool ShouldSkip = llvm::isa<InstFakeUse>(I) || llvm::isa<InstFakeDef>(I) || |
| 381 | llvm::isa<InstFakeKill>(I) || I->isRedundantAssign() || |
| 382 | I->isDeleted(); |
| 383 | if (!ShouldSkip) { |
Karl Schimpf | d469994 | 2016-04-02 09:55:31 -0700 | [diff] [blame] | 384 | int Probability = getFlags().getNopProbabilityAsPercentage(); |
| 385 | for (int I = 0; I < getFlags().getMaxNopsPerInstruction(); ++I) { |
Qining Lu | aee5fa8 | 2015-08-20 14:59:03 -0700 | [diff] [blame] | 386 | randomlyInsertNop(Probability / 100.0, RNG); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 387 | } |
| 388 | } |
| 389 | } |
| 390 | |
// Lowers a single instruction according to the information in Context, by
// checking the Context.Cur instruction kind and calling the appropriate
// lowering method. The lowering method should insert target instructions at
// the Cur.Next insertion point, and should not delete the Context.Cur
// instruction or advance Context.Cur.
//
// The lowering method may look ahead in the instruction stream as desired, and
// lower additional instructions in conjunction with the current one, for
// example fusing a compare and branch. If it does, it should advance
// Context.Cur to point to the next non-deleted instruction to process, and it
// should delete any additional instructions it consumes.
void TargetLowering::lower() {
  assert(!Context.atEnd());
  Inst *Instr = iteratorToInst(Context.getCur());
  Instr->deleteIfDead();
  if (!Instr->isDeleted() && !llvm::isa<InstFakeDef>(Instr) &&
      !llvm::isa<InstFakeUse>(Instr)) {
    // Mark the current instruction as deleted before lowering, otherwise the
    // Dest variable will likely get marked as non-SSA. See
    // Variable::setDefinition(). However, just pass-through FakeDef and
    // FakeUse instructions that might have been inserted prior to lowering.
    Instr->setDeleted();
    // Dispatch to the per-kind lowering hook implemented by the target.
    switch (Instr->getKind()) {
    case Inst::Alloca:
      lowerAlloca(llvm::cast<InstAlloca>(Instr));
      break;
    case Inst::Arithmetic:
      lowerArithmetic(llvm::cast<InstArithmetic>(Instr));
      break;
    case Inst::Assign:
      lowerAssign(llvm::cast<InstAssign>(Instr));
      break;
    case Inst::Br:
      lowerBr(llvm::cast<InstBr>(Instr));
      break;
    case Inst::Breakpoint:
      lowerBreakpoint(llvm::cast<InstBreakpoint>(Instr));
      break;
    case Inst::Call:
      lowerCall(llvm::cast<InstCall>(Instr));
      break;
    case Inst::Cast:
      lowerCast(llvm::cast<InstCast>(Instr));
      break;
    case Inst::ExtractElement:
      lowerExtractElement(llvm::cast<InstExtractElement>(Instr));
      break;
    case Inst::Fcmp:
      lowerFcmp(llvm::cast<InstFcmp>(Instr));
      break;
    case Inst::Icmp:
      lowerIcmp(llvm::cast<InstIcmp>(Instr));
      break;
    case Inst::InsertElement:
      lowerInsertElement(llvm::cast<InstInsertElement>(Instr));
      break;
    case Inst::IntrinsicCall: {
      auto *Call = llvm::cast<InstIntrinsicCall>(Instr);
      // setjmp-like intrinsics constrain register allocation for the whole
      // function; record that before lowering the call.
      if (Call->getIntrinsicInfo().ReturnsTwice)
        setCallsReturnsTwice(true);
      lowerIntrinsicCall(Call);
      break;
    }
    case Inst::Load:
      lowerLoad(llvm::cast<InstLoad>(Instr));
      break;
    case Inst::Phi:
      lowerPhi(llvm::cast<InstPhi>(Instr));
      break;
    case Inst::Ret:
      lowerRet(llvm::cast<InstRet>(Instr));
      break;
    case Inst::Select:
      lowerSelect(llvm::cast<InstSelect>(Instr));
      break;
    case Inst::ShuffleVector:
      lowerShuffleVector(llvm::cast<InstShuffleVector>(Instr));
      break;
    case Inst::Store:
      lowerStore(llvm::cast<InstStore>(Instr));
      break;
    case Inst::Switch:
      lowerSwitch(llvm::cast<InstSwitch>(Instr));
      break;
    case Inst::Unreachable:
      lowerUnreachable(llvm::cast<InstUnreachable>(Instr));
      break;
    default:
      // Target-specific or unknown kinds; default implementation reports an
      // error unless the target overrides lowerOther().
      lowerOther(Instr);
      break;
    }

    postLower();
  }

  Context.advanceCur();
  Context.advanceNext();
}
| 489 | |
Jim Stichnoth | a3f57b9 | 2015-07-30 12:46:04 -0700 | [diff] [blame] | 490 | void TargetLowering::lowerInst(CfgNode *Node, InstList::iterator Next, |
| 491 | InstHighLevel *Instr) { |
| 492 | // TODO(stichnot): Consider modifying the design/implementation to avoid |
| 493 | // multiple init() calls when using lowerInst() to lower several instructions |
| 494 | // in the same node. |
| 495 | Context.init(Node); |
| 496 | Context.setNext(Next); |
| 497 | Context.insert(Instr); |
| 498 | --Next; |
Jim Stichnoth | f5fdd23 | 2016-05-09 12:24:36 -0700 | [diff] [blame] | 499 | assert(iteratorToInst(Next) == Instr); |
Jim Stichnoth | a3f57b9 | 2015-07-30 12:46:04 -0700 | [diff] [blame] | 500 | Context.setCur(Next); |
| 501 | lower(); |
| 502 | } |
| 503 | |
Jim Stichnoth | e4f65d8 | 2015-06-17 22:16:02 -0700 | [diff] [blame] | 504 | void TargetLowering::lowerOther(const Inst *Instr) { |
| 505 | (void)Instr; |
| 506 | Func->setError("Can't lower unsupported instruction type"); |
| 507 | } |
| 508 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 509 | // Drives register allocation, allowing all physical registers (except perhaps |
| 510 | // for the frame pointer) to be allocated. This set of registers could |
| 511 | // potentially be parameterized if we want to restrict registers e.g. for |
| 512 | // performance testing. |
Jim Stichnoth | 70d0a05 | 2014-11-14 15:53:46 -0800 | [diff] [blame] | 513 | void TargetLowering::regAlloc(RegAllocKind Kind) { |
Jim Stichnoth | 8363a06 | 2014-10-07 10:02:38 -0700 | [diff] [blame] | 514 | TimerMarker T(TimerStack::TT_regAlloc, Func); |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 515 | LinearScan LinearScan(Func); |
| 516 | RegSetMask RegInclude = RegSet_None; |
| 517 | RegSetMask RegExclude = RegSet_None; |
| 518 | RegInclude |= RegSet_CallerSave; |
| 519 | RegInclude |= RegSet_CalleeSave; |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 520 | if (hasFramePointer()) |
| 521 | RegExclude |= RegSet_FramePointer; |
John Porto | e82b560 | 2016-02-24 15:58:55 -0800 | [diff] [blame] | 522 | SmallBitVector RegMask = getRegisterSet(RegInclude, RegExclude); |
Karl Schimpf | d469994 | 2016-04-02 09:55:31 -0700 | [diff] [blame] | 523 | bool Repeat = (Kind == RAK_Global && getFlags().getRepeatRegAlloc()); |
Nicolas Capens | 8bd18e1 | 2017-01-20 15:21:45 -0500 | [diff] [blame] | 524 | CfgSet<Variable *> EmptySet; |
Jim Stichnoth | 4001c93 | 2015-10-09 14:33:26 -0700 | [diff] [blame] | 525 | do { |
Nicolas Capens | 8bd18e1 | 2017-01-20 15:21:45 -0500 | [diff] [blame] | 526 | LinearScan.init(Kind, EmptySet); |
Karl Schimpf | d469994 | 2016-04-02 09:55:31 -0700 | [diff] [blame] | 527 | LinearScan.scan(RegMask, getFlags().getRandomizeRegisterAllocation()); |
Jim Stichnoth | 4001c93 | 2015-10-09 14:33:26 -0700 | [diff] [blame] | 528 | if (!LinearScan.hasEvictions()) |
| 529 | Repeat = false; |
| 530 | Kind = RAK_SecondChance; |
| 531 | } while (Repeat); |
Jim Stichnoth | a1da6ff | 2015-11-16 15:59:39 -0800 | [diff] [blame] | 532 | // TODO(stichnot): Run the register allocator one more time to do stack slot |
| 533 | // coalescing. The idea would be to initialize the Unhandled list with the |
| 534 | // set of Variables that have no register and a non-empty live range, and |
| 535 | // model an infinite number of registers. Maybe use the register aliasing |
| 536 | // mechanism to get better packing of narrower slots. |
Manasij Mukherjee | 7cd926d | 2016-08-04 12:33:23 -0700 | [diff] [blame] | 537 | if (getFlags().getSplitGlobalVars()) |
| 538 | postRegallocSplitting(RegMask); |
| 539 | } |
| 540 | |
| 541 | namespace { |
| 542 | CfgVector<Inst *> getInstructionsInRange(CfgNode *Node, InstNumberT Start, |
| 543 | InstNumberT End) { |
| 544 | CfgVector<Inst *> Result; |
| 545 | bool Started = false; |
| 546 | auto Process = [&](InstList &Insts) { |
| 547 | for (auto &Instr : Insts) { |
| 548 | if (Instr.isDeleted()) { |
| 549 | continue; |
| 550 | } |
| 551 | if (Instr.getNumber() == Start) { |
| 552 | Started = true; |
| 553 | } |
| 554 | if (Started) { |
| 555 | Result.emplace_back(&Instr); |
| 556 | } |
| 557 | if (Instr.getNumber() == End) { |
| 558 | break; |
| 559 | } |
| 560 | } |
| 561 | }; |
| 562 | Process(Node->getPhis()); |
| 563 | Process(Node->getInsts()); |
| 564 | // TODO(manasijm): Investigate why checking >= End significantly changes |
| 565 | // output. Should not happen when renumbering produces monotonically |
| 566 | // increasing instruction numbers and live ranges begin and end on non-deleted |
| 567 | // instructions. |
| 568 | return Result; |
| 569 | } |
| 570 | } |
| 571 | |
void TargetLowering::postRegallocSplitting(const SmallBitVector &RegMask) {
  // Splits the live ranges of global(/multi block) variables and runs the
  // register allocator to find registers for as many of the new variables as
  // possible.
  // TODO(manasijm): Merge the small liveranges back into multi-block ones when
  // the variables get the same register. This will reduce the amount of new
  // instructions inserted. This might involve a full dataflow analysis.
  // Also, modify the preference mechanism in the register allocator to match.

  TimerMarker _(TimerStack::TT_splitGlobalVars, Func);
  CfgSet<Variable *> SplitCandidates;

  // Find variables that do not have registers but are allowed to. Also skip
  // variables with single segment live ranges as they are not split further in
  // this function.
  for (Variable *Var : Func->getVariables()) {
    if (!Var->mustNotHaveReg() && !Var->hasReg()) {
      if (Var->getLiveRange().getNumSegments() > 1)
        SplitCandidates.insert(Var);
    }
  }
  if (SplitCandidates.empty())
    return;

  // The per-segment variables created below, to be offered to the allocator.
  CfgSet<Variable *> ExtraVars;

  // Describes how one live-range segment of a candidate variable is used.
  struct UseInfo {
    Variable *Replacing = nullptr; // the original multi-block variable
    Inst *FirstUse = nullptr;      // first non-phi use inside the segment
    Inst *LastDef = nullptr;       // last non-phi def inside the segment
    SizeT UseCount = 0;            // number of top-level source-operand uses
  };
  CfgUnorderedMap<Variable *, UseInfo> VarInfo;
  // Split the live ranges of the viable variables by node.
  // Compute metadata (UseInfo) for each of the resulting variables.
  for (auto *Var : SplitCandidates) {
    for (auto &Segment : Var->getLiveRange().getSegments()) {
      UseInfo Info;
      Info.Replacing = Var;
      auto *Node = Var->getLiveRange().getNodeForSegment(Segment.first);

      for (auto *Instr :
           getInstructionsInRange(Node, Segment.first, Segment.second)) {
        for (SizeT i = 0; i < Instr->getSrcSize(); ++i) {
          // It's safe to iterate over the top-level src operands rather than
          // using FOREACH_VAR_IN_INST(), because any variables inside e.g.
          // mem operands should already have registers.
          if (auto *Var = llvm::dyn_cast<Variable>(Instr->getSrc(i))) {
            if (Var == Info.Replacing) {
              if (Info.FirstUse == nullptr && !llvm::isa<InstPhi>(Instr)) {
                Info.FirstUse = Instr;
              }
              Info.UseCount++;
            }
          }
        }
        if (Instr->getDest() == Info.Replacing && !llvm::isa<InstPhi>(Instr)) {
          Info.LastDef = Instr;
        }
      }

      static constexpr SizeT MinUseThreshold = 3;
      // Skip if variable has less than `MinUseThreshold` uses in the segment.
      if (Info.UseCount < MinUseThreshold)
        continue;

      // Nothing to rewrite in this segment (e.g. only phi uses).
      if (!Info.FirstUse && !Info.LastDef) {
        continue;
      }

      // Make a fresh variable whose live range is exactly this one segment.
      LiveRange LR;
      LR.addSegment(Segment);
      Variable *NewVar = Func->makeVariable(Var->getType());

      NewVar->setLiveRange(LR);

      VarInfo[NewVar] = Info;

      ExtraVars.insert(NewVar);
    }
  }
  // Run the register allocator with all these new variables included
  LinearScan RegAlloc(Func);
  RegAlloc.init(RAK_Global, SplitCandidates);
  RegAlloc.scan(RegMask, getFlags().getRandomizeRegisterAllocation());

  // Modify the Cfg to use the new variables that now have registers.
  for (auto *ExtraVar : ExtraVars) {
    if (!ExtraVar->hasReg()) {
      continue;
    }

    auto &Info = VarInfo[ExtraVar];

    // Each ExtraVar was built from a single segment above.
    assert(ExtraVar->getLiveRange().getSegments().size() == 1);
    auto Segment = ExtraVar->getLiveRange().getSegments()[0];

    auto *Node =
        Info.Replacing->getLiveRange().getNodeForSegment(Segment.first);

    auto RelevantInsts =
        getInstructionsInRange(Node, Segment.first, Segment.second);

    if (RelevantInsts.empty())
      continue;

    // Replace old variables
    for (auto *Instr : RelevantInsts) {
      if (llvm::isa<InstPhi>(Instr))
        continue;
      // TODO(manasijm): Figure out how to safely enable replacing phi dest
      // variables. The issue is that we can not insert low level mov
      // instructions into the PhiList.
      for (SizeT i = 0; i < Instr->getSrcSize(); ++i) {
        // FOREACH_VAR_IN_INST() not needed. Same logic as above.
        if (auto *Var = llvm::dyn_cast<Variable>(Instr->getSrc(i))) {
          if (Var == Info.Replacing) {
            Instr->replaceSource(i, ExtraVar);
          }
        }
      }
      if (Instr->getDest() == Info.Replacing) {
        Instr->replaceDest(ExtraVar);
      }
    }

    assert(Info.FirstUse != Info.LastDef);
    assert(Info.FirstUse || Info.LastDef);

    // Insert spill code
    // NOTE(review): createLoweredMove argument order assumed to be
    // (Dest, Src): copy the original into the new variable before its first
    // use, and copy the final value back after its last def — confirm against
    // the target's createLoweredMove() declaration.
    if (Info.FirstUse != nullptr) {
      auto *NewInst =
          Func->getTarget()->createLoweredMove(ExtraVar, Info.Replacing);
      Node->getInsts().insert(instToIterator(Info.FirstUse), NewInst);
    }
    if (Info.LastDef != nullptr) {
      auto *NewInst =
          Func->getTarget()->createLoweredMove(Info.Replacing, ExtraVar);
      Node->getInsts().insertAfter(instToIterator(Info.LastDef), NewInst);
    }
  }
}
| 714 | |
Jim Stichnoth | 230d410 | 2015-09-25 17:40:32 -0700 | [diff] [blame] | 715 | void TargetLowering::markRedefinitions() { |
| 716 | // Find (non-SSA) instructions where the Dest variable appears in some source |
| 717 | // operand, and set the IsDestRedefined flag to keep liveness analysis |
| 718 | // consistent. |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 719 | for (auto Instr = Context.getCur(), E = Context.getNext(); Instr != E; |
| 720 | ++Instr) { |
| 721 | if (Instr->isDeleted()) |
Jan Voung | b3401d2 | 2015-05-18 09:38:21 -0700 | [diff] [blame] | 722 | continue; |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 723 | Variable *Dest = Instr->getDest(); |
Jim Stichnoth | 230d410 | 2015-09-25 17:40:32 -0700 | [diff] [blame] | 724 | if (Dest == nullptr) |
| 725 | continue; |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 726 | FOREACH_VAR_IN_INST(Var, *Instr) { |
Jim Stichnoth | 230d410 | 2015-09-25 17:40:32 -0700 | [diff] [blame] | 727 | if (Var == Dest) { |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 728 | Instr->setDestRedefined(); |
Jim Stichnoth | 230d410 | 2015-09-25 17:40:32 -0700 | [diff] [blame] | 729 | break; |
| 730 | } |
Jan Voung | b3401d2 | 2015-05-18 09:38:21 -0700 | [diff] [blame] | 731 | } |
| 732 | } |
| 733 | } |
| 734 | |
Jim Stichnoth | 91c773e | 2016-01-19 09:52:22 -0800 | [diff] [blame] | 735 | void TargetLowering::addFakeDefUses(const Inst *Instr) { |
| 736 | FOREACH_VAR_IN_INST(Var, *Instr) { |
| 737 | if (auto *Var64 = llvm::dyn_cast<Variable64On32>(Var)) { |
| 738 | Context.insert<InstFakeUse>(Var64->getLo()); |
| 739 | Context.insert<InstFakeUse>(Var64->getHi()); |
Jaydeep Patil | 958ddb7 | 2016-10-03 07:52:48 -0700 | [diff] [blame] | 740 | } else if (auto *VarVec = llvm::dyn_cast<VariableVecOn32>(Var)) { |
| 741 | for (Variable *Var : VarVec->getContainers()) { |
| 742 | Context.insert<InstFakeUse>(Var); |
| 743 | } |
Jim Stichnoth | 91c773e | 2016-01-19 09:52:22 -0800 | [diff] [blame] | 744 | } else { |
| 745 | Context.insert<InstFakeUse>(Var); |
| 746 | } |
| 747 | } |
| 748 | Variable *Dest = Instr->getDest(); |
| 749 | if (Dest == nullptr) |
| 750 | return; |
| 751 | if (auto *Var64 = llvm::dyn_cast<Variable64On32>(Dest)) { |
| 752 | Context.insert<InstFakeDef>(Var64->getLo()); |
| 753 | Context.insert<InstFakeDef>(Var64->getHi()); |
Jaydeep Patil | 958ddb7 | 2016-10-03 07:52:48 -0700 | [diff] [blame] | 754 | } else if (auto *VarVec = llvm::dyn_cast<VariableVecOn32>(Dest)) { |
| 755 | for (Variable *Var : VarVec->getContainers()) { |
| 756 | Context.insert<InstFakeDef>(Var); |
| 757 | } |
Jim Stichnoth | 91c773e | 2016-01-19 09:52:22 -0800 | [diff] [blame] | 758 | } else { |
| 759 | Context.insert<InstFakeDef>(Dest); |
| 760 | } |
| 761 | } |
| 762 | |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 763 | void TargetLowering::sortVarsByAlignment(VarList &Dest, |
| 764 | const VarList &Source) const { |
| 765 | Dest = Source; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 766 | // Instead of std::sort, we could do a bucket sort with log2(alignment) as |
| 767 | // the buckets, if performance is an issue. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 768 | std::sort(Dest.begin(), Dest.end(), |
| 769 | [this](const Variable *V1, const Variable *V2) { |
Jim Stichnoth | 53dae72 | 2016-08-31 15:32:32 -0700 | [diff] [blame] | 770 | const size_t WidthV1 = typeWidthInBytesOnStack(V1->getType()); |
| 771 | const size_t WidthV2 = typeWidthInBytesOnStack(V2->getType()); |
| 772 | if (WidthV1 == WidthV2) |
| 773 | return V1->getIndex() < V2->getIndex(); |
| 774 | return WidthV1 > WidthV2; |
Jim Stichnoth | 8e6bf6e | 2015-06-03 15:58:12 -0700 | [diff] [blame] | 775 | }); |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 776 | } |
| 777 | |
// Computes the parameters needed to lay out the stack spill areas:
//  - SortedSpilledVariables: the variables needing stack slots, widest first;
//  - RegsUsed: bits set for each register held by a (non-rematerializable)
//    register-allocated variable;
//  - GlobalsSize: total bytes for multi-block ("global") spilled variables;
//  - SpillAreaSizeBytes: bytes for the locals area (max over nodes when
//    coalescing) plus untracked variables and any test padding;
//  - SpillAreaAlignmentBytes / LocalsSlotsAlignmentBytes: alignment of the
//    first (widest) variable placed in each area.
// TargetVarHook lets the target claim a variable so it is skipped here.
void TargetLowering::getVarStackSlotParams(
    VarList &SortedSpilledVariables, SmallBitVector &RegsUsed,
    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
    std::function<bool(Variable *)> TargetVarHook) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // Mark every variable that appears as a dest or source operand of some
  // non-deleted instruction; the rest need no stack slot.
  BitVector IsVarReferenced(Func->getNumVariables());
  for (CfgNode *Node : Func->getNodes()) {
    for (Inst &Instr : Node->getInsts()) {
      if (Instr.isDeleted())
        continue;
      if (const Variable *Var = Instr.getDest())
        IsVarReferenced[Var->getIndex()] = true;
      FOREACH_VAR_IN_INST(Var, Instr) {
        IsVarReferenced[Var->getIndex()] = true;
      }
    }
  }

  // If SimpleCoalescing is false, each variable without a register gets its
  // own unique stack slot, which leads to large stack frames. If
  // SimpleCoalescing is true, then each "global" variable without a register
  // gets its own slot, but "local" variable slots are reused across basic
  // blocks. E.g., if A and B are local to block 1 and C is local to block 2,
  // then C may share a slot with A or B.
  //
  // We cannot coalesce stack slots if this function calls a "returns twice"
  // function. In that case, basic blocks may be revisited, and variables local
  // to those basic blocks are actually live until after the called function
  // returns a second time.
  const bool SimpleCoalescing = !callsReturnsTwice();

  CfgVector<size_t> LocalsSize(Func->getNumNodes());
  const VarList &Variables = Func->getVariables();
  VarList SpilledVariables;
  for (Variable *Var : Variables) {
    if (Var->hasReg()) {
      // Don't consider a rematerializable variable to be an actual register use
      // (specifically of the frame pointer). Otherwise, the prolog may decide
      // to save the frame pointer twice - once because of the explicit need for
      // a frame pointer, and once because of an active use of a callee-save
      // register.
      if (!Var->isRematerializable())
        RegsUsed[Var->getRegNum()] = true;
      continue;
    }
    // An argument either does not need a stack slot (if passed in a register)
    // or already has one (if passed on the stack).
    if (Var->getIsArg()) {
      if (!Var->hasReg()) {
        assert(!Var->hasStackOffset());
        Var->setHasStackOffset();
      }
      continue;
    }
    // An unreferenced variable doesn't need a stack slot.
    if (!IsVarReferenced[Var->getIndex()])
      continue;
    // Check a target-specific variable (it may end up sharing stack slots) and
    // not need accounting here.
    if (TargetVarHook(Var))
      continue;
    assert(!Var->hasStackOffset());
    Var->setHasStackOffset();
    SpilledVariables.push_back(Var);
  }

  SortedSpilledVariables.reserve(SpilledVariables.size());
  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);

  // Accumulate area sizes, using each variable's on-stack width.
  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    // We have sorted by alignment, so the first variable we encounter that is
    // located in each area determines the max alignment for the area.
    if (!*SpillAreaAlignmentBytes)
      *SpillAreaAlignmentBytes = Increment;
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        *GlobalsSize += Increment;
      } else {
        // Locals coalesce per node: the area only needs to be as large as the
        // biggest single node's locals.
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
        if (!*LocalsSlotsAlignmentBytes)
          *LocalsSlotsAlignmentBytes = Increment;
      }
    } else {
      *SpillAreaSizeBytes += Increment;
    }
  }
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding.
  *SpillAreaSizeBytes += getFlags().getTestStackExtra();
}
| 873 | |
| 874 | void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset, |
| 875 | uint32_t SpillAreaAlignmentBytes, |
| 876 | size_t GlobalsSize, |
| 877 | uint32_t LocalsSlotsAlignmentBytes, |
| 878 | uint32_t *SpillAreaPaddingBytes, |
| 879 | uint32_t *LocalsSlotsPaddingBytes) { |
| 880 | if (SpillAreaAlignmentBytes) { |
| 881 | uint32_t PaddingStart = SpillAreaStartOffset; |
| 882 | uint32_t SpillAreaStart = |
| 883 | Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes); |
| 884 | *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart; |
| 885 | } |
| 886 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 887 | // If there are separate globals and locals areas, make sure the locals area |
| 888 | // is aligned by padding the end of the globals area. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 889 | if (LocalsSlotsAlignmentBytes) { |
| 890 | uint32_t GlobalsAndSubsequentPaddingSize = GlobalsSize; |
| 891 | GlobalsAndSubsequentPaddingSize = |
| 892 | Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes); |
| 893 | *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize; |
| 894 | } |
| 895 | } |
| 896 | |
// Assigns a stack offset to each spilled variable, walking the pre-sorted
// (widest-first) list. Multi-block variables are packed into the globals
// space; with coalescing, block-local variables reuse per-node offsets placed
// after the globals area. Offsets are negated when a frame pointer is used,
// otherwise they are expressed relative to the end of the spill area.
void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
                                         size_t SpillAreaPaddingBytes,
                                         size_t SpillAreaSizeBytes,
                                         size_t GlobalsAndSubsequentPaddingSize,
                                         bool UsesFramePointer) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding. This assumes that
  // SpillAreaSizeBytes has accounted for the extra test padding. When
  // UseFramePointer is true, the offset depends on the padding, not just the
  // SpillAreaSizeBytes. On the other hand, when UseFramePointer is false, the
  // offsets depend on the gap between SpillAreaSizeBytes and
  // SpillAreaPaddingBytes, so we don't increment that.
  size_t TestPadding = getFlags().getTestStackExtra();
  if (UsesFramePointer)
    SpillAreaPaddingBytes += TestPadding;
  size_t GlobalsSpaceUsed = SpillAreaPaddingBytes;
  size_t NextStackOffset = SpillAreaPaddingBytes;
  // Per-node running size of block-local slots (reused across blocks).
  CfgVector<size_t> LocalsSize(Func->getNumNodes());
  const bool SimpleCoalescing = !callsReturnsTwice();

  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        // Globals are packed sequentially at the start of the spill area.
        GlobalsSpaceUsed += Increment;
        NextStackOffset = GlobalsSpaceUsed;
      } else {
        // Locals for each node start fresh after the (padded) globals area.
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        NextStackOffset = SpillAreaPaddingBytes +
                          GlobalsAndSubsequentPaddingSize +
                          LocalsSize[NodeIndex];
      }
    } else {
      // No coalescing: every variable gets its own unique slot.
      NextStackOffset += Increment;
    }
    if (UsesFramePointer)
      Var->setStackOffset(-NextStackOffset);
    else
      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
  }
}
| 940 | |
Karl Schimpf | 20070e8 | 2016-03-17 13:30:13 -0700 | [diff] [blame] | 941 | InstCall *TargetLowering::makeHelperCall(RuntimeHelper FuncID, Variable *Dest, |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 942 | SizeT MaxSrcs) { |
Jim Stichnoth | 5bff61c | 2015-10-28 09:26:00 -0700 | [diff] [blame] | 943 | constexpr bool HasTailCall = false; |
Karl Schimpf | 20070e8 | 2016-03-17 13:30:13 -0700 | [diff] [blame] | 944 | Constant *CallTarget = Ctx->getRuntimeHelperFunc(FuncID); |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 945 | InstCall *Call = |
| 946 | InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); |
| 947 | return Call; |
| 948 | } |
| 949 | |
Andrew Scull | cfa628b | 2015-08-20 14:23:05 -0700 | [diff] [blame] | 950 | bool TargetLowering::shouldOptimizeMemIntrins() { |
Jim Stichnoth | 386b52e | 2016-08-05 15:18:41 -0700 | [diff] [blame] | 951 | return Func->getOptLevel() >= Opt_1 || getFlags().getForceMemIntrinOpt(); |
Andrew Scull | cfa628b | 2015-08-20 14:23:05 -0700 | [diff] [blame] | 952 | } |
| 953 | |
Eric Holk | cfc2553 | 2016-02-09 17:47:58 -0800 | [diff] [blame] | 954 | void TargetLowering::scalarizeArithmetic(InstArithmetic::OpKind Kind, |
| 955 | Variable *Dest, Operand *Src0, |
| 956 | Operand *Src1) { |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 957 | scalarizeInstruction( |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 958 | Dest, [this, Kind](Variable *Dest, Operand *Src0, Operand *Src1) { |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 959 | return Context.insert<InstArithmetic>(Kind, Dest, Src0, Src1); |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 960 | }, Src0, Src1); |
Eric Holk | cfc2553 | 2016-02-09 17:47:58 -0800 | [diff] [blame] | 961 | } |
| 962 | |
Jim Stichnoth | 8ff4b28 | 2016-01-04 15:39:06 -0800 | [diff] [blame] | 963 | void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C, |
| 964 | const char *Suffix) const { |
Jim Stichnoth | 20b71f5 | 2015-06-24 15:52:24 -0700 | [diff] [blame] | 965 | if (!BuildDefs::dump()) |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 966 | return; |
| 967 | Ostream &Str = Ctx->getStrEmit(); |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 968 | const std::string &EmitStr = C->getEmitString(); |
John Porto | 27fddcc | 2016-02-02 15:06:09 -0800 | [diff] [blame] | 969 | if (!EmitStr.empty()) { |
| 970 | // C has a custom emit string, so we use it instead of the canonical |
| 971 | // Name + Offset form. |
| 972 | Str << EmitStr; |
| 973 | return; |
| 974 | } |
Jim Stichnoth | 98ba006 | 2016-03-07 09:26:22 -0800 | [diff] [blame] | 975 | Str << C->getName() << Suffix; |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 976 | RelocOffsetT Offset = C->getOffset(); |
| 977 | if (Offset) { |
| 978 | if (Offset > 0) |
| 979 | Str << "+"; |
| 980 | Str << Offset; |
| 981 | } |
| 982 | } |
| 983 | |
Jim Stichnoth | bbca754 | 2015-02-11 16:08:31 -0800 | [diff] [blame] | 984 | std::unique_ptr<TargetDataLowering> |
| 985 | TargetDataLowering::createLowering(GlobalContext *Ctx) { |
Karl Schimpf | d469994 | 2016-04-02 09:55:31 -0700 | [diff] [blame] | 986 | TargetArch Target = getFlags().getTargetArch(); |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 987 | switch (Target) { |
| 988 | default: |
Jim Stichnoth | 816fd68 | 2016-02-16 05:47:32 -0800 | [diff] [blame] | 989 | badTargetFatalError(Target); |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 990 | #define SUBZERO_TARGET(X) \ |
Jim Stichnoth | 999a22f | 2016-03-12 10:22:53 -0800 | [diff] [blame] | 991 | case TARGET_LOWERING_CLASS_FOR(X): \ |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 992 | return ::X::createTargetDataLowering(Ctx); |
Jim Stichnoth | 999a22f | 2016-03-12 10:22:53 -0800 | [diff] [blame] | 993 | #include "SZTargets.def" |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 994 | #undef SUBZERO_TARGET |
| 995 | } |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 996 | } |
| 997 | |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 998 | TargetDataLowering::~TargetDataLowering() = default; |
Jan Voung | 839c4ce | 2014-07-28 15:19:43 -0700 | [diff] [blame] | 999 | |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 1000 | namespace { |
| 1001 | |
Jim Stichnoth | 98ba006 | 2016-03-07 09:26:22 -0800 | [diff] [blame] | 1002 | // dataSectionSuffix decides whether to use SectionSuffix or VarName as data |
| 1003 | // section suffix. Essentially, when using separate data sections for globals |
| 1004 | // SectionSuffix is not necessary. |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 1005 | std::string dataSectionSuffix(const std::string &SectionSuffix, |
| 1006 | const std::string &VarName, |
| 1007 | const bool DataSections) { |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 1008 | if (SectionSuffix.empty() && !DataSections) { |
| 1009 | return ""; |
| 1010 | } |
| 1011 | |
| 1012 | if (DataSections) { |
| 1013 | // With data sections we don't need to use the SectionSuffix. |
Jim Stichnoth | 98ba006 | 2016-03-07 09:26:22 -0800 | [diff] [blame] | 1014 | return "." + VarName; |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 1015 | } |
| 1016 | |
| 1017 | assert(!SectionSuffix.empty()); |
| 1018 | return "." + SectionSuffix; |
| 1019 | } |
| 1020 | |
| 1021 | } // end of anonymous namespace |
| 1022 | |
// Emits the textual assembly for one global variable: .type, section
// selection, optional .globl and alignment, the label, the initializer
// contents (or a .zero fill), and the closing .size directive.
void TargetDataLowering::emitGlobal(const VariableDeclaration &Var,
                                    const std::string &SectionSuffix) {
  // Textual emission is compiled out of non-dump builds.
  if (!BuildDefs::dump())
    return;

  // If external and not initialized, this must be a cross test. Don't generate
  // a declaration for such cases.
  const bool IsExternal = Var.isExternal() || getFlags().getDisableInternal();
  if (IsExternal && !Var.hasInitializer())
    return;

  Ostream &Str = Ctx->getStrEmit();
  const bool HasNonzeroInitializer = Var.hasNonzeroInitializer();
  const bool IsConstant = Var.getIsConstant();
  const SizeT Size = Var.getNumBytes();
  const std::string Name = Var.getName().toString();

  Str << "\t.type\t" << Name << ",%object\n";

  const bool UseDataSections = getFlags().getDataSections();
  const bool UseNonsfi = getFlags().getUseNonsfi();
  const std::string Suffix =
      dataSectionSuffix(SectionSuffix, Name, UseDataSections);
  // Choose the section by kind. Note the flags: constant non-SFI data goes
  // to .data.rel.ro and stays writable ("aw"), plain constants go to a
  // read-only .rodata ("a"), and zero-initialized data to .bss (%nobits).
  if (IsConstant && UseNonsfi)
    Str << "\t.section\t.data.rel.ro" << Suffix << ",\"aw\",%progbits\n";
  else if (IsConstant)
    Str << "\t.section\t.rodata" << Suffix << ",\"a\",%progbits\n";
  else if (HasNonzeroInitializer)
    Str << "\t.section\t.data" << Suffix << ",\"aw\",%progbits\n";
  else
    Str << "\t.section\t.bss" << Suffix << ",\"aw\",%nobits\n";

  if (IsExternal)
    Str << "\t.globl\t" << Name << "\n";

  const uint32_t Align = Var.getAlignment();
  if (Align > 1) {
    assert(llvm::isPowerOf2_32(Align));
    // Use the .p2align directive, since the .align N directive can either
    // interpret N as bytes, or power of 2 bytes, depending on the target.
    Str << "\t.p2align\t" << llvm::Log2_32(Align) << "\n";
  }

  Str << Name << ":\n";

  if (HasNonzeroInitializer) {
    // Emit each initializer chunk in declaration order: literal bytes,
    // zero runs, or relocations against other declarations.
    for (const auto *Init : Var.getInitializers()) {
      switch (Init->getKind()) {
      case VariableDeclaration::Initializer::DataInitializerKind: {
        const auto &Data =
            llvm::cast<VariableDeclaration::DataInitializer>(Init)
                ->getContents();
        // Mask to 8 bits so a (possibly signed) char never sign-extends
        // into the printed value.
        for (SizeT i = 0; i < Init->getNumBytes(); ++i) {
          Str << "\t.byte\t" << (((unsigned)Data[i]) & 0xff) << "\n";
        }
        break;
      }
      case VariableDeclaration::Initializer::ZeroInitializerKind:
        Str << "\t.zero\t" << Init->getNumBytes() << "\n";
        break;
      case VariableDeclaration::Initializer::RelocInitializerKind: {
        const auto *Reloc =
            llvm::cast<VariableDeclaration::RelocInitializer>(Init);
        Str << "\t" << getEmit32Directive() << "\t";
        Str << Reloc->getDeclaration()->getName();
        if (Reloc->hasFixup()) {
          // TODO(jpp): this is ARM32 specific.
          Str << "(GOTOFF)";
        }
        if (RelocOffsetT Offset = Reloc->getOffset()) {
          // Print the offset with an explicit sign. INT32_MIN is special-
          // cased because negating it would overflow, so it is emitted
          // through the "+" path as a negative literal instead.
          if (Offset >= 0 || (Offset == INT32_MIN))
            Str << " + " << Offset;
          else
            Str << " - " << -Offset;
        }
        Str << "\n";
        break;
      }
      }
    }
  } else {
    // NOTE: for non-constant zero initializers, this is BSS (no bits), so an
    // ELF writer would not write to the file, and only track virtual offsets,
    // but the .s writer still needs this .zero and cannot simply use the .size
    // to advance offsets.
    Str << "\t.zero\t" << Size << "\n";
  }

  Str << "\t.size\t" << Name << ", " << Size << "\n";
}
| 1113 | |
// Factory: instantiates the TargetHeaderLowering subclass matching the
// target architecture selected in the global flags. One switch case per
// enabled backend is generated by expanding SUBZERO_TARGET over
// SZTargets.def; an architecture that was not compiled in falls through
// to the fatal-error default.
std::unique_ptr<TargetHeaderLowering>
TargetHeaderLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = getFlags().getTargetArch();
  switch (Target) {
  default:
    badTargetFatalError(Target);
#define SUBZERO_TARGET(X)                                                      \
  case TARGET_LOWERING_CLASS_FOR(X):                                           \
    return ::X::createTargetHeaderLowering(Ctx);
#include "SZTargets.def"
#undef SUBZERO_TARGET
  }
}
| 1127 | |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 1128 | TargetHeaderLowering::~TargetHeaderLowering() = default; |
Jan Voung | fb79284 | 2015-06-11 15:27:50 -0700 | [diff] [blame] | 1129 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1130 | } // end of namespace Ice |