Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1 | //===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===// |
| 2 | // |
| 3 | // The Subzero Code Generator |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 9 | /// |
| 10 | /// \file |
| 11 | /// This file declares the TargetLowering, LoweringContext, and |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 12 | /// TargetDataLowering classes. TargetLowering is an abstract class used to |
| 13 | /// drive the translation/lowering process. LoweringContext maintains a context |
| 14 | /// for lowering each instruction, offering conveniences such as iterating over |
| 15 | /// non-deleted instructions. TargetDataLowering is an abstract class used to |
| 16 | /// drive the lowering/emission of global initializers, external global |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 17 | /// declarations, and internal constant pools. |
| 18 | /// |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 19 | //===----------------------------------------------------------------------===// |
| 20 | |
| 21 | #ifndef SUBZERO_SRC_ICETARGETLOWERING_H |
| 22 | #define SUBZERO_SRC_ICETARGETLOWERING_H |
| 23 | |
| 24 | #include "IceDefs.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 25 | #include "IceInst.h" // for the names of the Inst subtypes |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 26 | #include "IceOperand.h" |
Jim Stichnoth | a18cc9c | 2014-09-30 19:10:22 -0700 | [diff] [blame] | 27 | #include "IceTypes.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 28 | |
| 29 | namespace Ice { |
| 30 | |
// UnimplementedError is defined as a macro (rather than an inline function) so
// that the __FILE__/__LINE__ reported by llvm_unreachable refer to the actual
// call site, giving useful line numbers in diagnostics.
#define UnimplementedError(Flags)                                              \
  do {                                                                         \
    if (!static_cast<const ClFlags &>(Flags).getSkipUnimplemented()) {         \
      /* Use llvm_unreachable instead of report_fatal_error, which gives \
         better stack traces. */                                               \
      llvm_unreachable("Not yet implemented");                                 \
      /* NOTE(review): abort() looks like a safety net for builds where \
         llvm_unreachable compiles to an unreachable hint rather than a \
         trap — confirm against the LLVM ErrorHandling docs. */                \
      abort();                                                                 \
    }                                                                          \
  } while (0)
| 42 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 43 | /// LoweringContext makes it easy to iterate through non-deleted instructions in |
| 44 | /// a node, and insert new (lowered) instructions at the current point. Along |
| 45 | /// with the instruction list container and associated iterators, it holds the |
| 46 | /// current node, which is needed when inserting new instructions in order to |
| 47 | /// track whether variables are used as single-block or multi-block. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 48 | class LoweringContext { |
Jim Stichnoth | 7b451a9 | 2014-10-15 14:39:23 -0700 | [diff] [blame] | 49 | LoweringContext(const LoweringContext &) = delete; |
| 50 | LoweringContext &operator=(const LoweringContext &) = delete; |
| 51 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 52 | public: |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 53 | LoweringContext() = default; |
| 54 | ~LoweringContext() = default; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 55 | void init(CfgNode *Node); |
| 56 | Inst *getNextInst() const { |
| 57 | if (Next == End) |
Jim Stichnoth | ae95320 | 2014-12-20 06:17:49 -0800 | [diff] [blame] | 58 | return nullptr; |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 59 | return Next; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 60 | } |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 61 | Inst *getNextInst(InstList::iterator &Iter) const { |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 62 | advanceForward(Iter); |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 63 | if (Iter == End) |
Jim Stichnoth | ae95320 | 2014-12-20 06:17:49 -0800 | [diff] [blame] | 64 | return nullptr; |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 65 | return Iter; |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 66 | } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 67 | CfgNode *getNode() const { return Node; } |
| 68 | bool atEnd() const { return Cur == End; } |
| 69 | InstList::iterator getCur() const { return Cur; } |
Jim Stichnoth | 5d2fa0c | 2014-12-01 09:30:55 -0800 | [diff] [blame] | 70 | InstList::iterator getNext() const { return Next; } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 71 | InstList::iterator getEnd() const { return End; } |
| 72 | void insert(Inst *Inst); |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 73 | Inst *getLastInserted() const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 74 | void advanceCur() { Cur = Next; } |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 75 | void advanceNext() { advanceForward(Next); } |
Jim Stichnoth | a3f57b9 | 2015-07-30 12:46:04 -0700 | [diff] [blame] | 76 | void setCur(InstList::iterator C) { Cur = C; } |
| 77 | void setNext(InstList::iterator N) { Next = N; } |
Jim Stichnoth | 336f6c4 | 2014-10-30 15:01:31 -0700 | [diff] [blame] | 78 | void rewind(); |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 79 | void setInsertPoint(const InstList::iterator &Position) { Next = Position; } |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 80 | void availabilityReset(); |
| 81 | void availabilityUpdate(); |
| 82 | Variable *availabilityGet(Operand *Src) const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 83 | |
| 84 | private: |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 85 | /// Node is the argument to Inst::updateVars(). |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 86 | CfgNode *Node = nullptr; |
| 87 | Inst *LastInserted = nullptr; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 88 | /// Cur points to the current instruction being considered. It is guaranteed |
| 89 | /// to point to a non-deleted instruction, or to be End. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 90 | InstList::iterator Cur; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 91 | /// Next doubles as a pointer to the next valid instruction (if any), and the |
| 92 | /// new-instruction insertion point. It is also updated for the caller in case |
| 93 | /// the lowering consumes more than one high-level instruction. It is |
| 94 | /// guaranteed to point to a non-deleted instruction after Cur, or to be End. |
| 95 | // TODO: Consider separating the notion of "next valid instruction" and "new |
| 96 | // instruction insertion point", to avoid confusion when previously-deleted |
| 97 | // instructions come between the two points. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 98 | InstList::iterator Next; |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 99 | /// Begin is a copy of Insts.begin(), used if iterators are moved backward. |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 100 | InstList::iterator Begin; |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 101 | /// End is a copy of Insts.end(), used if Next needs to be advanced. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 102 | InstList::iterator End; |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 103 | /// LastDest and LastSrc capture the parameters of the last "Dest=Src" simple |
| 104 | /// assignment inserted (provided Src is a variable). This is used for simple |
| 105 | /// availability analysis. |
| 106 | Variable *LastDest = nullptr; |
| 107 | Variable *LastSrc = nullptr; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 108 | |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 109 | void skipDeleted(InstList::iterator &I) const; |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 110 | void advanceForward(InstList::iterator &I) const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 111 | }; |
| 112 | |
/// A helper class to advance the LoweringContext at each loop iteration.
/// Construct one at the top of a lowering loop body; when it goes out of scope
/// at the end of the iteration, its destructor advances the context's Cur and
/// Next iterators (in that order), so the loop body itself never has to
/// remember to advance.
class PostIncrLoweringContext {
  PostIncrLoweringContext() = delete;
  PostIncrLoweringContext(const PostIncrLoweringContext &) = delete;
  PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete;

public:
  explicit PostIncrLoweringContext(LoweringContext &Context)
      : Context(Context) {}
  ~PostIncrLoweringContext() {
    // Order matters: advanceCur() sets Cur = Next before Next is advanced.
    Context.advanceCur();
    Context.advanceNext();
  }

private:
  // Non-owning reference; the LoweringContext must outlive this helper.
  LoweringContext &Context;
};
| 130 | |
/// TargetLowering is the abstract driver of the translation/lowering process.
/// Each target architecture subclasses it and implements the pure-virtual
/// lowering hooks; createLowering() is the factory that selects the subclass.
class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  // TODO(jvoung): return a unique_ptr like the other factory functions.
  static TargetLowering *createLowering(TargetArch Target, Cfg *Func);
  /// Dispatches one-time initialization to the selected target. Each target
  /// is expected to provide its own public static method:
  ///   static void staticInit();
  static void staticInit(TargetArch Target);
  static std::unique_ptr<Assembler> createAssembler(TargetArch Target,
                                                    Cfg *Func);
  /// Runs the translate pass matching the optimization level selected in the
  /// global flags (Om1/O0/O1/O2).
  void translate() {
    switch (Ctx->getFlags().getOptLevel()) {
    case Opt_m1:
      translateOm1();
      break;
    case Opt_0:
      translateO0();
      break;
    case Opt_1:
      translateO1();
      break;
    case Opt_2:
      translateO2();
      break;
    }
  }
  // The default translate implementations record an error on the Cfg; a
  // target overrides the levels it supports.
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion(RandomNumberGenerator &RNG);
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific insertion
  /// point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the current
  /// node. The main intention is to do what's needed to keep the unlowered Phi
  /// instructions consistent with the lowered non-Phi instructions, e.g. to
  /// lower 64-bit operands on a 32-bit target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns true if
  /// some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }

  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical register. This is
  /// generally used to get very direct access to the register such as in the
  /// prolog or epilog or for marking scratch registers as killed by a call. If
  /// a Type is not provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(SizeT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

  virtual bool hasFramePointer() const { return false; }
  virtual SizeT getStackReg() const = 0;
  virtual SizeT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;

  /// Return whether a 64-bit Variable should be split into a Variable64On32.
  virtual bool shouldSplitToVariable64On32(Type Ty) const = 0;

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the "returns
  /// twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  int32_t getStackAdjustment() const { return StackAdjustment; }
  void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; }
  void resetStackAdjustment() { StackAdjustment = 0; }
  /// Label and jump-table numbers are handed out sequentially per function.
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }

  /// Bit flags for selecting register subsets in getRegisterSet().
  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  using RegSetMask = uint32_t;

  virtual llvm::SmallBitVector getRegisterSet(RegSetMask Include,
                                              RegSetMask Exclude) const = 0;
  virtual const llvm::SmallBitVector &getRegisterSetForType(Type Ty) const = 0;
  virtual const llvm::SmallBitVector &getAliasesForRegister(SizeT) const = 0;

  void regAlloc(RegAllocKind Kind);

  virtual void
  makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation,
                                const llvm::SmallBitVector &ExcludeRegisters,
                                uint64_t Salt) const = 0;

  /// Save/restore any mutable state for the situation where code emission needs
  /// multiple passes, such as sandboxing or relaxation. Subclasses may provide
  /// their own implementation, but should be sure to also call the parent
  /// class's methods.
  virtual void snapshotEmitState() {
    SnapshotStackAdjustment = StackAdjustment;
  }
  virtual void rollbackEmitState() {
    StackAdjustment = SnapshotStackAdjustment;
  }

  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR) const;
  void emit(const ConstantRelocatable *CR) const;
  virtual const char *getConstantPrefix() const = 0;

  // Per-target constant emission hooks, one overload per constant kind.
  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  virtual ~TargetLowering() = default;

protected:
  explicit TargetLowering(Cfg *Func);
  // One pure-virtual lowering hook per high-level instruction kind; lower()
  // dispatches to these.
  virtual void lowerAlloca(const InstAlloca *Inst) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Inst) = 0;
  virtual void lowerAssign(const InstAssign *Inst) = 0;
  virtual void lowerBr(const InstBr *Inst) = 0;
  virtual void lowerCall(const InstCall *Inst) = 0;
  virtual void lowerCast(const InstCast *Inst) = 0;
  virtual void lowerFcmp(const InstFcmp *Inst) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Inst) = 0;
  virtual void lowerIcmp(const InstIcmp *Inst) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Inst) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Inst) = 0;
  virtual void lowerLoad(const InstLoad *Inst) = 0;
  virtual void lowerPhi(const InstPhi *Inst) = 0;
  virtual void lowerRet(const InstRet *Inst) = 0;
  virtual void lowerSelect(const InstSelect *Inst) = 0;
  virtual void lowerStore(const InstStore *Inst) = 0;
  virtual void lowerSwitch(const InstSwitch *Inst) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void doMockBoundsCheck(Operand *) {}
  virtual void randomlyInsertNop(float Probability,
                                 RandomNumberGenerator &RNG) = 0;
  /// This gives the target an opportunity to post-process the lowered expansion
  /// before returning.
  virtual void postLower() {}

  /// Find (non-SSA) instructions where the Dest variable appears in some source
  /// operand, and set the IsDestRedefined flag. This keeps liveness analysis
  /// consistent.
  void markRedefinitions();

  /// Make a pass over the Cfg to determine which variables need stack slots and
  /// place them in a sorted list (SortedSpilledVariables). Among those, vars,
  /// classify the spill variables as local to the basic block vs global
  /// (multi-block) in order to compute the parameters GlobalsSize and
  /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of
  /// locals is disallowed) along with alignments required for variables in each
  /// area. We rely on accurate VMetadata in order to classify a variable as
  /// global vs local (otherwise the variable is conservatively global). The
  /// in-args should be initialized to 0.
  ///
  /// This is only a pre-pass and the actual stack slot assignment is handled
  /// separately.
  ///
  /// There may be target-specific Variable types, which will be handled by
  /// TargetVarHook. If the TargetVarHook returns true, then the variable is
  /// skipped and not considered with the rest of the spilled variables.
  void getVarStackSlotParams(VarList &SortedSpilledVariables,
                             llvm::SmallBitVector &RegsUsed,
                             size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                             uint32_t *SpillAreaAlignmentBytes,
                             uint32_t *LocalsSlotsAlignmentBytes,
                             std::function<bool(Variable *)> TargetVarHook);

  /// Calculate the amount of padding needed to align the local and global areas
  /// to the required alignment. This assumes the globals/locals layout used by
  /// getVarStackSlotParams and assignVarStackSlots.
  void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                            uint32_t SpillAreaAlignmentBytes,
                            size_t GlobalsSize,
                            uint32_t LocalsSlotsAlignmentBytes,
                            uint32_t *SpillAreaPaddingBytes,
                            uint32_t *LocalsSlotsPaddingBytes);

  /// Make a pass through the SortedSpilledVariables and actually assign stack
  /// slots. SpillAreaPaddingBytes takes into account stack alignment padding.
  /// The SpillArea starts after that amount of padding. This matches the scheme
  /// in getVarStackSlotParams, where there may be a separate multi-block global
  /// var spill area and a local var spill area.
  void assignVarStackSlots(VarList &SortedSpilledVariables,
                           size_t SpillAreaPaddingBytes,
                           size_t SpillAreaSizeBytes,
                           size_t GlobalsAndSubsequentPaddingSize,
                           bool UsesFramePointer);

  /// Sort the variables in Source based on required alignment. The variables
  /// with the largest alignment need are placed in the front of the Dest list.
  void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

  /// Make a call to an external helper function.
  InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                           SizeT MaxSrcs);

  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert(InstBundleLock::create(Func, BundleOption));
  }
  void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); }
  void _set_dest_redefined() { Context.getLastInserted()->setDestRedefined(); }

  bool shouldOptimizeMemIntrins();

  Cfg *Func;
  GlobalContext *Ctx;
  bool HasComputedFrame = false;
  bool CallsReturnsTwice = false;
  /// StackAdjustment keeps track of the current stack offset from its natural
  /// location, e.g. as arguments are pushed for a function call or as
  /// fixed-size alloca instructions are executed in the entry block.
  int32_t StackAdjustment = 0;
  SizeT NextLabelNumber = 0;
  SizeT NextJumpTableNumber = 0;
  LoweringContext Context;

  // Runtime helper function names. "__Sz_*" names are Subzero-provided
  // helpers; the rest (e.g. "__divsi3", "memcpy") are standard runtime/libc
  // entry points.
  const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
  const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
  const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
  const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
  const static constexpr char *H_call_ctpop_i32 = "__popcountsi2";
  const static constexpr char *H_call_ctpop_i64 = "__popcountdi2";
  const static constexpr char *H_call_longjmp = "longjmp";
  const static constexpr char *H_call_memcpy = "memcpy";
  const static constexpr char *H_call_memmove = "memmove";
  const static constexpr char *H_call_memset = "memset";
  const static constexpr char *H_call_read_tp = "__nacl_read_tp";
  const static constexpr char *H_call_setjmp = "setjmp";
  const static constexpr char *H_fptosi_f32_i64 = "__Sz_fptosi_f32_i64";
  const static constexpr char *H_fptosi_f64_i64 = "__Sz_fptosi_f64_i64";
  const static constexpr char *H_fptoui_4xi32_f32 = "__Sz_fptoui_4xi32_f32";
  const static constexpr char *H_fptoui_f32_i32 = "__Sz_fptoui_f32_i32";
  const static constexpr char *H_fptoui_f32_i64 = "__Sz_fptoui_f32_i64";
  const static constexpr char *H_fptoui_f64_i32 = "__Sz_fptoui_f64_i32";
  const static constexpr char *H_fptoui_f64_i64 = "__Sz_fptoui_f64_i64";
  const static constexpr char *H_frem_f32 = "fmodf";
  const static constexpr char *H_frem_f64 = "fmod";
  const static constexpr char *H_sdiv_i32 = "__divsi3";
  const static constexpr char *H_sdiv_i64 = "__divdi3";
  const static constexpr char *H_sitofp_i64_f32 = "__Sz_sitofp_i64_f32";
  const static constexpr char *H_sitofp_i64_f64 = "__Sz_sitofp_i64_f64";
  const static constexpr char *H_srem_i32 = "__modsi3";
  const static constexpr char *H_srem_i64 = "__moddi3";
  const static constexpr char *H_udiv_i32 = "__udivsi3";
  const static constexpr char *H_udiv_i64 = "__udivdi3";
  const static constexpr char *H_uitofp_4xi32_4xf32 = "__Sz_uitofp_4xi32_4xf32";
  const static constexpr char *H_uitofp_i32_f32 = "__Sz_uitofp_i32_f32";
  const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
  const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
  const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
  const static constexpr char *H_urem_i32 = "__umodsi3";
  const static constexpr char *H_urem_i64 = "__umoddi3";

private:
  // Saved copy of StackAdjustment for snapshotEmitState()/rollbackEmitState().
  int32_t SnapshotStackAdjustment = 0;
};
| 433 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 434 | /// TargetDataLowering is used for "lowering" data including initializers for |
| 435 | /// global variables, and the internal constant pools. It is separated out from |
| 436 | /// TargetLowering because it does not require a Cfg. |
Jan Voung | 72984d8 | 2015-01-29 14:42:38 -0800 | [diff] [blame] | 437 | class TargetDataLowering { |
| 438 | TargetDataLowering() = delete; |
| 439 | TargetDataLowering(const TargetDataLowering &) = delete; |
| 440 | TargetDataLowering &operator=(const TargetDataLowering &) = delete; |
Jim Stichnoth | 7b451a9 | 2014-10-15 14:39:23 -0700 | [diff] [blame] | 441 | |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 442 | public: |
Jim Stichnoth | bbca754 | 2015-02-11 16:08:31 -0800 | [diff] [blame] | 443 | static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx); |
Jan Voung | 72984d8 | 2015-01-29 14:42:38 -0800 | [diff] [blame] | 444 | virtual ~TargetDataLowering(); |
Jan Voung | 839c4ce | 2014-07-28 15:19:43 -0700 | [diff] [blame] | 445 | |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 446 | virtual void lowerGlobals(const VariableDeclarationList &Vars, |
| 447 | const IceString &SectionSuffix) = 0; |
John Porto | 0f86d03 | 2015-06-15 07:44:27 -0700 | [diff] [blame] | 448 | virtual void lowerConstants() = 0; |
Andrew Scull | 86df4e9 | 2015-07-30 13:54:44 -0700 | [diff] [blame] | 449 | virtual void lowerJumpTables() = 0; |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 450 | |
| 451 | protected: |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 452 | void emitGlobal(const VariableDeclaration &Var, |
| 453 | const IceString &SectionSuffix); |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 454 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 455 | /// For now, we assume .long is the right directive for emitting 4 byte emit |
| 456 | /// global relocations. However, LLVM MIPS usually uses .4byte instead. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 457 | /// Perhaps there is some difference when the location is unaligned. |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 458 | static const char *getEmit32Directive() { return ".long"; } |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 459 | |
Jim Stichnoth | c6ead20 | 2015-02-24 09:30:30 -0800 | [diff] [blame] | 460 | explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {} |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 461 | GlobalContext *Ctx; |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 462 | }; |
| 463 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 464 | /// TargetHeaderLowering is used to "lower" the header of an output file. It |
| 465 | /// writes out the target-specific header attributes. E.g., for ARM this writes |
| 466 | /// out the build attributes (float ABI, etc.). |
Jan Voung | fb79284 | 2015-06-11 15:27:50 -0700 | [diff] [blame] | 467 | class TargetHeaderLowering { |
| 468 | TargetHeaderLowering() = delete; |
| 469 | TargetHeaderLowering(const TargetHeaderLowering &) = delete; |
| 470 | TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete; |
| 471 | |
| 472 | public: |
| 473 | static std::unique_ptr<TargetHeaderLowering> |
| 474 | createLowering(GlobalContext *Ctx); |
| 475 | virtual ~TargetHeaderLowering(); |
| 476 | |
| 477 | virtual void lower() {} |
| 478 | |
| 479 | protected: |
| 480 | explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {} |
| 481 | GlobalContext *Ctx; |
| 482 | }; |
| 483 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 484 | } // end of namespace Ice |
| 485 | |
| 486 | #endif // SUBZERO_SRC_ICETARGETLOWERING_H |