blob: 0095a39d53f4625024960c84405d96e955ae4898 [file] [log] [blame]
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -07001//===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
2//
3// The Subzero Code Generator
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Andrew Scull9612d322015-07-06 14:53:25 -07009///
10/// \file
11/// This file declares the TargetLowering, LoweringContext, and
Andrew Scull57e12682015-09-16 11:30:19 -070012/// TargetDataLowering classes. TargetLowering is an abstract class used to
13/// drive the translation/lowering process. LoweringContext maintains a context
14/// for lowering each instruction, offering conveniences such as iterating over
15/// non-deleted instructions. TargetDataLowering is an abstract class used to
16/// drive the lowering/emission of global initializers, external global
Andrew Scull9612d322015-07-06 14:53:25 -070017/// declarations, and internal constant pools.
18///
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070019//===----------------------------------------------------------------------===//
20
21#ifndef SUBZERO_SRC_ICETARGETLOWERING_H
22#define SUBZERO_SRC_ICETARGETLOWERING_H
23
24#include "IceDefs.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070025#include "IceInst.h" // for the names of the Inst subtypes
Jan Voung76bb0be2015-05-14 09:26:19 -070026#include "IceOperand.h"
Jim Stichnotha18cc9c2014-09-30 19:10:22 -070027#include "IceTypes.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070028
29namespace Ice {
30
/// LoweringContext makes it easy to iterate through non-deleted instructions in
/// a node, and insert new (lowered) instructions at the current point. Along
/// with the instruction list container and associated iterators, it holds the
/// current node, which is needed when inserting new instructions in order to
/// track whether variables are used as single-block or multi-block.
class LoweringContext {
  LoweringContext(const LoweringContext &) = delete;
  LoweringContext &operator=(const LoweringContext &) = delete;

public:
  LoweringContext() = default;
  ~LoweringContext() = default;
  /// Binds the context to Node and initializes the iterators (defined
  /// out-of-line).
  void init(CfgNode *Node);
  /// Returns the next instruction to be lowered, or nullptr when Next has
  /// reached End. Relies on the implicit ilist iterator-to-pointer conversion.
  Inst *getNextInst() const {
    if (Next == End)
      return nullptr;
    return Next;
  }
  /// Advances Iter forward (skipping deleted instructions, per
  /// advanceForward()) and returns the instruction it lands on, or nullptr if
  /// it reaches End. Iter is updated in place for the caller.
  Inst *getNextInst(InstList::iterator &Iter) const {
    advanceForward(Iter);
    if (Iter == End)
      return nullptr;
    return Iter;
  }
  CfgNode *getNode() const { return Node; }
  bool atEnd() const { return Cur == End; }
  InstList::iterator getCur() const { return Cur; }
  InstList::iterator getNext() const { return Next; }
  InstList::iterator getEnd() const { return End; }
  /// Inserts a new (lowered) instruction at the current insertion point
  /// (defined out-of-line).
  void insert(Inst *Inst);
  Inst *getLastInserted() const;
  void advanceCur() { Cur = Next; }
  void advanceNext() { advanceForward(Next); }
  void setCur(InstList::iterator C) { Cur = C; }
  void setNext(InstList::iterator N) { Next = N; }
  void rewind();
  void setInsertPoint(const InstList::iterator &Position) { Next = Position; }

private:
  /// Node is the argument to Inst::updateVars().
  CfgNode *Node = nullptr;
  /// Most recent instruction passed to insert(); returned by getLastInserted().
  Inst *LastInserted = nullptr;
  /// Cur points to the current instruction being considered. It is guaranteed
  /// to point to a non-deleted instruction, or to be End.
  InstList::iterator Cur;
  /// Next doubles as a pointer to the next valid instruction (if any), and the
  /// new-instruction insertion point. It is also updated for the caller in case
  /// the lowering consumes more than one high-level instruction. It is
  /// guaranteed to point to a non-deleted instruction after Cur, or to be End.
  // TODO: Consider separating the notion of "next valid instruction" and "new
  // instruction insertion point", to avoid confusion when previously-deleted
  // instructions come between the two points.
  InstList::iterator Next;
  /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
  InstList::iterator Begin;
  /// End is a copy of Insts.end(), used if Next needs to be advanced.
  InstList::iterator End;

  /// Moves I forward while it refers to a deleted instruction.
  void skipDeleted(InstList::iterator &I) const;
  /// Moves I to the next non-deleted instruction (or End).
  void advanceForward(InstList::iterator &I) const;
};
92
Jan Voung28068ad2015-07-31 12:58:46 -070093/// A helper class to advance the LoweringContext at each loop iteration.
94class PostIncrLoweringContext {
95 PostIncrLoweringContext() = delete;
96 PostIncrLoweringContext(const PostIncrLoweringContext &) = delete;
97 PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete;
98
99public:
100 explicit PostIncrLoweringContext(LoweringContext &Context)
101 : Context(Context) {}
102 ~PostIncrLoweringContext() {
103 Context.advanceCur();
104 Context.advanceNext();
105 }
106
107private:
108 LoweringContext &Context;
109};
110
/// TargetLowering is the abstract base class that drives translation/lowering
/// for a single Cfg. Concrete targets override the pure-virtual per-opt-level
/// translate*() drivers, the per-instruction lower*() hooks, and the register
/// and stack-frame queries.
class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  // TODO(jvoung): return a unique_ptr like the other factory functions.
  static TargetLowering *createLowering(TargetArch Target, Cfg *Func);
  static std::unique_ptr<Assembler> createAssembler(TargetArch Target,
                                                    Cfg *Func);
  /// Dispatches to the translate method matching the optimization level from
  /// the global flags. Note: no default case, so an unhandled level falls
  /// through with no translation (and no error).
  void translate() {
    switch (Ctx->getFlags().getOptLevel()) {
    case Opt_m1:
      translateOm1();
      break;
    case Opt_0:
      translateO0();
      break;
    case Opt_1:
      translateO1();
      break;
    case Opt_2:
      translateO2();
      break;
    }
  }
  // The default translate*() implementations record a per-Cfg error; targets
  // override the levels they support.
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion(RandomNumberGenerator &RNG);
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific insertion
  /// point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the current
  /// node. The main intention is to do what's needed to keep the unlowered Phi
  /// instructions consistent with the lowered non-Phi instructions, e.g. to
  /// lower 64-bit operands on a 32-bit target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns true if
  /// some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }

  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical register. This is
  /// generally used to get very direct access to the register such as in the
  /// prolog or epilog or for marking scratch registers as killed by a call. If
  /// a Type is not provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(SizeT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

  virtual bool hasFramePointer() const { return false; }
  virtual SizeT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;

  /// Return whether a 64-bit Variable should be split into a Variable64On32.
  virtual bool shouldSplitToVariable64On32(Type Ty) const = 0;

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the "returns
  /// twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  int32_t getStackAdjustment() const { return StackAdjustment; }
  void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; }
  void resetStackAdjustment() { StackAdjustment = 0; }
  /// Label and jump-table numbers are simple post-incremented counters.
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }

  /// Bitmask flags selecting register classes for getRegisterSet().
  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  using RegSetMask = uint32_t;

  virtual llvm::SmallBitVector getRegisterSet(RegSetMask Include,
                                              RegSetMask Exclude) const = 0;
  virtual const llvm::SmallBitVector &getRegisterSetForType(Type Ty) const = 0;
  virtual const llvm::SmallBitVector &getAliasesForRegister(SizeT) const = 0;

  void regAlloc(RegAllocKind Kind);

  virtual void
  makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation,
                                const llvm::SmallBitVector &ExcludeRegisters,
                                uint64_t Salt) const = 0;

  /// Save/restore any mutable state for the situation where code emission needs
  /// multiple passes, such as sandboxing or relaxation. Subclasses may provide
  /// their own implementation, but should be sure to also call the parent
  /// class's methods.
  virtual void snapshotEmitState() {
    SnapshotStackAdjustment = StackAdjustment;
  }
  virtual void rollbackEmitState() {
    StackAdjustment = SnapshotStackAdjustment;
  }

  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR) const;
  void emit(const ConstantRelocatable *CR) const;
  virtual const char *getConstantPrefix() const = 0;

  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  virtual ~TargetLowering() = default;

protected:
  explicit TargetLowering(Cfg *Func);
  // Per-instruction lowering hooks, one per high-level instruction kind.
  virtual void lowerAlloca(const InstAlloca *Inst) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Inst) = 0;
  virtual void lowerAssign(const InstAssign *Inst) = 0;
  virtual void lowerBr(const InstBr *Inst) = 0;
  virtual void lowerCall(const InstCall *Inst) = 0;
  virtual void lowerCast(const InstCast *Inst) = 0;
  virtual void lowerFcmp(const InstFcmp *Inst) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Inst) = 0;
  virtual void lowerIcmp(const InstIcmp *Inst) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Inst) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Inst) = 0;
  virtual void lowerLoad(const InstLoad *Inst) = 0;
  virtual void lowerPhi(const InstPhi *Inst) = 0;
  virtual void lowerRet(const InstRet *Inst) = 0;
  virtual void lowerSelect(const InstSelect *Inst) = 0;
  virtual void lowerStore(const InstStore *Inst) = 0;
  virtual void lowerSwitch(const InstSwitch *Inst) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void doMockBoundsCheck(Operand *) {}
  virtual void randomlyInsertNop(float Probability,
                                 RandomNumberGenerator &RNG) = 0;
  /// This gives the target an opportunity to post-process the lowered expansion
  /// before returning.
  virtual void postLower() {}

  /// Find (non-SSA) instructions where the Dest variable appears in some source
  /// operand, and set the IsDestRedefined flag. This keeps liveness analysis
  /// consistent.
  void markRedefinitions();

  /// Make a pass over the Cfg to determine which variables need stack slots and
  /// place them in a sorted list (SortedSpilledVariables). Among those, vars,
  /// classify the spill variables as local to the basic block vs global
  /// (multi-block) in order to compute the parameters GlobalsSize and
  /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of
  /// locals is disallowed) along with alignments required for variables in each
  /// area. We rely on accurate VMetadata in order to classify a variable as
  /// global vs local (otherwise the variable is conservatively global). The
  /// in-args should be initialized to 0.
  ///
  /// This is only a pre-pass and the actual stack slot assignment is handled
  /// separately.
  ///
  /// There may be target-specific Variable types, which will be handled by
  /// TargetVarHook. If the TargetVarHook returns true, then the variable is
  /// skipped and not considered with the rest of the spilled variables.
  void getVarStackSlotParams(VarList &SortedSpilledVariables,
                             llvm::SmallBitVector &RegsUsed,
                             size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                             uint32_t *SpillAreaAlignmentBytes,
                             uint32_t *LocalsSlotsAlignmentBytes,
                             std::function<bool(Variable *)> TargetVarHook);

  /// Calculate the amount of padding needed to align the local and global areas
  /// to the required alignment. This assumes the globals/locals layout used by
  /// getVarStackSlotParams and assignVarStackSlots.
  void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                            uint32_t SpillAreaAlignmentBytes,
                            size_t GlobalsSize,
                            uint32_t LocalsSlotsAlignmentBytes,
                            uint32_t *SpillAreaPaddingBytes,
                            uint32_t *LocalsSlotsPaddingBytes);

  /// Make a pass through the SortedSpilledVariables and actually assign stack
  /// slots. SpillAreaPaddingBytes takes into account stack alignment padding.
  /// The SpillArea starts after that amount of padding. This matches the scheme
  /// in getVarStackSlotParams, where there may be a separate multi-block global
  /// var spill area and a local var spill area.
  void assignVarStackSlots(VarList &SortedSpilledVariables,
                           size_t SpillAreaPaddingBytes,
                           size_t SpillAreaSizeBytes,
                           size_t GlobalsAndSubsequentPaddingSize,
                           bool UsesFramePointer);

  /// Sort the variables in Source based on required alignment. The variables
  /// with the largest alignment need are placed in the front of the Dest list.
  void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

  /// Make a call to an external helper function.
  InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                           SizeT MaxSrcs);

  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert(InstBundleLock::create(Func, BundleOption));
  }
  void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); }
  void _set_dest_redefined() { Context.getLastInserted()->setDestRedefined(); }

  bool shouldOptimizeMemIntrins();

  Cfg *Func;
  GlobalContext *Ctx;
  bool HasComputedFrame = false;
  bool CallsReturnsTwice = false;
  /// StackAdjustment keeps track of the current stack offset from its natural
  /// location, e.g. as arguments are pushed for a function call or as
  /// fixed-size alloca instructions are executed in the entry block.
  int32_t StackAdjustment = 0;
  SizeT NextLabelNumber = 0;
  SizeT NextJumpTableNumber = 0;
  LoweringContext Context;

  // Runtime helper function names
  const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
  const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
  const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
  const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
  const static constexpr char *H_call_ctpop_i32 = "__popcountsi2";
  const static constexpr char *H_call_ctpop_i64 = "__popcountdi2";
  const static constexpr char *H_call_longjmp = "longjmp";
  const static constexpr char *H_call_memcpy = "memcpy";
  const static constexpr char *H_call_memmove = "memmove";
  const static constexpr char *H_call_memset = "memset";
  const static constexpr char *H_call_read_tp = "__nacl_read_tp";
  const static constexpr char *H_call_setjmp = "setjmp";
  const static constexpr char *H_fptosi_f32_i64 = "__Sz_fptosi_f32_i64";
  const static constexpr char *H_fptosi_f64_i64 = "__Sz_fptosi_f64_i64";
  const static constexpr char *H_fptoui_4xi32_f32 = "__Sz_fptoui_4xi32_f32";
  const static constexpr char *H_fptoui_f32_i32 = "__Sz_fptoui_f32_i32";
  const static constexpr char *H_fptoui_f32_i64 = "__Sz_fptoui_f32_i64";
  const static constexpr char *H_fptoui_f64_i32 = "__Sz_fptoui_f64_i32";
  const static constexpr char *H_fptoui_f64_i64 = "__Sz_fptoui_f64_i64";
  const static constexpr char *H_frem_f32 = "fmodf";
  const static constexpr char *H_frem_f64 = "fmod";
  const static constexpr char *H_sdiv_i32 = "__divsi3";
  const static constexpr char *H_sdiv_i64 = "__divdi3";
  const static constexpr char *H_sitofp_i64_f32 = "__Sz_sitofp_i64_f32";
  const static constexpr char *H_sitofp_i64_f64 = "__Sz_sitofp_i64_f64";
  const static constexpr char *H_srem_i32 = "__modsi3";
  const static constexpr char *H_srem_i64 = "__moddi3";
  const static constexpr char *H_udiv_i32 = "__udivsi3";
  const static constexpr char *H_udiv_i64 = "__udivdi3";
  const static constexpr char *H_uitofp_4xi32_4xf32 = "__Sz_uitofp_4xi32_4xf32";
  const static constexpr char *H_uitofp_i32_f32 = "__Sz_uitofp_i32_f32";
  const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
  const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
  const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
  const static constexpr char *H_urem_i32 = "__umodsi3";
  const static constexpr char *H_urem_i64 = "__umoddi3";

private:
  /// Saved copy of StackAdjustment for snapshot/rollbackEmitState().
  int32_t SnapshotStackAdjustment = 0;
};
409
/// TargetDataLowering is used for "lowering" data including initializers for
/// global variables, and the internal constant pools. It is separated out from
/// TargetLowering because it does not require a Cfg.
class TargetDataLowering {
  TargetDataLowering() = delete;
  TargetDataLowering(const TargetDataLowering &) = delete;
  TargetDataLowering &operator=(const TargetDataLowering &) = delete;

public:
  /// Factory; selects the concrete subclass for the configured target.
  static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
  virtual ~TargetDataLowering();

  virtual void lowerGlobals(const VariableDeclarationList &Vars,
                            const IceString &SectionSuffix) = 0;
  virtual void lowerConstants() = 0;
  virtual void lowerJumpTables() = 0;

protected:
  void emitGlobal(const VariableDeclaration &Var,
                  const IceString &SectionSuffix);

  /// For now, we assume .long is the right directive for emitting 4 byte
  /// global relocations. However, LLVM MIPS usually uses .4byte instead.
  /// Perhaps there is some difference when the location is unaligned.
  static const char *getEmit32Directive() { return ".long"; }

  explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};
439
/// TargetHeaderLowering is used to "lower" the header of an output file. It
/// writes out the target-specific header attributes. E.g., for ARM this writes
/// out the build attributes (float ABI, etc.).
class TargetHeaderLowering {
  TargetHeaderLowering() = delete;
  TargetHeaderLowering(const TargetHeaderLowering &) = delete;
  TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;

public:
  /// Factory; selects the concrete subclass for the configured target.
  static std::unique_ptr<TargetHeaderLowering>
  createLowering(GlobalContext *Ctx);
  virtual ~TargetHeaderLowering();

  /// Default is a no-op; targets override this to emit their header.
  virtual void lower() {}

protected:
  explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};
459
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700460} // end of namespace Ice
461
462#endif // SUBZERO_SRC_ICETARGETLOWERING_H