//===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file declares the TargetLowering, LoweringContext, and
/// TargetDataLowering classes. TargetLowering is an abstract class
/// used to drive the translation/lowering process. LoweringContext
/// maintains a context for lowering each instruction, offering
/// conveniences such as iterating over non-deleted instructions.
/// TargetDataLowering is an abstract class used to drive the
/// lowering/emission of global initializers, external global
/// declarations, and internal constant pools.
///
//===----------------------------------------------------------------------===//

#ifndef SUBZERO_SRC_ICETARGETLOWERING_H
#define SUBZERO_SRC_ICETARGETLOWERING_H

#include "IceDefs.h"
#include "IceInst.h" // for the names of the Inst subtypes
#include "IceOperand.h"
#include "IceTypes.h"

namespace Ice {

/// LoweringContext makes it easy to iterate through non-deleted
/// instructions in a node, and to insert new (lowered) instructions at
/// the current point. Along with the instruction list container and
/// associated iterators, it holds the current node, which is needed
/// when inserting new instructions in order to track whether variables
/// are used within a single block or across multiple blocks.
class LoweringContext {
  LoweringContext(const LoweringContext &) = delete;
  LoweringContext &operator=(const LoweringContext &) = delete;

public:
  LoweringContext() = default;
  ~LoweringContext() = default;
  void init(CfgNode *Node);
  Inst *getNextInst() const {
    if (Next == End)
      return nullptr;
    return Next;
  }
  Inst *getNextInst(InstList::iterator &Iter) const {
    advanceForward(Iter);
    if (Iter == End)
      return nullptr;
    return Iter;
  }
  CfgNode *getNode() const { return Node; }
  bool atEnd() const { return Cur == End; }
  InstList::iterator getCur() const { return Cur; }
  InstList::iterator getNext() const { return Next; }
  InstList::iterator getEnd() const { return End; }
  void insert(Inst *Inst);
  Inst *getLastInserted() const;
  void advanceCur() { Cur = Next; }
  void advanceNext() { advanceForward(Next); }
  void setCur(InstList::iterator C) { Cur = C; }
  void setNext(InstList::iterator N) { Next = N; }
  void rewind();
  void setInsertPoint(const InstList::iterator &Position) { Next = Position; }

private:
  /// Node is the argument to Inst::updateVars().
  CfgNode *Node = nullptr;
  Inst *LastInserted = nullptr;
  /// Cur points to the current instruction being considered. It is
  /// guaranteed to point to a non-deleted instruction, or to be End.
  InstList::iterator Cur;
  /// Next doubles as a pointer to the next valid instruction (if any),
  /// and the new-instruction insertion point. It is also updated for
  /// the caller in case the lowering consumes more than one high-level
  /// instruction. It is guaranteed to point to a non-deleted
  /// instruction after Cur, or to be End. TODO: Consider separating
  /// the notion of "next valid instruction" and "new instruction
  /// insertion point", to avoid confusion when previously-deleted
  /// instructions come between the two points.
  InstList::iterator Next;
  /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
  InstList::iterator Begin;
  /// End is a copy of Insts.end(), used if Next needs to be advanced.
  InstList::iterator End;

  void skipDeleted(InstList::iterator &I) const;
  void advanceForward(InstList::iterator &I) const;
};

/// A helper class to advance the LoweringContext at each loop iteration.
class PostIncrLoweringContext {
  PostIncrLoweringContext() = delete;
  PostIncrLoweringContext(const PostIncrLoweringContext &) = delete;
  PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete;

public:
  explicit PostIncrLoweringContext(LoweringContext &Context)
      : Context(Context) {}
  ~PostIncrLoweringContext() {
    Context.advanceCur();
    Context.advanceNext();
  }

private:
  LoweringContext &Context;
};

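// Illustrative usage sketch (an assumption, not part of this header): a
// target's per-node lowering loop typically drives these two classes roughly
// as follows, where lowerOneInst() stands in for a hypothetical
// target-specific hook.
//
//   void lowerNodeSketch(LoweringContext &Context, CfgNode *Node) {
//     Context.init(Node);
//     while (!Context.atEnd()) {
//       PostIncrLoweringContext PostIncrement(Context);
//       lowerOneInst(Context); // may call Context.insert() before Next
//     } // ~PostIncrLoweringContext() advances Cur and Next here
//   }
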
class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  // TODO(jvoung): return a unique_ptr like the other factory functions.
  static TargetLowering *createLowering(TargetArch Target, Cfg *Func);
  static std::unique_ptr<Assembler> createAssembler(TargetArch Target,
                                                    Cfg *Func);
  void translate() {
    switch (Ctx->getFlags().getOptLevel()) {
    case Opt_m1:
      translateOm1();
      break;
    case Opt_0:
      translateO0();
      break;
    case Opt_1:
      translateO1();
      break;
    case Opt_2:
      translateO2();
      break;
    }
  }
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion(RandomNumberGenerator &RNG);
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific
  /// insertion point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the
  /// current node. The main intention is to do what's needed to keep
  /// the unlowered Phi instructions consistent with the lowered
  /// non-Phi instructions, e.g. to lower 64-bit operands on a 32-bit
  /// target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns
  /// true if some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }

  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical
  /// register. This is generally used to get very direct access to
  /// the register, such as in the prolog or epilog, or for marking
  /// scratch registers as killed by a call. If a Type is not
  /// provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(SizeT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

  virtual bool hasFramePointer() const { return false; }
  virtual SizeT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the
  /// "returns twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  int32_t getStackAdjustment() const { return StackAdjustment; }
  void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; }
  void resetStackAdjustment() { StackAdjustment = 0; }
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }

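  // Illustrative sketch (an assumption, not from this header): the
  // StackAdjustment accessors above are typically used while lowering a call,
  // so that stack-relative offsets emitted between the pushes and the call
  // remain correct.
  //
  //   // ...push a 4-byte argument onto the stack...
  //   updateStackAdjustment(4);
  //   // ...emit the call; once the argument area is deallocated:
  //   resetStackAdjustment();
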
  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  using RegSetMask = uint32_t;

  virtual llvm::SmallBitVector getRegisterSet(RegSetMask Include,
                                              RegSetMask Exclude) const = 0;
  virtual const llvm::SmallBitVector &getRegisterSetForType(Type Ty) const = 0;
  virtual const llvm::SmallBitVector &getAliasesForRegister(SizeT) const = 0;

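  // Illustrative sketch (an assumption, not from this header): RegSet values
  // are bit flags, so Include/Exclude masks can be combined with bitwise-or
  // when querying a concrete target, e.g.
  //
  //   llvm::SmallBitVector CalleeSaves =
  //       getRegisterSet(RegSet_CalleeSave,
  //                      RegSet_StackPointer | RegSet_FramePointer);
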
  void regAlloc(RegAllocKind Kind);

  virtual void
  makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation,
                                const llvm::SmallBitVector &ExcludeRegisters,
                                uint64_t Salt) const = 0;

  /// Save/restore any mutable state for the situation where code
  /// emission needs multiple passes, such as sandboxing or relaxation.
  /// Subclasses may provide their own implementation, but should be
  /// sure to also call the parent class's methods.
  virtual void snapshotEmitState() {
    SnapshotStackAdjustment = StackAdjustment;
  }
  virtual void rollbackEmitState() {
    StackAdjustment = SnapshotStackAdjustment;
  }

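  // Illustrative override sketch (TargetFoo and FooState are hypothetical,
  // not from this header): a subclass that adds its own mutable emission
  // state should also invoke the base-class versions.
  //
  //   // inside class TargetFoo : public TargetLowering
  //   void snapshotEmitState() override {
  //     TargetLowering::snapshotEmitState(); // keep the base snapshot
  //     SnapshotFooState = FooState;         // hypothetical target state
  //   }
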
  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR) const;
  void emit(const ConstantRelocatable *CR) const;
  virtual const char *getConstantPrefix() const = 0;

  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  virtual ~TargetLowering() = default;

protected:
  explicit TargetLowering(Cfg *Func);
  virtual void lowerAlloca(const InstAlloca *Inst) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Inst) = 0;
  virtual void lowerAssign(const InstAssign *Inst) = 0;
  virtual void lowerBr(const InstBr *Inst) = 0;
  virtual void lowerCall(const InstCall *Inst) = 0;
  virtual void lowerCast(const InstCast *Inst) = 0;
  virtual void lowerFcmp(const InstFcmp *Inst) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Inst) = 0;
  virtual void lowerIcmp(const InstIcmp *Inst) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Inst) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Inst) = 0;
  virtual void lowerLoad(const InstLoad *Inst) = 0;
  virtual void lowerPhi(const InstPhi *Inst) = 0;
  virtual void lowerRet(const InstRet *Inst) = 0;
  virtual void lowerSelect(const InstSelect *Inst) = 0;
  virtual void lowerStore(const InstStore *Inst) = 0;
  virtual void lowerSwitch(const InstSwitch *Inst) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void doMockBoundsCheck(Operand *) {}
  virtual void randomlyInsertNop(float Probability,
                                 RandomNumberGenerator &RNG) = 0;
  /// This gives the target an opportunity to post-process the lowered
  /// expansion before returning.
  virtual void postLower() {}

  /// Find two-address non-SSA instructions and set the DestNonKillable flag
  /// to keep liveness analysis consistent.
  void inferTwoAddress();

  /// Make a pass over the Cfg to determine which variables need stack slots
  /// and place them in a sorted list (SortedSpilledVariables). Among those
  /// variables, classify the spill variables as local to the basic block vs
  /// global (multi-block) in order to compute the parameters GlobalsSize
  /// and SpillAreaSizeBytes (which represents locals, or all spilled
  /// variables if coalescing of locals is disallowed), along with the
  /// alignments required for variables in each area. We rely on accurate
  /// VMetadata in order to classify a variable as global vs local (otherwise
  /// the variable is conservatively treated as global). The in/out size and
  /// alignment arguments should be initialized to 0 by the caller.
  ///
  /// This is only a pre-pass and the actual stack slot assignment is
  /// handled separately.
  ///
  /// There may be target-specific Variable types, which will be handled
  /// by TargetVarHook. If the TargetVarHook returns true, then the variable
  /// is skipped and not considered with the rest of the spilled variables.
  void getVarStackSlotParams(VarList &SortedSpilledVariables,
                             llvm::SmallBitVector &RegsUsed,
                             size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                             uint32_t *SpillAreaAlignmentBytes,
                             uint32_t *LocalsSlotsAlignmentBytes,
                             std::function<bool(Variable *)> TargetVarHook);

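  // Illustrative sketch (an assumption, not from this header): a target with
  // its own Variable subclass (here a hypothetical StackVariableFoo) can pass
  // a hook that claims such variables so they are excluded from the generic
  // spill handling.
  //
  //   auto TargetVarHook = [](Variable *Var) {
  //     return llvm::isa<StackVariableFoo>(Var); // true => skip this Var
  //   };
  //   getVarStackSlotParams(SortedSpilled, RegsUsed, &GlobalsSize,
  //                         &SpillAreaSizeBytes, &SpillAreaAlignmentBytes,
  //                         &LocalsSlotsAlignmentBytes, TargetVarHook);
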
  /// Calculate the amount of padding needed to align the local and global
  /// areas to the required alignment. This assumes the globals/locals layout
  /// used by getVarStackSlotParams and assignVarStackSlots.
  void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                            uint32_t SpillAreaAlignmentBytes,
                            size_t GlobalsSize,
                            uint32_t LocalsSlotsAlignmentBytes,
                            uint32_t *SpillAreaPaddingBytes,
                            uint32_t *LocalsSlotsPaddingBytes);

  /// Make a pass through the SortedSpilledVariables and actually assign
  /// stack slots. SpillAreaPaddingBytes takes into account stack alignment
  /// padding. The SpillArea starts after that amount of padding.
  /// This matches the scheme in getVarStackSlotParams, where there may
  /// be a separate multi-block global var spill area and a local var
  /// spill area.
  void assignVarStackSlots(VarList &SortedSpilledVariables,
                           size_t SpillAreaPaddingBytes,
                           size_t SpillAreaSizeBytes,
                           size_t GlobalsAndSubsequentPaddingSize,
                           bool UsesFramePointer);

  /// Sort the variables in Source based on required alignment. The variables
  /// with the largest alignment requirement are placed at the front of the
  /// Dest list.
  void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

  /// Make a call to an external helper function.
  InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                           SizeT MaxSrcs);

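  // Illustrative sketch (an assumption, not from this header): a lowering can
  // expand an operation the target lacks into one of the runtime helpers
  // named below, roughly as follows (InstCall::addArg is assumed here).
  //
  //   InstCall *Call = makeHelperCall(H_sdiv_i64, Dest, 2);
  //   Call->addArg(Src0);
  //   Call->addArg(Src1);
  //   lowerCall(Call);
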
  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert(InstBundleLock::create(Func, BundleOption));
  }
  void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); }
  void _set_dest_nonkillable() {
    Context.getLastInserted()->setDestNonKillable();
  }

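  // Illustrative sketch (TargetFoo and InstFooNop are hypothetical, not from
  // this header): concrete targets usually define additional one-line emit
  // helpers in the same style as _bundle_lock() above, wrapping
  // Context.insert() around a target instruction.
  //
  //   void _nop() { Context.insert(InstFooNop::create(Func)); }
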
  bool shouldOptimizeMemIntrins();

  Cfg *Func;
  GlobalContext *Ctx;
  bool HasComputedFrame = false;
  bool CallsReturnsTwice = false;
  /// StackAdjustment keeps track of the current stack offset from its
  /// natural location, as arguments are pushed for a function call.
  int32_t StackAdjustment = 0;
  SizeT NextLabelNumber = 0;
  SizeT NextJumpTableNumber = 0;
  LoweringContext Context;

  // Runtime helper function names
  const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
  const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
  const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
  const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
  const static constexpr char *H_call_ctpop_i32 = "__popcountsi2";
  const static constexpr char *H_call_ctpop_i64 = "__popcountdi2";
  const static constexpr char *H_call_longjmp = "longjmp";
  const static constexpr char *H_call_memcpy = "memcpy";
  const static constexpr char *H_call_memmove = "memmove";
  const static constexpr char *H_call_memset = "memset";
  const static constexpr char *H_call_read_tp = "__nacl_read_tp";
  const static constexpr char *H_call_setjmp = "setjmp";
  const static constexpr char *H_fptosi_f32_i64 = "__Sz_fptosi_f32_i64";
  const static constexpr char *H_fptosi_f64_i64 = "__Sz_fptosi_f64_i64";
  const static constexpr char *H_fptoui_4xi32_f32 = "__Sz_fptoui_4xi32_f32";
  const static constexpr char *H_fptoui_f32_i32 = "__Sz_fptoui_f32_i32";
  const static constexpr char *H_fptoui_f32_i64 = "__Sz_fptoui_f32_i64";
  const static constexpr char *H_fptoui_f64_i32 = "__Sz_fptoui_f64_i32";
  const static constexpr char *H_fptoui_f64_i64 = "__Sz_fptoui_f64_i64";
  const static constexpr char *H_frem_f32 = "fmodf";
  const static constexpr char *H_frem_f64 = "fmod";
  const static constexpr char *H_sdiv_i32 = "__divsi3";
  const static constexpr char *H_sdiv_i64 = "__divdi3";
  const static constexpr char *H_sitofp_i64_f32 = "__Sz_sitofp_i64_f32";
  const static constexpr char *H_sitofp_i64_f64 = "__Sz_sitofp_i64_f64";
  const static constexpr char *H_srem_i32 = "__modsi3";
  const static constexpr char *H_srem_i64 = "__moddi3";
  const static constexpr char *H_udiv_i32 = "__udivsi3";
  const static constexpr char *H_udiv_i64 = "__udivdi3";
  const static constexpr char *H_uitofp_4xi32_4xf32 = "__Sz_uitofp_4xi32_4xf32";
  const static constexpr char *H_uitofp_i32_f32 = "__Sz_uitofp_i32_f32";
  const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
  const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
  const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
  const static constexpr char *H_urem_i32 = "__umodsi3";
  const static constexpr char *H_urem_i64 = "__umoddi3";

private:
  int32_t SnapshotStackAdjustment = 0;
};

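// Illustrative driver sketch (an assumption, not from this header): given a
// constructed Cfg, a front end might create and run a target lowering roughly
// as follows (Target_X8632 is assumed to be a valid TargetArch value).
//
//   TargetLowering *Target =
//       TargetLowering::createLowering(Target_X8632, Func);
//   Target->translate(); // dispatches on the opt level to translateO2(), etc.
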
/// TargetDataLowering is used for "lowering" data, including initializers
/// for global variables and the internal constant pools. It is separated
/// out from TargetLowering because it does not require a Cfg.
class TargetDataLowering {
  TargetDataLowering() = delete;
  TargetDataLowering(const TargetDataLowering &) = delete;
  TargetDataLowering &operator=(const TargetDataLowering &) = delete;

public:
  static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
  virtual ~TargetDataLowering();

  virtual void lowerGlobals(const VariableDeclarationList &Vars,
                            const IceString &SectionSuffix) = 0;
  virtual void lowerConstants() = 0;
  virtual void lowerJumpTables() = 0;

protected:
  void emitGlobal(const VariableDeclaration &Var,
                  const IceString &SectionSuffix);

  /// For now, we assume .long is the right directive for emitting 4 byte
  /// global relocations. However, LLVM MIPS usually uses .4byte instead.
  /// Perhaps there is some difference when the location is unaligned.
  static const char *getEmit32Directive() { return ".long"; }

  explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};

/// TargetHeaderLowering is used to "lower" the header of an output file.
/// It writes out the target-specific header attributes. E.g., for ARM
/// this writes out the build attributes (float ABI, etc.).
class TargetHeaderLowering {
  TargetHeaderLowering() = delete;
  TargetHeaderLowering(const TargetHeaderLowering &) = delete;
  TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;

public:
  static std::unique_ptr<TargetHeaderLowering>
  createLowering(GlobalContext *Ctx);
  virtual ~TargetHeaderLowering();

  virtual void lower() {}

protected:
  explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};

} // end of namespace Ice

#endif // SUBZERO_SRC_ICETARGETLOWERING_H