//===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file declares the TargetLowering, LoweringContext, and
/// TargetDataLowering classes. TargetLowering is an abstract class
/// used to drive the translation/lowering process. LoweringContext
/// maintains a context for lowering each instruction, offering
/// conveniences such as iterating over non-deleted instructions.
/// TargetDataLowering is an abstract class used to drive the
/// lowering/emission of global initializers, external global
/// declarations, and internal constant pools.
///
//===----------------------------------------------------------------------===//

#ifndef SUBZERO_SRC_ICETARGETLOWERING_H
#define SUBZERO_SRC_ICETARGETLOWERING_H

#include "IceDefs.h"
#include "IceInst.h" // for the names of the Inst subtypes
#include "IceOperand.h"
#include "IceTypes.h"

namespace Ice {

/// LoweringContext makes it easy to iterate through non-deleted
/// instructions in a node, and insert new (lowered) instructions at
/// the current point. Along with the instruction list container and
/// associated iterators, it holds the current node, which is needed
/// when inserting new instructions in order to track whether variables
/// are used as single-block or multi-block.
class LoweringContext {
  LoweringContext(const LoweringContext &) = delete;
  LoweringContext &operator=(const LoweringContext &) = delete;

public:
  LoweringContext() = default;
  ~LoweringContext() = default;
  void init(CfgNode *Node);
  Inst *getNextInst() const {
    if (Next == End)
      return nullptr;
    return Next;
  }
  Inst *getNextInst(InstList::iterator &Iter) const {
    advanceForward(Iter);
    if (Iter == End)
      return nullptr;
    return Iter;
  }
  CfgNode *getNode() const { return Node; }
  bool atEnd() const { return Cur == End; }
  InstList::iterator getCur() const { return Cur; }
  InstList::iterator getNext() const { return Next; }
  InstList::iterator getEnd() const { return End; }
  void insert(Inst *Inst);
  Inst *getLastInserted() const;
  void advanceCur() { Cur = Next; }
  void advanceNext() { advanceForward(Next); }
  void setCur(InstList::iterator C) { Cur = C; }
  void setNext(InstList::iterator N) { Next = N; }
  void rewind();
  void setInsertPoint(const InstList::iterator &Position) { Next = Position; }

private:
  /// Node is the argument to Inst::updateVars().
  CfgNode *Node = nullptr;
  Inst *LastInserted = nullptr;
  /// Cur points to the current instruction being considered. It is
  /// guaranteed to point to a non-deleted instruction, or to be End.
  InstList::iterator Cur;
  /// Next doubles as a pointer to the next valid instruction (if any),
  /// and the new-instruction insertion point. It is also updated for
  /// the caller in case the lowering consumes more than one high-level
  /// instruction. It is guaranteed to point to a non-deleted
  /// instruction after Cur, or to be End. TODO: Consider separating
  /// the notion of "next valid instruction" and "new instruction
  /// insertion point", to avoid confusion when previously-deleted
  /// instructions come between the two points.
  InstList::iterator Next;
  /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
  InstList::iterator Begin;
  /// End is a copy of Insts.end(), used if Next needs to be advanced.
  InstList::iterator End;

  void skipDeleted(InstList::iterator &I) const;
  void advanceForward(InstList::iterator &I) const;
};
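
// Illustrative usage sketch (not part of the interface): a lowering driver
// typically walks a node's instructions through a LoweringContext roughly as
// follows, inserting the lowered sequence at the insertion point before
// advancing. This is only a sketch based on the methods declared above.
//
//   Context.init(Node);
//   while (!Context.atEnd()) {
//     // Lower the instruction at Context.getCur(), emitting new (lowered)
//     // instructions via Context.insert(...).
//     Context.advanceCur();
//     Context.advanceNext();
//   }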

class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  // TODO(jvoung): return a unique_ptr like the other factory functions.
  static TargetLowering *createLowering(TargetArch Target, Cfg *Func);
  static std::unique_ptr<Assembler> createAssembler(TargetArch Target,
                                                    Cfg *Func);
  void translate() {
    switch (Ctx->getFlags().getOptLevel()) {
    case Opt_m1:
      translateOm1();
      break;
    case Opt_0:
      translateO0();
      break;
    case Opt_1:
      translateO1();
      break;
    case Opt_2:
      translateO2();
      break;
    }
  }
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion();
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific
  /// insertion point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the
  /// current node. The main intention is to do what's needed to keep
  /// the unlowered Phi instructions consistent with the lowered
  /// non-Phi instructions, e.g. to lower 64-bit operands on a 32-bit
  /// target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns
  /// true if some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }

  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical
  /// register. This is generally used to get very direct access to
  /// the register such as in the prolog or epilog or for marking
  /// scratch registers as killed by a call. If a Type is not
  /// provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(SizeT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

  virtual bool hasFramePointer() const { return false; }
  virtual SizeT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the
  /// "returns twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  int32_t getStackAdjustment() const { return StackAdjustment; }
  void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; }
  void resetStackAdjustment() { StackAdjustment = 0; }
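  // Illustrative sketch (assumed usage, not mandated by this interface): a
  // target that pushes call arguments onto the stack would typically bump the
  // adjustment as each argument is pushed and clear it once the stack is
  // restored after the call, e.g.
  //
  //   updateStackAdjustment(4); // after pushing a 4-byte argument
  //   ...
  //   resetStackAdjustment();   // after the call, once arguments are popped
  //
  // so that stack-relative operands keep addressing the correct slots while
  // the arguments are on the stack.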
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }

  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  typedef uint32_t RegSetMask;

  virtual llvm::SmallBitVector getRegisterSet(RegSetMask Include,
                                              RegSetMask Exclude) const = 0;
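  // Illustrative sketch: the Include/Exclude arguments are RegSet bitmasks,
  // so a caller might request, e.g., all caller-save registers except the
  // stack and frame pointers (exact semantics are target-defined):
  //
  //   llvm::SmallBitVector Avail =
  //       getRegisterSet(RegSet_CallerSave,
  //                      RegSet_StackPointer | RegSet_FramePointer);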
  virtual const llvm::SmallBitVector &getRegisterSetForType(Type Ty) const = 0;
  void regAlloc(RegAllocKind Kind);

  virtual void makeRandomRegisterPermutation(
      llvm::SmallVectorImpl<int32_t> &Permutation,
      const llvm::SmallBitVector &ExcludeRegisters) const = 0;

  /// Save/restore any mutable state for the situation where code
  /// emission needs multiple passes, such as sandboxing or relaxation.
  /// Subclasses may provide their own implementation, but should be
  /// sure to also call the parent class's methods.
  virtual void snapshotEmitState() {
    SnapshotStackAdjustment = StackAdjustment;
  }
  virtual void rollbackEmitState() {
    StackAdjustment = SnapshotStackAdjustment;
  }
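  // Illustrative sketch (hypothetical subclass): a target that keeps extra
  // mutable emit state would extend both hooks and still call the base class
  // versions, as the comment above requires:
  //
  //   class TargetFoo : public TargetLowering {
  //     void snapshotEmitState() override {
  //       TargetLowering::snapshotEmitState();
  //       SnapshotMyState = MyState; // hypothetical target-specific state
  //     }
  //     void rollbackEmitState() override {
  //       TargetLowering::rollbackEmitState();
  //       MyState = SnapshotMyState;
  //     }
  //   };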

  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR) const;
  void emit(const ConstantRelocatable *CR) const;
  virtual const char *getConstantPrefix() const = 0;

  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  virtual ~TargetLowering() = default;

protected:
  explicit TargetLowering(Cfg *Func);
  virtual void lowerAlloca(const InstAlloca *Inst) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Inst) = 0;
  virtual void lowerAssign(const InstAssign *Inst) = 0;
  virtual void lowerBr(const InstBr *Inst) = 0;
  virtual void lowerCall(const InstCall *Inst) = 0;
  virtual void lowerCast(const InstCast *Inst) = 0;
  virtual void lowerFcmp(const InstFcmp *Inst) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Inst) = 0;
  virtual void lowerIcmp(const InstIcmp *Inst) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Inst) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Inst) = 0;
  virtual void lowerLoad(const InstLoad *Inst) = 0;
  virtual void lowerPhi(const InstPhi *Inst) = 0;
  virtual void lowerRet(const InstRet *Inst) = 0;
  virtual void lowerSelect(const InstSelect *Inst) = 0;
  virtual void lowerStore(const InstStore *Inst) = 0;
  virtual void lowerSwitch(const InstSwitch *Inst) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void randomlyInsertNop(float Probability) = 0;
  /// This gives the target an opportunity to post-process the lowered
  /// expansion before returning.
  virtual void postLower() {}

  /// Find two-address non-SSA instructions and set the DestNonKillable flag
  /// to keep liveness analysis consistent.
  void inferTwoAddress();

  /// Make a pass over the Cfg to determine which variables need stack slots
  /// and place them in a sorted list (SortedSpilledVariables). Among those
  /// vars, classify the spill variables as local to the basic block vs
  /// global (multi-block) in order to compute the parameters GlobalsSize
  /// and SpillAreaSizeBytes (represents locals or general vars if the
  /// coalescing of locals is disallowed) along with alignments required
  /// for variables in each area. We rely on accurate VMetadata in order to
  /// classify a variable as global vs local (otherwise the variable is
  /// conservatively global). The in-args should be initialized to 0.
  ///
  /// This is only a pre-pass and the actual stack slot assignment is
  /// handled separately.
  ///
  /// There may be target-specific Variable types, which will be handled
  /// by TargetVarHook. If the TargetVarHook returns true, then the variable
  /// is skipped and not considered with the rest of the spilled variables.
  void getVarStackSlotParams(VarList &SortedSpilledVariables,
                             llvm::SmallBitVector &RegsUsed,
                             size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                             uint32_t *SpillAreaAlignmentBytes,
                             uint32_t *LocalsSlotsAlignmentBytes,
                             std::function<bool(Variable *)> TargetVarHook);

  /// Calculate the amount of padding needed to align the local and global
  /// areas to the required alignment. This assumes the globals/locals layout
  /// used by getVarStackSlotParams and assignVarStackSlots.
  void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                            uint32_t SpillAreaAlignmentBytes,
                            size_t GlobalsSize,
                            uint32_t LocalsSlotsAlignmentBytes,
                            uint32_t *SpillAreaPaddingBytes,
                            uint32_t *LocalsSlotsPaddingBytes);

  /// Make a pass through the SortedSpilledVariables and actually assign
  /// stack slots. SpillAreaPaddingBytes takes into account stack alignment
  /// padding. The SpillArea starts after that amount of padding.
  /// This matches the scheme in getVarStackSlotParams, where there may
  /// be a separate multi-block global var spill area and a local var
  /// spill area.
  void assignVarStackSlots(VarList &SortedSpilledVariables,
                           size_t SpillAreaPaddingBytes,
                           size_t SpillAreaSizeBytes,
                           size_t GlobalsAndSubsequentPaddingSize,
                           bool UsesFramePointer);

  /// Sort the variables in Source based on required alignment.
  /// The variables with the largest alignment need are placed in the front
  /// of the Dest list.
  void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

  /// Make a call to an external helper function.
  InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                           SizeT MaxSrcs);
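  // Illustrative sketch (assumed usage): lowering code typically creates a
  // helper call using one of the H_* names declared below, adds its operands,
  // and then runs it through the normal call lowering. The operand names here
  // are hypothetical, and InstCall::addArg() is assumed from IceInst.h:
  //
  //   InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3);
  //   Call->addArg(DestPtr);
  //   Call->addArg(FillValue);
  //   Call->addArg(SizeInBytes);
  //   lowerCall(Call);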

  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert(InstBundleLock::create(Func, BundleOption));
  }
  void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); }
  void _set_dest_nonkillable() {
    Context.getLastInserted()->setDestNonKillable();
  }

  Cfg *Func;
  GlobalContext *Ctx;
  bool HasComputedFrame = false;
  bool CallsReturnsTwice = false;
  /// StackAdjustment keeps track of the current stack offset from its
  /// natural location, as arguments are pushed for a function call.
  int32_t StackAdjustment = 0;
  SizeT NextLabelNumber = 0;
  SizeT NextJumpTableNumber = 0;
  LoweringContext Context;

  // Runtime helper function names
  const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
  const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
  const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
  const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
  const static constexpr char *H_call_ctpop_i32 = "__popcountsi2";
  const static constexpr char *H_call_ctpop_i64 = "__popcountdi2";
  const static constexpr char *H_call_longjmp = "longjmp";
  const static constexpr char *H_call_memcpy = "memcpy";
  const static constexpr char *H_call_memmove = "memmove";
  const static constexpr char *H_call_memset = "memset";
  const static constexpr char *H_call_read_tp = "__nacl_read_tp";
  const static constexpr char *H_call_setjmp = "setjmp";
  const static constexpr char *H_fptosi_f32_i64 = "__Sz_fptosi_f32_i64";
  const static constexpr char *H_fptosi_f64_i64 = "__Sz_fptosi_f64_i64";
  const static constexpr char *H_fptoui_4xi32_f32 = "__Sz_fptoui_4xi32_f32";
  const static constexpr char *H_fptoui_f32_i32 = "__Sz_fptoui_f32_i32";
  const static constexpr char *H_fptoui_f32_i64 = "__Sz_fptoui_f32_i64";
  const static constexpr char *H_fptoui_f64_i32 = "__Sz_fptoui_f64_i32";
  const static constexpr char *H_fptoui_f64_i64 = "__Sz_fptoui_f64_i64";
  const static constexpr char *H_frem_f32 = "fmodf";
  const static constexpr char *H_frem_f64 = "fmod";
  const static constexpr char *H_sdiv_i32 = "__divsi3";
  const static constexpr char *H_sdiv_i64 = "__divdi3";
  const static constexpr char *H_sitofp_i64_f32 = "__Sz_sitofp_i64_f32";
  const static constexpr char *H_sitofp_i64_f64 = "__Sz_sitofp_i64_f64";
  const static constexpr char *H_srem_i32 = "__modsi3";
  const static constexpr char *H_srem_i64 = "__moddi3";
  const static constexpr char *H_udiv_i32 = "__udivsi3";
  const static constexpr char *H_udiv_i64 = "__udivdi3";
  const static constexpr char *H_uitofp_4xi32_4xf32 = "__Sz_uitofp_4xi32_4xf32";
  const static constexpr char *H_uitofp_i32_f32 = "__Sz_uitofp_i32_f32";
  const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
  const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
  const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
  const static constexpr char *H_urem_i32 = "__umodsi3";
  const static constexpr char *H_urem_i64 = "__umoddi3";

private:
  int32_t SnapshotStackAdjustment = 0;
};

/// TargetDataLowering is used for "lowering" data including initializers
/// for global variables, and the internal constant pools. It is separated
/// out from TargetLowering because it does not require a Cfg.
class TargetDataLowering {
  TargetDataLowering() = delete;
  TargetDataLowering(const TargetDataLowering &) = delete;
  TargetDataLowering &operator=(const TargetDataLowering &) = delete;

public:
  static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
  virtual ~TargetDataLowering();

  virtual void lowerGlobals(const VariableDeclarationList &Vars,
                            const IceString &SectionSuffix) = 0;
  virtual void lowerConstants() = 0;
  virtual void lowerJumpTables() = 0;

protected:
  void emitGlobal(const VariableDeclaration &Var,
                  const IceString &SectionSuffix);

  /// For now, we assume .long is the right directive for emitting 4-byte
  /// global relocations. However, LLVM MIPS usually uses .4byte instead.
  /// Perhaps there is some difference when the location is unaligned.
  static const char *getEmit32Directive() { return ".long"; }
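  // Illustrative sketch: with the default directive, a 4-byte relocation
  // against a (hypothetical) global symbol would be emitted along the lines of
  //
  //   .long SomeGlobal
  //
  // while a MIPS-flavored target would typically emit ".4byte SomeGlobal".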

  explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};

/// TargetHeaderLowering is used to "lower" the header of an output file.
/// It writes out the target-specific header attributes. E.g., for ARM
/// this writes out the build attributes (float ABI, etc.).
class TargetHeaderLowering {
  TargetHeaderLowering() = delete;
  TargetHeaderLowering(const TargetHeaderLowering &) = delete;
  TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;

public:
  static std::unique_ptr<TargetHeaderLowering>
  createLowering(GlobalContext *Ctx);
  virtual ~TargetHeaderLowering();

  virtual void lower() {}

protected:
  explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};

} // end of namespace Ice

#endif // SUBZERO_SRC_ICETARGETLOWERING_H