blob: 9fb0c15e142631014328e903a94c0f803af738c0 [file] [log] [blame]
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -07001//===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
2//
3// The Subzero Code Generator
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Andrew Scull9612d322015-07-06 14:53:25 -07009///
10/// \file
Jim Stichnoth92a6e5b2015-12-02 16:52:44 -080011/// \brief Declares the TargetLowering, LoweringContext, and TargetDataLowering
12/// classes.
13///
14/// TargetLowering is an abstract class used to drive the translation/lowering
15/// process. LoweringContext maintains a context for lowering each instruction,
16/// offering conveniences such as iterating over non-deleted instructions.
17/// TargetDataLowering is an abstract class used to drive the lowering/emission
18/// of global initializers, external global declarations, and internal constant
19/// pools.
Andrew Scull9612d322015-07-06 14:53:25 -070020///
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070021//===----------------------------------------------------------------------===//
22
23#ifndef SUBZERO_SRC_ICETARGETLOWERING_H
24#define SUBZERO_SRC_ICETARGETLOWERING_H
25
John Porto1d937a82015-12-17 06:19:34 -080026#include "IceCfgNode.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070027#include "IceDefs.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070028#include "IceInst.h" // for the names of the Inst subtypes
Jan Voung76bb0be2015-05-14 09:26:19 -070029#include "IceOperand.h"
Jim Stichnotha18cc9c2014-09-30 19:10:22 -070030#include "IceTypes.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070031
John Porto1d937a82015-12-17 06:19:34 -080032#include <utility>
33
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070034namespace Ice {
35
// UnimplementedError is defined as a macro so that we can get actual line
// numbers.
//
// If the -skip-unimplemented flag is NOT set, this aborts translation with a
// "Not yet implemented" fatal error; otherwise it is a no-op.
#define UnimplementedError(Flags)                                              \
  do {                                                                         \
    if (!static_cast<const ClFlags &>(Flags).getSkipUnimplemented()) {         \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable("Not yet implemented");                                 \
      abort();                                                                 \
    }                                                                          \
  } while (0)
47
// UnimplementedLoweringError is similar in style to UnimplementedError. Given
// a TargetLowering object pointer and an Inst pointer, it adds appropriate
// FakeDef and FakeUse instructions to try to maintain liveness consistency.
//
// When -skip-unimplemented is set, the instruction is replaced by fake
// def/uses (see TargetLowering::addFakeDefUses()) instead of aborting, so
// that liveness analysis on the partially-lowered function stays consistent.
#define UnimplementedLoweringError(Target, Instr)                              \
  do {                                                                         \
    if ((Target)->Ctx->getFlags().getSkipUnimplemented()) {                    \
      (Target)->addFakeDefUses(Instr);                                         \
    } else {                                                                   \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable("Not yet implemented");                                 \
      abort();                                                                 \
    }                                                                          \
  } while (0)
62
/// LoweringContext makes it easy to iterate through non-deleted instructions in
/// a node, and insert new (lowered) instructions at the current point. Along
/// with the instruction list container and associated iterators, it holds the
/// current node, which is needed when inserting new instructions in order to
/// track whether variables are used as single-block or multi-block.
class LoweringContext {
  LoweringContext(const LoweringContext &) = delete;
  LoweringContext &operator=(const LoweringContext &) = delete;

public:
  LoweringContext() = default;
  ~LoweringContext() = default;
  /// Binds the context to Node and resets the iterators (defined in the .cpp).
  void init(CfgNode *Node);
  /// Returns the next non-deleted instruction, or nullptr if at the end.
  /// NOTE(review): relies on InstList::iterator converting implicitly to
  /// Inst* — confirm against the InstList definition in IceDefs.h.
  Inst *getNextInst() const {
    if (Next == End)
      return nullptr;
    return Next;
  }
  /// Advances Iter past deleted instructions and returns the instruction it
  /// lands on, or nullptr at the end. Iter is updated for the caller.
  Inst *getNextInst(InstList::iterator &Iter) const {
    advanceForward(Iter);
    if (Iter == End)
      return nullptr;
    return Iter;
  }
  CfgNode *getNode() const { return Node; }
  bool atEnd() const { return Cur == End; }
  InstList::iterator getCur() const { return Cur; }
  InstList::iterator getNext() const { return Next; }
  InstList::iterator getEnd() const { return End; }
  /// Inserts a pre-built instruction at the current insertion point (Next).
  void insert(Inst *Inst);
  /// Convenience wrapper: creates an instruction of type Inst via its static
  /// create() factory on the enclosing Cfg, inserts it, and returns it.
  template <typename Inst, typename... Args> Inst *insert(Args &&... A) {
    auto *New = Inst::create(Node->getCfg(), std::forward<Args>(A)...);
    insert(New);
    return New;
  }
  Inst *getLastInserted() const;
  /// Moves Cur up to the Next position (used after lowering an instruction).
  void advanceCur() { Cur = Next; }
  /// Advances Next to the following non-deleted instruction.
  void advanceNext() { advanceForward(Next); }
  void setCur(InstList::iterator C) { Cur = C; }
  void setNext(InstList::iterator N) { Next = N; }
  void rewind();
  void setInsertPoint(const InstList::iterator &Position) { Next = Position; }
  /// availability*() implement the simple "Dest=Src" availability analysis
  /// described on the LastDest/LastSrc fields below.
  void availabilityReset();
  void availabilityUpdate();
  Variable *availabilityGet(Operand *Src) const;

private:
  /// Node is the argument to Inst::updateVars().
  CfgNode *Node = nullptr;
  /// The most recently inserted instruction, returned by getLastInserted().
  Inst *LastInserted = nullptr;
  /// Cur points to the current instruction being considered. It is guaranteed
  /// to point to a non-deleted instruction, or to be End.
  InstList::iterator Cur;
  /// Next doubles as a pointer to the next valid instruction (if any), and the
  /// new-instruction insertion point. It is also updated for the caller in case
  /// the lowering consumes more than one high-level instruction. It is
  /// guaranteed to point to a non-deleted instruction after Cur, or to be End.
  // TODO: Consider separating the notion of "next valid instruction" and "new
  // instruction insertion point", to avoid confusion when previously-deleted
  // instructions come between the two points.
  InstList::iterator Next;
  /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
  InstList::iterator Begin;
  /// End is a copy of Insts.end(), used if Next needs to be advanced.
  InstList::iterator End;
  /// LastDest and LastSrc capture the parameters of the last "Dest=Src" simple
  /// assignment inserted (provided Src is a variable). This is used for simple
  /// availability analysis.
  Variable *LastDest = nullptr;
  Variable *LastSrc = nullptr;

  void skipDeleted(InstList::iterator &I) const;
  void advanceForward(InstList::iterator &I) const;
};
137
/// A helper class to advance the LoweringContext at each loop iteration.
///
/// RAII guard: construct it at the top of a loop body and, when it is
/// destroyed at the end of the iteration, it advances both Cur and Next on the
/// wrapped LoweringContext. The reference must outlive this object.
class PostIncrLoweringContext {
  PostIncrLoweringContext() = delete;
  PostIncrLoweringContext(const PostIncrLoweringContext &) = delete;
  PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete;

public:
  explicit PostIncrLoweringContext(LoweringContext &Context)
      : Context(Context) {}
  ~PostIncrLoweringContext() {
    Context.advanceCur();
    Context.advanceNext();
  }

private:
  LoweringContext &Context;
};
155
/// TargetLowering is the base class for all backends in Subzero. In addition to
/// implementing the abstract methods in this class, each concrete target must
/// also implement a named constructor in its own namespace. For instance, for
/// X8632 we have:
///
/// namespace X8632 {
/// void createTargetLowering(Cfg *Func);
/// }
class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  static void staticInit(GlobalContext *Ctx);
  // Each target must define a public static method:
  //   static void staticInit(GlobalContext *Ctx);

  /// Factory: dispatches on Target to the per-target named constructor.
  static std::unique_ptr<TargetLowering> createLowering(TargetArch Target,
                                                        Cfg *Func);

  virtual std::unique_ptr<Assembler> createAssembler() const = 0;

  /// Drives translation by dispatching to the translateO*() hook that matches
  /// the optimization level in the command-line flags.
  void translate() {
    switch (Ctx->getFlags().getOptLevel()) {
    case Opt_m1:
      translateOm1();
      break;
    case Opt_0:
      translateO0();
      break;
    case Opt_1:
      translateO1();
      break;
    case Opt_2:
      translateO2();
      break;
    }
  }
  /// The default translateO*() implementations record an error on the Cfg;
  /// targets override the levels they support.
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Generates calls to intrinsics for operations the Target can't handle.
  void genTargetHelperCalls();
  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion(RandomNumberGenerator &RNG);
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific insertion
  /// point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the current
  /// node. The main intention is to do what's needed to keep the unlowered Phi
  /// instructions consistent with the lowered non-Phi instructions, e.g. to
  /// lower 64-bit operands on a 32-bit target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns true if
  /// some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }

  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical register. This is
  /// generally used to get very direct access to the register such as in the
  /// prolog or epilog or for marking scratch registers as killed by a call. If
  /// a Type is not provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(SizeT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

  virtual bool hasFramePointer() const { return false; }
  virtual void setHasFramePointer() = 0;
  virtual SizeT getStackReg() const = 0;
  virtual SizeT getFrameReg() const = 0;
  virtual SizeT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;
  virtual uint32_t getStackAlignment() const = 0;
  virtual void reserveFixedAllocaArea(size_t Size, size_t Align) = 0;
  virtual int32_t getFrameFixedAllocaOffset() const = 0;
  virtual uint32_t maxOutArgsSizeBytes() const { return 0; }

  /// Return whether a 64-bit Variable should be split into a Variable64On32.
  virtual bool shouldSplitToVariable64On32(Type Ty) const = 0;

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the "returns
  /// twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }
  Cfg *getFunc() const { return Func; }
  GlobalContext *getGlobalContext() const { return Ctx; }

  /// Bitmask categories used to query register sets via getRegisterSet().
  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  using RegSetMask = uint32_t;

  virtual llvm::SmallBitVector getRegisterSet(RegSetMask Include,
                                              RegSetMask Exclude) const = 0;
  virtual const llvm::SmallBitVector &
  getRegistersForVariable(const Variable *Var) const = 0;
  virtual const llvm::SmallBitVector &getAliasesForRegister(SizeT) const = 0;

  void regAlloc(RegAllocKind Kind);

  virtual void
  makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation,
                                const llvm::SmallBitVector &ExcludeRegisters,
                                uint64_t Salt) const = 0;

  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR,
                         const char *Suffix = "") const;

  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;
  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantRelocatable *CR) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  virtual ~TargetLowering() = default;

private:
  // This control variable is used by AutoBundle (RAII-style bundle
  // locking/unlocking) to prevent nested bundles.
  bool AutoBundling = false;

  // _bundle_lock(), and _bundle_unlock(), were made private to force subtargets
  // to use the AutoBundle helper.
  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert<InstBundleLock>(BundleOption);
  }
  void _bundle_unlock() { Context.insert<InstBundleUnlock>(); }

protected:
  /// AutoBundle provides RAII-style bundling. Sub-targets are expected to use
  /// it when emitting NaCl Bundles to ensure proper bundle_unlocking, and
  /// prevent nested bundles.
  ///
  /// AutoBundle objects will emit a _bundle_lock during construction (but only
  /// if sandboxed code generation was requested), and a bundle_unlock() during
  /// destruction. By carefully scoping objects of this type, Subtargets can
  /// ensure proper bundle emission.
  class AutoBundle {
    AutoBundle() = delete;
    AutoBundle(const AutoBundle &) = delete;
    AutoBundle &operator=(const AutoBundle &) = delete;

  public:
    explicit AutoBundle(TargetLowering *Target, InstBundleLock::Option Option =
                                                    InstBundleLock::Opt_None);
    ~AutoBundle();

  private:
    TargetLowering *const Target;
    const bool NeedSandboxing;
  };

  explicit TargetLowering(Cfg *Func);
  // Applies command line filters to TypeToRegisterSet array.
  static void
  filterTypeToRegisterSet(GlobalContext *Ctx, int32_t NumRegs,
                          llvm::SmallBitVector TypeToRegisterSet[],
                          size_t TypeToRegisterSetSize,
                          std::function<IceString(int32_t)> getRegName);
  // Per-instruction lowering hooks, one per high-level instruction kind.
  // Concrete targets must implement all of the pure-virtual ones.
  virtual void lowerAlloca(const InstAlloca *Inst) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Inst) = 0;
  virtual void lowerAssign(const InstAssign *Inst) = 0;
  virtual void lowerBr(const InstBr *Inst) = 0;
  virtual void lowerCall(const InstCall *Inst) = 0;
  virtual void lowerCast(const InstCast *Inst) = 0;
  virtual void lowerFcmp(const InstFcmp *Inst) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Inst) = 0;
  virtual void lowerIcmp(const InstIcmp *Inst) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Inst) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Inst) = 0;
  virtual void lowerLoad(const InstLoad *Inst) = 0;
  virtual void lowerPhi(const InstPhi *Inst) = 0;
  virtual void lowerRet(const InstRet *Inst) = 0;
  virtual void lowerSelect(const InstSelect *Inst) = 0;
  virtual void lowerStore(const InstStore *Inst) = 0;
  virtual void lowerSwitch(const InstSwitch *Inst) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void genTargetHelperCallFor(Inst *Instr) = 0;
  virtual uint32_t getCallStackArgumentsSizeBytes(const InstCall *Instr) = 0;

  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void doMockBoundsCheck(Operand *) {}
  virtual void randomlyInsertNop(float Probability,
                                 RandomNumberGenerator &RNG) = 0;
  /// This gives the target an opportunity to post-process the lowered expansion
  /// before returning.
  virtual void postLower() {}

  /// When the SkipUnimplemented flag is set, addFakeDefUses() gets invoked by
  /// the UnimplementedLoweringError macro to insert fake uses of all the
  /// instruction variables and a fake def of the instruction dest, in order to
  /// preserve integrity of liveness analysis.
  void addFakeDefUses(const Inst *Instr);

  /// Find (non-SSA) instructions where the Dest variable appears in some source
  /// operand, and set the IsDestRedefined flag. This keeps liveness analysis
  /// consistent.
  void markRedefinitions();

  /// Make a pass over the Cfg to determine which variables need stack slots and
  /// place them in a sorted list (SortedSpilledVariables). Among those, vars,
  /// classify the spill variables as local to the basic block vs global
  /// (multi-block) in order to compute the parameters GlobalsSize and
  /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of
  /// locals is disallowed) along with alignments required for variables in each
  /// area. We rely on accurate VMetadata in order to classify a variable as
  /// global vs local (otherwise the variable is conservatively global). The
  /// in-args should be initialized to 0.
  ///
  /// This is only a pre-pass and the actual stack slot assignment is handled
  /// separately.
  ///
  /// There may be target-specific Variable types, which will be handled by
  /// TargetVarHook. If the TargetVarHook returns true, then the variable is
  /// skipped and not considered with the rest of the spilled variables.
  void getVarStackSlotParams(VarList &SortedSpilledVariables,
                             llvm::SmallBitVector &RegsUsed,
                             size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                             uint32_t *SpillAreaAlignmentBytes,
                             uint32_t *LocalsSlotsAlignmentBytes,
                             std::function<bool(Variable *)> TargetVarHook);

  /// Calculate the amount of padding needed to align the local and global areas
  /// to the required alignment. This assumes the globals/locals layout used by
  /// getVarStackSlotParams and assignVarStackSlots.
  void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                            uint32_t SpillAreaAlignmentBytes,
                            size_t GlobalsSize,
                            uint32_t LocalsSlotsAlignmentBytes,
                            uint32_t *SpillAreaPaddingBytes,
                            uint32_t *LocalsSlotsPaddingBytes);

  /// Make a pass through the SortedSpilledVariables and actually assign stack
  /// slots. SpillAreaPaddingBytes takes into account stack alignment padding.
  /// The SpillArea starts after that amount of padding. This matches the scheme
  /// in getVarStackSlotParams, where there may be a separate multi-block global
  /// var spill area and a local var spill area.
  void assignVarStackSlots(VarList &SortedSpilledVariables,
                           size_t SpillAreaPaddingBytes,
                           size_t SpillAreaSizeBytes,
                           size_t GlobalsAndSubsequentPaddingSize,
                           bool UsesFramePointer);

  /// Sort the variables in Source based on required alignment. The variables
  /// with the largest alignment need are placed in the front of the Dest list.
  void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

  /// Make a call to an external helper function.
  InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                           SizeT MaxSrcs);

  void _set_dest_redefined() { Context.getLastInserted()->setDestRedefined(); }

  bool shouldOptimizeMemIntrins();

  Cfg *Func;
  GlobalContext *Ctx;
  bool HasComputedFrame = false;
  bool CallsReturnsTwice = false;
  SizeT NextLabelNumber = 0;
  SizeT NextJumpTableNumber = 0;
  LoweringContext Context;

  // Runtime helper function names
  const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
  const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
  const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
  const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
  const static constexpr char *H_call_ctpop_i32 = "__popcountsi2";
  const static constexpr char *H_call_ctpop_i64 = "__popcountdi2";
  const static constexpr char *H_call_longjmp = "longjmp";
  const static constexpr char *H_call_memcpy = "memcpy";
  const static constexpr char *H_call_memmove = "memmove";
  const static constexpr char *H_call_memset = "memset";
  const static constexpr char *H_call_read_tp = "__nacl_read_tp";
  const static constexpr char *H_call_setjmp = "setjmp";
  const static constexpr char *H_fptosi_f32_i64 = "__Sz_fptosi_f32_i64";
  const static constexpr char *H_fptosi_f64_i64 = "__Sz_fptosi_f64_i64";
  const static constexpr char *H_fptoui_4xi32_f32 = "__Sz_fptoui_4xi32_f32";
  const static constexpr char *H_fptoui_f32_i32 = "__Sz_fptoui_f32_i32";
  const static constexpr char *H_fptoui_f32_i64 = "__Sz_fptoui_f32_i64";
  const static constexpr char *H_fptoui_f64_i32 = "__Sz_fptoui_f64_i32";
  const static constexpr char *H_fptoui_f64_i64 = "__Sz_fptoui_f64_i64";
  const static constexpr char *H_frem_f32 = "fmodf";
  const static constexpr char *H_frem_f64 = "fmod";
  const static constexpr char *H_getIP_prefix = "__Sz_getIP_";
  const static constexpr char *H_sdiv_i32 = "__divsi3";
  const static constexpr char *H_sdiv_i64 = "__divdi3";
  const static constexpr char *H_sitofp_i64_f32 = "__Sz_sitofp_i64_f32";
  const static constexpr char *H_sitofp_i64_f64 = "__Sz_sitofp_i64_f64";
  const static constexpr char *H_srem_i32 = "__modsi3";
  const static constexpr char *H_srem_i64 = "__moddi3";
  const static constexpr char *H_udiv_i32 = "__udivsi3";
  const static constexpr char *H_udiv_i64 = "__udivdi3";
  const static constexpr char *H_uitofp_4xi32_4xf32 = "__Sz_uitofp_4xi32_4xf32";
  const static constexpr char *H_uitofp_i32_f32 = "__Sz_uitofp_i32_f32";
  const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
  const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
  const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
  const static constexpr char *H_urem_i32 = "__umodsi3";
  const static constexpr char *H_urem_i64 = "__umoddi3";
};
505
/// TargetDataLowering is used for "lowering" data including initializers for
/// global variables, and the internal constant pools. It is separated out from
/// TargetLowering because it does not require a Cfg.
class TargetDataLowering {
  TargetDataLowering() = delete;
  TargetDataLowering(const TargetDataLowering &) = delete;
  TargetDataLowering &operator=(const TargetDataLowering &) = delete;

public:
  static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
  virtual ~TargetDataLowering();

  virtual void lowerGlobals(const VariableDeclarationList &Vars,
                            const IceString &SectionSuffix) = 0;
  virtual void lowerConstants() = 0;
  virtual void lowerJumpTables() = 0;

protected:
  /// Shared implementation for emitting a single global variable declaration.
  void emitGlobal(const VariableDeclaration &Var,
                  const IceString &SectionSuffix);

  /// For now, we assume .long is the right directive for emitting 4 byte
  /// global relocations. However, LLVM MIPS usually uses .4byte instead.
  /// Perhaps there is some difference when the location is unaligned.
  static const char *getEmit32Directive() { return ".long"; }

  explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};
535
/// TargetHeaderLowering is used to "lower" the header of an output file. It
/// writes out the target-specific header attributes. E.g., for ARM this writes
/// out the build attributes (float ABI, etc.).
class TargetHeaderLowering {
  TargetHeaderLowering() = delete;
  TargetHeaderLowering(const TargetHeaderLowering &) = delete;
  TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;

public:
  /// Factory returning the header-lowering object for the configured target.
  static std::unique_ptr<TargetHeaderLowering>
  createLowering(GlobalContext *Ctx);
  virtual ~TargetHeaderLowering();

  /// Emits the header; the default implementation emits nothing, so targets
  /// with no header attributes need not override.
  virtual void lower() {}

protected:
  explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
  GlobalContext *Ctx;
};
555
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700556} // end of namespace Ice
557
558#endif // SUBZERO_SRC_ICETARGETLOWERING_H