blob: d55e48cfeaf57ecb6d8071805f9859c0978e8d53 [file] [log] [blame]
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -07001//===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
2//
3// The Subzero Code Generator
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Andrew Scull9612d322015-07-06 14:53:25 -07009///
10/// \file
Jim Stichnoth92a6e5b2015-12-02 16:52:44 -080011/// \brief Declares the TargetLowering, LoweringContext, and TargetDataLowering
12/// classes.
13///
14/// TargetLowering is an abstract class used to drive the translation/lowering
15/// process. LoweringContext maintains a context for lowering each instruction,
16/// offering conveniences such as iterating over non-deleted instructions.
17/// TargetDataLowering is an abstract class used to drive the lowering/emission
18/// of global initializers, external global declarations, and internal constant
19/// pools.
Andrew Scull9612d322015-07-06 14:53:25 -070020///
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070021//===----------------------------------------------------------------------===//
22
23#ifndef SUBZERO_SRC_ICETARGETLOWERING_H
24#define SUBZERO_SRC_ICETARGETLOWERING_H
25
John Portoe82b5602016-02-24 15:58:55 -080026#include "IceBitVector.h"
27#include "IceCfgNode.h"
Manasij Mukherjee7cd926d2016-08-04 12:33:23 -070028#include "IceDefs.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070029#include "IceInst.h" // for the names of the Inst subtypes
Jan Voung76bb0be2015-05-14 09:26:19 -070030#include "IceOperand.h"
Manasij Mukherjee7cd926d2016-08-04 12:33:23 -070031#include "IceRegAlloc.h"
Jim Stichnotha18cc9c2014-09-30 19:10:22 -070032#include "IceTypes.h"
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070033
John Porto1d937a82015-12-17 06:19:34 -080034#include <utility>
35
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -070036namespace Ice {
37
// UnimplementedError is defined as a macro so that we can get actual line
// numbers.
//
// Behavior: if the SkipUnimplemented flag in the given ClFlags is NOT set,
// translation is aborted; if it is set, the macro is a no-op so that
// translation can proceed past unimplemented lowering cases.
#define UnimplementedError(Flags)                                              \
  do {                                                                         \
    if (!static_cast<const ClFlags &>(Flags).getSkipUnimplemented()) {         \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable("Not yet implemented");                                 \
      abort();                                                                 \
    }                                                                          \
  } while (0)
49
// UnimplementedLoweringError is similar in style to UnimplementedError. Given
// a TargetLowering object pointer and an Inst pointer, it adds appropriate
// FakeDef and FakeUse instructions to try maintain liveness consistency.
//
// Note: unlike UnimplementedError, this macro consults the global flags via
// getFlags() rather than taking a ClFlags argument, and the abort message
// includes the name of the offending instruction.
#define UnimplementedLoweringError(Target, Instr)                              \
  do {                                                                         \
    if (getFlags().getSkipUnimplemented()) {                                   \
      (Target)->addFakeDefUses(Instr);                                         \
    } else {                                                                   \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable(                                                        \
          (std::string("Not yet implemented: ") + Instr->getInstName())        \
              .c_str());                                                       \
      abort();                                                                 \
    }                                                                          \
  } while (0)
66
/// LoweringContext makes it easy to iterate through non-deleted instructions in
/// a node, and insert new (lowered) instructions at the current point. Along
/// with the instruction list container and associated iterators, it holds the
/// current node, which is needed when inserting new instructions in order to
/// track whether variables are used as single-block or multi-block.
class LoweringContext {
  LoweringContext(const LoweringContext &) = delete;
  LoweringContext &operator=(const LoweringContext &) = delete;

public:
  LoweringContext() = default;
  ~LoweringContext() = default;
  /// Binds the context to Node and (re)initializes the iterators.
  void init(CfgNode *Node);
  /// Returns the next non-deleted instruction, or nullptr if Next is at the
  /// end of the list.
  Inst *getNextInst() const {
    if (Next == End)
      return nullptr;
    return iteratorToInst(Next);
  }
  /// Advances Iter past deleted instructions and returns the instruction it
  /// lands on, or nullptr at end of list.
  Inst *getNextInst(InstList::iterator &Iter) const {
    advanceForward(Iter);
    if (Iter == End)
      return nullptr;
    return iteratorToInst(Iter);
  }
  CfgNode *getNode() const { return Node; }
  bool atEnd() const { return Cur == End; }
  InstList::iterator getCur() const { return Cur; }
  InstList::iterator getNext() const { return Next; }
  InstList::iterator getEnd() const { return End; }
  /// Inserts Instr at the current insertion point (before Next).
  void insert(Inst *Instr);
  /// Convenience wrapper: creates an instruction of type Inst in this node's
  /// Cfg, forwarding the given constructor arguments, and inserts it.
  template <typename Inst, typename... Args> Inst *insert(Args &&...A) {
    auto *New = Inst::create(Node->getCfg(), std::forward<Args>(A)...);
    insert(New);
    return New;
  }
  Inst *getLastInserted() const;
  /// Moves Cur up to Next (the last lowered position).
  void advanceCur() { Cur = Next; }
  /// Advances Next to the following non-deleted instruction.
  void advanceNext() { advanceForward(Next); }
  void setCur(InstList::iterator C) { Cur = C; }
  void setNext(InstList::iterator N) { Next = N; }
  void rewind();
  void setInsertPoint(const InstList::iterator &Position) { Next = Position; }
  /// Availability tracking hooks for the simple "Dest=Src" analysis; see the
  /// LastDest/LastSrc members below.
  void availabilityReset();
  void availabilityUpdate();
  Variable *availabilityGet(Operand *Src) const;

private:
  /// Node is the argument to Inst::updateVars().
  CfgNode *Node = nullptr;
  /// The most recently inserted instruction, returned by getLastInserted().
  Inst *LastInserted = nullptr;
  /// Cur points to the current instruction being considered. It is guaranteed
  /// to point to a non-deleted instruction, or to be End.
  InstList::iterator Cur;
  /// Next doubles as a pointer to the next valid instruction (if any), and the
  /// new-instruction insertion point. It is also updated for the caller in case
  /// the lowering consumes more than one high-level instruction. It is
  /// guaranteed to point to a non-deleted instruction after Cur, or to be End.
  // TODO: Consider separating the notion of "next valid instruction" and "new
  // instruction insertion point", to avoid confusion when previously-deleted
  // instructions come between the two points.
  InstList::iterator Next;
  /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
  InstList::iterator Begin;
  /// End is a copy of Insts.end(), used if Next needs to be advanced.
  InstList::iterator End;
  /// LastDest and LastSrc capture the parameters of the last "Dest=Src" simple
  /// assignment inserted (provided Src is a variable). This is used for simple
  /// availability analysis.
  Variable *LastDest = nullptr;
  Variable *LastSrc = nullptr;

  void skipDeleted(InstList::iterator &I) const;
  void advanceForward(InstList::iterator &I) const;
};
141
Jan Voung28068ad2015-07-31 12:58:46 -0700142/// A helper class to advance the LoweringContext at each loop iteration.
143class PostIncrLoweringContext {
144 PostIncrLoweringContext() = delete;
145 PostIncrLoweringContext(const PostIncrLoweringContext &) = delete;
146 PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete;
147
148public:
149 explicit PostIncrLoweringContext(LoweringContext &Context)
150 : Context(Context) {}
151 ~PostIncrLoweringContext() {
152 Context.advanceCur();
153 Context.advanceNext();
154 }
155
156private:
157 LoweringContext &Context;
158};
159
John Porto53611e22015-12-30 07:30:10 -0800160/// TargetLowering is the base class for all backends in Subzero. In addition to
161/// implementing the abstract methods in this class, each concrete target must
162/// also implement a named constructor in its own namespace. For instance, for
163/// X8632 we have:
164///
165/// namespace X8632 {
166/// void createTargetLowering(Cfg *Func);
167/// }
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700168class TargetLowering {
Jim Stichnothc6ead202015-02-24 09:30:30 -0800169 TargetLowering() = delete;
Jim Stichnoth7b451a92014-10-15 14:39:23 -0700170 TargetLowering(const TargetLowering &) = delete;
171 TargetLowering &operator=(const TargetLowering &) = delete;
172
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700173public:
Karl Schimpf5403f5d2016-01-15 11:07:46 -0800174 static void staticInit(GlobalContext *Ctx);
Jim Stichnoth8ff4b282016-01-04 15:39:06 -0800175 // Each target must define a public static method:
Karl Schimpf5403f5d2016-01-15 11:07:46 -0800176 // static void staticInit(GlobalContext *Ctx);
Jim Stichnoth467ffe52016-03-29 15:01:06 -0700177 static bool shouldBePooled(const class Constant *C);
Nicolas Capens32f9cce2016-10-19 01:24:27 -0400178 static Type getPointerType();
John Porto53611e22015-12-30 07:30:10 -0800179
180 static std::unique_ptr<TargetLowering> createLowering(TargetArch Target,
181 Cfg *Func);
182
183 virtual std::unique_ptr<Assembler> createAssembler() const = 0;
184
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700185 void translate() {
Jim Stichnothdd6dcfa2016-04-18 12:52:09 -0700186 switch (Func->getOptLevel()) {
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700187 case Opt_m1:
188 translateOm1();
189 break;
190 case Opt_0:
191 translateO0();
192 break;
193 case Opt_1:
194 translateO1();
195 break;
196 case Opt_2:
197 translateO2();
198 break;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700199 }
200 }
201 virtual void translateOm1() {
202 Func->setError("Target doesn't specify Om1 lowering steps.");
203 }
204 virtual void translateO0() {
205 Func->setError("Target doesn't specify O0 lowering steps.");
206 }
207 virtual void translateO1() {
208 Func->setError("Target doesn't specify O1 lowering steps.");
209 }
210 virtual void translateO2() {
211 Func->setError("Target doesn't specify O2 lowering steps.");
212 }
213
John Porto5e0a8a72015-11-20 13:50:36 -0800214 /// Generates calls to intrinsics for operations the Target can't handle.
215 void genTargetHelperCalls();
Andrew Scull9612d322015-07-06 14:53:25 -0700216 /// Tries to do address mode optimization on a single instruction.
Jim Stichnothd97c7df2014-06-04 11:57:08 -0700217 void doAddressOpt();
Andrew Scull9612d322015-07-06 14:53:25 -0700218 /// Lowers a single non-Phi instruction.
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700219 void lower();
Jim Stichnotha3f57b92015-07-30 12:46:04 -0700220 /// Inserts and lowers a single high-level instruction at a specific insertion
221 /// point.
222 void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
Andrew Scull57e12682015-09-16 11:30:19 -0700223 /// Does preliminary lowering of the set of Phi instructions in the current
224 /// node. The main intention is to do what's needed to keep the unlowered Phi
225 /// instructions consistent with the lowered non-Phi instructions, e.g. to
226 /// lower 64-bit operands on a 32-bit target.
Jim Stichnoth336f6c42014-10-30 15:01:31 -0700227 virtual void prelowerPhis() {}
Andrew Scull57e12682015-09-16 11:30:19 -0700228 /// Tries to do branch optimization on a single instruction. Returns true if
229 /// some optimization was done.
Jim Stichnothff9c7062014-09-18 04:50:49 -0700230 virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
231 return false;
232 }
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700233
Jim Stichnoth3d44fe82014-11-01 10:10:18 -0700234 virtual SizeT getNumRegisters() const = 0;
Andrew Scull57e12682015-09-16 11:30:19 -0700235 /// Returns a variable pre-colored to the specified physical register. This is
236 /// generally used to get very direct access to the register such as in the
237 /// prolog or epilog or for marking scratch registers as killed by a call. If
238 /// a Type is not provided, a target-specific default type is used.
Jim Stichnoth8aa39662016-02-10 11:20:30 -0800239 virtual Variable *getPhysicalRegister(RegNumT RegNum,
Jim Stichnoth98712a32014-10-24 10:59:02 -0700240 Type Ty = IceType_void) = 0;
Andrew Scull9612d322015-07-06 14:53:25 -0700241 /// Returns a printable name for the register.
Jim Stichnoth467ffe52016-03-29 15:01:06 -0700242 virtual const char *getRegName(RegNumT RegNum, Type Ty) const = 0;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700243
244 virtual bool hasFramePointer() const { return false; }
David Sehre39d0ca2015-11-06 11:25:41 -0800245 virtual void setHasFramePointer() = 0;
Jim Stichnoth8aa39662016-02-10 11:20:30 -0800246 virtual RegNumT getStackReg() const = 0;
247 virtual RegNumT getFrameReg() const = 0;
248 virtual RegNumT getFrameOrStackReg() const = 0;
Matt Walad4799f42014-08-14 14:24:12 -0700249 virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;
David Sehre39d0ca2015-11-06 11:25:41 -0800250 virtual uint32_t getStackAlignment() const = 0;
Nicolas Capens4e679e52017-01-12 17:01:06 -0500251 virtual bool needsStackPointerAlignment() const { return false; }
David Sehr2f3b8ec2015-11-16 16:51:39 -0800252 virtual void reserveFixedAllocaArea(size_t Size, size_t Align) = 0;
253 virtual int32_t getFrameFixedAllocaOffset() const = 0;
John Porto614140e2015-11-23 11:43:13 -0800254 virtual uint32_t maxOutArgsSizeBytes() const { return 0; }
Stefan Maksimovic298d14e2017-01-11 05:58:27 -0800255 // Addressing relative to frame pointer differs in MIPS compared to X86/ARM
256 // since MIPS decrements its stack pointer prior to saving it in the frame
257 // pointer register.
258 virtual uint32_t getFramePointerOffset(uint32_t CurrentOffset,
259 uint32_t Size) const {
260 return -(CurrentOffset + Size);
261 }
Andrew Scull6d47bcd2015-09-17 17:10:05 -0700262 /// Return whether a 64-bit Variable should be split into a Variable64On32.
263 virtual bool shouldSplitToVariable64On32(Type Ty) const = 0;
264
Jaydeep Patil958ddb72016-10-03 07:52:48 -0700265 /// Return whether a Vector Variable should be split into a VariableVecOn32.
266 virtual bool shouldSplitToVariableVecOn32(Type Ty) const {
267 (void)Ty;
268 return false;
269 }
270
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700271 bool hasComputedFrame() const { return HasComputedFrame; }
Andrew Scull57e12682015-09-16 11:30:19 -0700272 /// Returns true if this function calls a function that has the "returns
273 /// twice" attribute.
Jan Voung44d53e12014-09-11 19:18:03 -0700274 bool callsReturnsTwice() const { return CallsReturnsTwice; }
Jim Stichnothdd842db2015-01-27 12:53:53 -0800275 void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
Jan Voungb36ad9b2015-04-21 17:01:49 -0700276 SizeT makeNextLabelNumber() { return NextLabelNumber++; }
Andrew Scull86df4e92015-07-30 13:54:44 -0700277 SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700278 LoweringContext &getContext() { return Context; }
Jim Stichnoth8ff4b282016-01-04 15:39:06 -0800279 Cfg *getFunc() const { return Func; }
280 GlobalContext *getGlobalContext() const { return Ctx; }
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700281
282 enum RegSet {
283 RegSet_None = 0,
284 RegSet_CallerSave = 1 << 0,
285 RegSet_CalleeSave = 1 << 1,
286 RegSet_StackPointer = 1 << 2,
287 RegSet_FramePointer = 1 << 3,
288 RegSet_All = ~RegSet_None
289 };
Andrew Scull8072bae2015-09-14 16:01:26 -0700290 using RegSetMask = uint32_t;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700291
John Portoe82b5602016-02-24 15:58:55 -0800292 virtual SmallBitVector getRegisterSet(RegSetMask Include,
293 RegSetMask Exclude) const = 0;
Jim Stichnothb40595a2016-01-29 06:14:31 -0800294 /// Get the set of physical registers available for the specified Variable's
295 /// register class, applying register restrictions from the command line.
John Portoe82b5602016-02-24 15:58:55 -0800296 virtual const SmallBitVector &
Jim Stichnothc59288b2015-11-09 11:38:40 -0800297 getRegistersForVariable(const Variable *Var) const = 0;
Jim Stichnothb40595a2016-01-29 06:14:31 -0800298 /// Get the set of *all* physical registers available for the specified
299 /// Variable's register class, *not* applying register restrictions from the
300 /// command line.
John Portoe82b5602016-02-24 15:58:55 -0800301 virtual const SmallBitVector &
Jim Stichnothb40595a2016-01-29 06:14:31 -0800302 getAllRegistersForVariable(const Variable *Var) const = 0;
John Portoe82b5602016-02-24 15:58:55 -0800303 virtual const SmallBitVector &getAliasesForRegister(RegNumT) const = 0;
John Portobb0a5fe2015-09-04 11:23:41 -0700304
Jim Stichnoth70d0a052014-11-14 15:53:46 -0800305 void regAlloc(RegAllocKind Kind);
Manasij Mukherjee7cd926d2016-08-04 12:33:23 -0700306 void postRegallocSplitting(const SmallBitVector &RegMask);
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700307
Andrew Scull87f80c12015-07-20 10:19:16 -0700308 /// Get the minimum number of clusters required for a jump table to be
309 /// considered.
310 virtual SizeT getMinJumpTableSize() const = 0;
Andrew Scull86df4e92015-07-30 13:54:44 -0700311 virtual void emitJumpTable(const Cfg *Func,
312 const InstJumpTable *JumpTable) const = 0;
Andrew Scull87f80c12015-07-20 10:19:16 -0700313
Jim Stichnoth144cdce2014-09-22 16:02:59 -0700314 virtual void emitVariable(const Variable *Var) const = 0;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700315
Jim Stichnoth8ff4b282016-01-04 15:39:06 -0800316 void emitWithoutPrefix(const ConstantRelocatable *CR,
317 const char *Suffix = "") const;
Jan Voung76bb0be2015-05-14 09:26:19 -0700318
Jan Voung76bb0be2015-05-14 09:26:19 -0700319 virtual void emit(const ConstantInteger32 *C) const = 0;
320 virtual void emit(const ConstantInteger64 *C) const = 0;
321 virtual void emit(const ConstantFloat *C) const = 0;
322 virtual void emit(const ConstantDouble *C) const = 0;
Jim Stichnoth8ff4b282016-01-04 15:39:06 -0800323 virtual void emit(const ConstantUndef *C) const = 0;
324 virtual void emit(const ConstantRelocatable *CR) const = 0;
Jan Voung76bb0be2015-05-14 09:26:19 -0700325
Andrew Scull9612d322015-07-06 14:53:25 -0700326 /// Performs target-specific argument lowering.
Matt Wala45a06232014-07-09 16:33:22 -0700327 virtual void lowerArguments() = 0;
328
Jim Stichnotha59ae6f2015-05-17 10:11:41 -0700329 virtual void initNodeForLowering(CfgNode *) {}
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700330 virtual void addProlog(CfgNode *Node) = 0;
331 virtual void addEpilog(CfgNode *Node) = 0;
332
Jim Stichnothb9a84722016-08-01 13:18:36 -0700333 /// Create a properly-typed "mov" instruction. This is primarily for local
334 /// variable splitting.
335 virtual Inst *createLoweredMove(Variable *Dest, Variable *SrcVar) {
336 // TODO(stichnot): make pure virtual by implementing for all targets
337 (void)Dest;
338 (void)SrcVar;
339 llvm::report_fatal_error("createLoweredMove() unimplemented");
340 return nullptr;
341 }
342
Jim Stichnotheafb56c2015-06-22 10:35:22 -0700343 virtual ~TargetLowering() = default;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700344
John Porto3bf335f2016-01-15 11:17:55 -0800345private:
Eric Holkd6cf6b32016-02-17 11:09:48 -0800346 /// This indicates whether we are in the genTargetHelperCalls phase, and
347 /// therefore can do things like scalarization.
348 bool GeneratingTargetHelpers = false;
349
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700350protected:
Jim Stichnothc6ead202015-02-24 09:30:30 -0800351 explicit TargetLowering(Cfg *Func);
Karl Schimpf5403f5d2016-01-15 11:07:46 -0800352 // Applies command line filters to TypeToRegisterSet array.
Jim Stichnoth467ffe52016-03-29 15:01:06 -0700353 static void filterTypeToRegisterSet(
354 GlobalContext *Ctx, int32_t NumRegs, SmallBitVector TypeToRegisterSet[],
355 size_t TypeToRegisterSetSize,
356 std::function<std::string(RegNumT)> getRegName,
357 std::function<const char *(RegClass)> getRegClassName);
Jim Stichnoth8cfeb692016-02-05 09:50:02 -0800358 virtual void lowerAlloca(const InstAlloca *Instr) = 0;
359 virtual void lowerArithmetic(const InstArithmetic *Instr) = 0;
360 virtual void lowerAssign(const InstAssign *Instr) = 0;
361 virtual void lowerBr(const InstBr *Instr) = 0;
Eric Holk67c7c412016-04-15 13:05:37 -0700362 virtual void lowerBreakpoint(const InstBreakpoint *Instr) = 0;
Jim Stichnoth8cfeb692016-02-05 09:50:02 -0800363 virtual void lowerCall(const InstCall *Instr) = 0;
364 virtual void lowerCast(const InstCast *Instr) = 0;
365 virtual void lowerFcmp(const InstFcmp *Instr) = 0;
366 virtual void lowerExtractElement(const InstExtractElement *Instr) = 0;
367 virtual void lowerIcmp(const InstIcmp *Instr) = 0;
368 virtual void lowerInsertElement(const InstInsertElement *Instr) = 0;
Nicolas Capens33a77f72021-02-08 15:04:38 -0500369 virtual void lowerIntrinsic(const InstIntrinsic *Instr) = 0;
Jim Stichnoth8cfeb692016-02-05 09:50:02 -0800370 virtual void lowerLoad(const InstLoad *Instr) = 0;
371 virtual void lowerPhi(const InstPhi *Instr) = 0;
372 virtual void lowerRet(const InstRet *Instr) = 0;
373 virtual void lowerSelect(const InstSelect *Instr) = 0;
John Portoa47c11c2016-04-21 05:53:42 -0700374 virtual void lowerShuffleVector(const InstShuffleVector *Instr) = 0;
Jim Stichnoth8cfeb692016-02-05 09:50:02 -0800375 virtual void lowerStore(const InstStore *Instr) = 0;
376 virtual void lowerSwitch(const InstSwitch *Instr) = 0;
377 virtual void lowerUnreachable(const InstUnreachable *Instr) = 0;
Jim Stichnothe4f65d82015-06-17 22:16:02 -0700378 virtual void lowerOther(const Inst *Instr);
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700379
John Porto5e0a8a72015-11-20 13:50:36 -0800380 virtual void genTargetHelperCallFor(Inst *Instr) = 0;
John Portof4198542015-11-20 14:17:23 -0800381 virtual uint32_t getCallStackArgumentsSizeBytes(const InstCall *Instr) = 0;
John Porto5e0a8a72015-11-20 13:50:36 -0800382
Manasij Mukherjee0c704172016-07-21 12:40:24 -0700383 /// Opportunity to modify other instructions to help Address Optimization
384 virtual void doAddressOptOther() {}
Jim Stichnothd97c7df2014-06-04 11:57:08 -0700385 virtual void doAddressOptLoad() {}
386 virtual void doAddressOptStore() {}
Nicolas Capense986b312017-01-27 00:56:42 -0800387 virtual void doAddressOptLoadSubVector() {}
388 virtual void doAddressOptStoreSubVector() {}
Jim Stichnothad2989b2015-09-15 10:21:42 -0700389 virtual void doMockBoundsCheck(Operand *) {}
Andrew Scull57e12682015-09-16 11:30:19 -0700390 /// This gives the target an opportunity to post-process the lowered expansion
391 /// before returning.
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700392 virtual void postLower() {}
393
Jim Stichnoth91c773e2016-01-19 09:52:22 -0800394 /// When the SkipUnimplemented flag is set, addFakeDefUses() gets invoked by
395 /// the UnimplementedLoweringError macro to insert fake uses of all the
396 /// instruction variables and a fake def of the instruction dest, in order to
397 /// preserve integrity of liveness analysis.
398 void addFakeDefUses(const Inst *Instr);
399
Jim Stichnoth230d4102015-09-25 17:40:32 -0700400 /// Find (non-SSA) instructions where the Dest variable appears in some source
401 /// operand, and set the IsDestRedefined flag. This keeps liveness analysis
402 /// consistent.
403 void markRedefinitions();
Jan Voungb3401d22015-05-18 09:38:21 -0700404
Andrew Scull57e12682015-09-16 11:30:19 -0700405 /// Make a pass over the Cfg to determine which variables need stack slots and
406 /// place them in a sorted list (SortedSpilledVariables). Among those, vars,
407 /// classify the spill variables as local to the basic block vs global
408 /// (multi-block) in order to compute the parameters GlobalsSize and
409 /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of
410 /// locals is disallowed) along with alignments required for variables in each
411 /// area. We rely on accurate VMetadata in order to classify a variable as
412 /// global vs local (otherwise the variable is conservatively global). The
413 /// in-args should be initialized to 0.
Andrew Scull9612d322015-07-06 14:53:25 -0700414 ///
Andrew Scull57e12682015-09-16 11:30:19 -0700415 /// This is only a pre-pass and the actual stack slot assignment is handled
416 /// separately.
Andrew Scull9612d322015-07-06 14:53:25 -0700417 ///
Andrew Scull57e12682015-09-16 11:30:19 -0700418 /// There may be target-specific Variable types, which will be handled by
419 /// TargetVarHook. If the TargetVarHook returns true, then the variable is
420 /// skipped and not considered with the rest of the spilled variables.
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700421 void getVarStackSlotParams(VarList &SortedSpilledVariables,
John Portoe82b5602016-02-24 15:58:55 -0800422 SmallBitVector &RegsUsed, size_t *GlobalsSize,
423 size_t *SpillAreaSizeBytes,
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700424 uint32_t *SpillAreaAlignmentBytes,
425 uint32_t *LocalsSlotsAlignmentBytes,
426 std::function<bool(Variable *)> TargetVarHook);
427
Andrew Scull57e12682015-09-16 11:30:19 -0700428 /// Calculate the amount of padding needed to align the local and global areas
429 /// to the required alignment. This assumes the globals/locals layout used by
430 /// getVarStackSlotParams and assignVarStackSlots.
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700431 void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
432 uint32_t SpillAreaAlignmentBytes,
433 size_t GlobalsSize,
434 uint32_t LocalsSlotsAlignmentBytes,
435 uint32_t *SpillAreaPaddingBytes,
436 uint32_t *LocalsSlotsPaddingBytes);
437
Andrew Scull57e12682015-09-16 11:30:19 -0700438 /// Make a pass through the SortedSpilledVariables and actually assign stack
439 /// slots. SpillAreaPaddingBytes takes into account stack alignment padding.
440 /// The SpillArea starts after that amount of padding. This matches the scheme
441 /// in getVarStackSlotParams, where there may be a separate multi-block global
442 /// var spill area and a local var spill area.
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700443 void assignVarStackSlots(VarList &SortedSpilledVariables,
444 size_t SpillAreaPaddingBytes,
445 size_t SpillAreaSizeBytes,
446 size_t GlobalsAndSubsequentPaddingSize,
447 bool UsesFramePointer);
448
Andrew Scull57e12682015-09-16 11:30:19 -0700449 /// Sort the variables in Source based on required alignment. The variables
450 /// with the largest alignment need are placed in the front of the Dest list.
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700451 void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;
452
Karl Schimpf20070e82016-03-17 13:30:13 -0700453 InstCall *makeHelperCall(RuntimeHelper FuncID, Variable *Dest, SizeT MaxSrcs);
Jan Voungb36ad9b2015-04-21 17:01:49 -0700454
Jim Stichnoth230d4102015-09-25 17:40:32 -0700455 void _set_dest_redefined() { Context.getLastInserted()->setDestRedefined(); }
Jan Voung0fa6c5a2015-06-01 11:04:04 -0700456
Andrew Scullcfa628b2015-08-20 14:23:05 -0700457 bool shouldOptimizeMemIntrins();
458
Eric Holkcfc25532016-02-09 17:47:58 -0800459 void scalarizeArithmetic(InstArithmetic::OpKind K, Variable *Dest,
460 Operand *Src0, Operand *Src1);
461
Eric Holkcc69fa22016-02-10 13:07:06 -0800462 /// Generalizes scalarizeArithmetic to support other instruction types.
463 ///
Eric Holkd6cf6b32016-02-17 11:09:48 -0800464 /// insertScalarInstruction is a function-like object with signature
Eric Holkcc69fa22016-02-10 13:07:06 -0800465 /// (Variable *Dest, Variable *Src0, Variable *Src1) -> Instr *.
Eric Holkd6cf6b32016-02-17 11:09:48 -0800466 template <typename... Operands,
467 typename F = std::function<Inst *(Variable *, Operands *...)>>
468 void scalarizeInstruction(Variable *Dest, F insertScalarInstruction,
Nicolas Capens95342282021-07-06 12:45:02 -0400469 Operands *...Srcs) {
Eric Holkd6cf6b32016-02-17 11:09:48 -0800470 assert(GeneratingTargetHelpers &&
471 "scalarizeInstruction called during incorrect phase");
Eric Holkcc69fa22016-02-10 13:07:06 -0800472 const Type DestTy = Dest->getType();
473 assert(isVectorType(DestTy));
474 const Type DestElementTy = typeElementType(DestTy);
475 const SizeT NumElements = typeNumElements(DestTy);
Eric Holkcc69fa22016-02-10 13:07:06 -0800476
477 Variable *T = Func->makeVariable(DestTy);
Jaydeep Patil958ddb72016-10-03 07:52:48 -0700478 if (auto *VarVecOn32 = llvm::dyn_cast<VariableVecOn32>(T)) {
479 VarVecOn32->initVecElement(Func);
Jaydeep Patil3a01f332016-10-17 06:33:50 -0700480 auto *Undef = ConstantUndef::create(Ctx, DestTy);
481 Context.insert<InstAssign>(T, Undef);
482 } else {
483 Context.insert<InstFakeDef>(T);
Jaydeep Patil958ddb72016-10-03 07:52:48 -0700484 }
Eric Holkcc69fa22016-02-10 13:07:06 -0800485
Eric Holkd6cf6b32016-02-17 11:09:48 -0800486 for (SizeT I = 0; I < NumElements; ++I) {
487 auto *Index = Ctx->getConstantInt32(I);
488
Takuto Ikuta9911aca2018-11-03 10:43:22 +0000489 auto makeExtractThunk = [this, Index, NumElements](Operand *Src) {
490 return [this, Index, NumElements, Src]() {
Takuto Ikuta4169b312018-11-05 23:32:02 +0900491 (void)NumElements;
Eric Holkd6cf6b32016-02-17 11:09:48 -0800492 assert(typeNumElements(Src->getType()) == NumElements);
493
494 const auto ElementTy = typeElementType(Src->getType());
495 auto *Op = Func->makeVariable(ElementTy);
496 Context.insert<InstExtractElement>(Op, Src, Index);
497 return Op;
498 };
499 };
Eric Holkcc69fa22016-02-10 13:07:06 -0800500
501 // Perform the operation as a scalar operation.
Eric Holkd6cf6b32016-02-17 11:09:48 -0800502 auto *Res = Func->makeVariable(DestElementTy);
503 auto *Arith = applyToThunkedArgs(insertScalarInstruction, Res,
504 makeExtractThunk(Srcs)...);
Eric Holkcc69fa22016-02-10 13:07:06 -0800505 genTargetHelperCallFor(Arith);
506
Eric Holkcc69fa22016-02-10 13:07:06 -0800507 Variable *DestT = Func->makeVariable(DestTy);
508 Context.insert<InstInsertElement>(DestT, T, Res, Index);
509 T = DestT;
510 }
511 Context.insert<InstAssign>(Dest, T);
512 }
513
Eric Holkd6cf6b32016-02-17 11:09:48 -0800514 // applyToThunkedArgs is used by scalarizeInstruction. Ideally, we would just
515 // call insertScalarInstruction(Res, Srcs...), but C++ does not specify
516 // evaluation order which means this leads to an unpredictable final
517 // output. Instead, we wrap each of the Srcs in a thunk and these
518 // applyToThunkedArgs functions apply the thunks in a well defined order so we
519 // still get well-defined output.
520 Inst *applyToThunkedArgs(
521 std::function<Inst *(Variable *, Variable *)> insertScalarInstruction,
522 Variable *Res, std::function<Variable *()> thunk0) {
523 auto *Src0 = thunk0();
524 return insertScalarInstruction(Res, Src0);
525 }
Eric Holkcc69fa22016-02-10 13:07:06 -0800526
Eric Holkd6cf6b32016-02-17 11:09:48 -0800527 Inst *
528 applyToThunkedArgs(std::function<Inst *(Variable *, Variable *, Variable *)>
529 insertScalarInstruction,
530 Variable *Res, std::function<Variable *()> thunk0,
531 std::function<Variable *()> thunk1) {
532 auto *Src0 = thunk0();
533 auto *Src1 = thunk1();
534 return insertScalarInstruction(Res, Src0, Src1);
535 }
Eric Holkcc69fa22016-02-10 13:07:06 -0800536
Eric Holkd6cf6b32016-02-17 11:09:48 -0800537 Inst *applyToThunkedArgs(
538 std::function<Inst *(Variable *, Variable *, Variable *, Variable *)>
539 insertScalarInstruction,
540 Variable *Res, std::function<Variable *()> thunk0,
541 std::function<Variable *()> thunk1, std::function<Variable *()> thunk2) {
542 auto *Src0 = thunk0();
543 auto *Src1 = thunk1();
544 auto *Src2 = thunk2();
545 return insertScalarInstruction(Res, Src0, Src1, Src2);
Eric Holkcc69fa22016-02-10 13:07:06 -0800546 }
547
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700548 Cfg *Func;
549 GlobalContext *Ctx;
Jim Stichnotheafb56c2015-06-22 10:35:22 -0700550 bool HasComputedFrame = false;
551 bool CallsReturnsTwice = false;
Jim Stichnotheafb56c2015-06-22 10:35:22 -0700552 SizeT NextLabelNumber = 0;
Andrew Scull86df4e92015-07-30 13:54:44 -0700553 SizeT NextJumpTableNumber = 0;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700554 LoweringContext Context;
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700555};
556
Andrew Scull57e12682015-09-16 11:30:19 -0700557/// TargetDataLowering is used for "lowering" data including initializers for
558/// global variables, and the internal constant pools. It is separated out from
559/// TargetLowering because it does not require a Cfg.
Jan Voung72984d82015-01-29 14:42:38 -0800560class TargetDataLowering {
561 TargetDataLowering() = delete;
562 TargetDataLowering(const TargetDataLowering &) = delete;
563 TargetDataLowering &operator=(const TargetDataLowering &) = delete;
Jim Stichnoth7b451a92014-10-15 14:39:23 -0700564
Jim Stichnothde4ca712014-06-29 08:13:48 -0700565public:
Jim Stichnothbbca7542015-02-11 16:08:31 -0800566 static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
Jan Voung72984d82015-01-29 14:42:38 -0800567 virtual ~TargetDataLowering();
Jan Voung839c4ce2014-07-28 15:19:43 -0700568
John Porto8b1a7052015-06-17 13:20:08 -0700569 virtual void lowerGlobals(const VariableDeclarationList &Vars,
Jim Stichnoth467ffe52016-03-29 15:01:06 -0700570 const std::string &SectionSuffix) = 0;
John Porto0f86d032015-06-15 07:44:27 -0700571 virtual void lowerConstants() = 0;
Andrew Scull86df4e92015-07-30 13:54:44 -0700572 virtual void lowerJumpTables() = 0;
Jaydeep Patil3da9f652016-11-03 22:54:06 -0700573 virtual void emitTargetRODataSections() {}
Jim Stichnothde4ca712014-06-29 08:13:48 -0700574
575protected:
John Porto8b1a7052015-06-17 13:20:08 -0700576 void emitGlobal(const VariableDeclaration &Var,
Jim Stichnoth467ffe52016-03-29 15:01:06 -0700577 const std::string &SectionSuffix);
Jan Voung58eea4d2015-06-15 15:11:56 -0700578
Andrew Scull57e12682015-09-16 11:30:19 -0700579 /// For now, we assume .long is the right directive for emitting 4 byte emit
580 /// global relocations. However, LLVM MIPS usually uses .4byte instead.
Andrew Scull9612d322015-07-06 14:53:25 -0700581 /// Perhaps there is some difference when the location is unaligned.
John Porto8b1a7052015-06-17 13:20:08 -0700582 static const char *getEmit32Directive() { return ".long"; }
Jan Voung58eea4d2015-06-15 15:11:56 -0700583
Jim Stichnothc6ead202015-02-24 09:30:30 -0800584 explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
Jim Stichnothde4ca712014-06-29 08:13:48 -0700585 GlobalContext *Ctx;
Jim Stichnothde4ca712014-06-29 08:13:48 -0700586};
587
Andrew Scull57e12682015-09-16 11:30:19 -0700588/// TargetHeaderLowering is used to "lower" the header of an output file. It
589/// writes out the target-specific header attributes. E.g., for ARM this writes
590/// out the build attributes (float ABI, etc.).
Jan Voungfb792842015-06-11 15:27:50 -0700591class TargetHeaderLowering {
592 TargetHeaderLowering() = delete;
593 TargetHeaderLowering(const TargetHeaderLowering &) = delete;
594 TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;
595
596public:
597 static std::unique_ptr<TargetHeaderLowering>
598 createLowering(GlobalContext *Ctx);
599 virtual ~TargetHeaderLowering();
600
601 virtual void lower() {}
602
603protected:
604 explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
605 GlobalContext *Ctx;
606};
607
Jim Stichnoth5bc2b1d2014-05-22 13:38:48 -0700608} // end of namespace Ice
609
610#endif // SUBZERO_SRC_ICETARGETLOWERING_H