blob: 894f2dc54125cd108f41ee99f9b432d12a29a048 [file] [log] [blame]
Dan Gohman1462faa2015-11-16 16:18:28 +00001//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9///
10/// \file
11/// \brief This file implements a register stacking pass.
12///
13/// This pass reorders instructions to put register uses and defs in an order
14/// such that they form single-use expression trees. Registers fitting this form
15/// are then marked as "stackified", meaning references to them are replaced by
Dan Gohmane0405332016-10-03 22:43:53 +000016/// "push" and "pop" from the value stack.
Dan Gohman1462faa2015-11-16 16:18:28 +000017///
Dan Gohman31448f12015-12-08 03:43:03 +000018/// This is primarily a code size optimization, since temporary values on the
Dan Gohmane0405332016-10-03 22:43:53 +000019/// value stack don't need to be named.
Dan Gohman1462faa2015-11-16 16:18:28 +000020///
21//===----------------------------------------------------------------------===//
22
23#include "WebAssembly.h"
Dan Gohman4ba48162015-11-18 16:12:01 +000024#include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
Dan Gohman7a6b9822015-11-29 22:32:02 +000025#include "WebAssemblyMachineFunctionInfo.h"
Dan Gohmanb6fd39a2016-01-19 16:59:23 +000026#include "WebAssemblySubtarget.h"
Dan Gohman4fc4e422016-10-24 19:49:43 +000027#include "WebAssemblyUtilities.h"
Dan Gohman81719f82015-11-25 16:55:01 +000028#include "llvm/Analysis/AliasAnalysis.h"
Dan Gohman8887d1f2015-12-25 00:31:02 +000029#include "llvm/CodeGen/LiveIntervalAnalysis.h"
Dan Gohman1462faa2015-11-16 16:18:28 +000030#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
Dan Gohmanadf28172016-01-28 01:22:44 +000031#include "llvm/CodeGen/MachineDominators.h"
32#include "llvm/CodeGen/MachineInstrBuilder.h"
Dan Gohman1462faa2015-11-16 16:18:28 +000033#include "llvm/CodeGen/MachineRegisterInfo.h"
34#include "llvm/CodeGen/Passes.h"
35#include "llvm/Support/Debug.h"
36#include "llvm/Support/raw_ostream.h"
37using namespace llvm;
38
39#define DEBUG_TYPE "wasm-reg-stackify"
40
namespace {
/// A machine-function pass that reorders instructions so single-use
/// register defs sit immediately before their uses, then marks those
/// registers as "stackified" so they live on the WebAssembly value stack
/// instead of being assigned locals.
class WebAssemblyRegStackify final : public MachineFunctionPass {
  StringRef getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // This pass reorders instructions within basic blocks only; the CFG
    // itself is never modified.
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace
67
68char WebAssemblyRegStackify::ID = 0;
69FunctionPass *llvm::createWebAssemblyRegStackify() {
70 return new WebAssemblyRegStackify();
71}
72
Dan Gohmanb0992da2015-11-20 02:19:12 +000073// Decorate the given instruction with implicit operands that enforce the
Dan Gohman8887d1f2015-12-25 00:31:02 +000074// expression stack ordering constraints for an instruction which is on
75// the expression stack.
76static void ImposeStackOrdering(MachineInstr *MI) {
Dan Gohmane0405332016-10-03 22:43:53 +000077 // Write the opaque VALUE_STACK register.
78 if (!MI->definesRegister(WebAssembly::VALUE_STACK))
79 MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
Dan Gohman4da4abd2015-12-05 00:51:40 +000080 /*isDef=*/true,
81 /*isImp=*/true));
Dan Gohman4da4abd2015-12-05 00:51:40 +000082
Dan Gohmane0405332016-10-03 22:43:53 +000083 // Also read the opaque VALUE_STACK register.
84 if (!MI->readsRegister(WebAssembly::VALUE_STACK))
85 MI->addOperand(MachineOperand::CreateReg(WebAssembly::VALUE_STACK,
Dan Gohmana712a6c2015-12-14 22:37:23 +000086 /*isDef=*/false,
87 /*isImp=*/true));
Dan Gohmanb0992da2015-11-20 02:19:12 +000088}
89
Dan Gohman2644d742016-05-17 04:05:31 +000090// Determine whether a call to the callee referenced by
91// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
92// effects.
Duncan P. N. Exon Smith500d0462016-07-08 19:36:40 +000093static void QueryCallee(const MachineInstr &MI, unsigned CalleeOpNo, bool &Read,
94 bool &Write, bool &Effects, bool &StackPointer) {
Dan Gohmand08cd152016-05-17 21:14:26 +000095 // All calls can use the stack pointer.
96 StackPointer = true;
97
Duncan P. N. Exon Smith500d0462016-07-08 19:36:40 +000098 const MachineOperand &MO = MI.getOperand(CalleeOpNo);
Dan Gohman2644d742016-05-17 04:05:31 +000099 if (MO.isGlobal()) {
100 const Constant *GV = MO.getGlobal();
101 if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
102 if (!GA->isInterposable())
103 GV = GA->getAliasee();
104
105 if (const Function *F = dyn_cast<Function>(GV)) {
106 if (!F->doesNotThrow())
107 Effects = true;
108 if (F->doesNotAccessMemory())
109 return;
110 if (F->onlyReadsMemory()) {
111 Read = true;
112 return;
113 }
114 }
115 }
116
117 // Assume the worst.
118 Write = true;
119 Read = true;
120 Effects = true;
121}
122
// Determine whether MI reads memory, writes memory, has side effects,
// and/or uses the __stack_pointer value.
//
// The four out-parameters are only ever set to true, never cleared, so
// callers may accumulate results across multiple queries.
static void Query(const MachineInstr &MI, AliasAnalysis &AA, bool &Read,
                  bool &Write, bool &Effects, bool &StackPointer) {
  // Labels and terminators are never candidates for stackification.
  assert(!MI.isPosition());
  assert(!MI.isTerminator());

  // Debug values have no effect on anything.
  if (MI.isDebugValue())
    return;

  // Check for loads.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI.mayStore()) {
    Write = true;

    // Check for stores to __stack_pointer.
    for (auto MMO : MI.memoperands()) {
      const MachinePointerInfo &MPI = MMO->getPointerInfo();
      if (MPI.V.is<const PseudoSourceValue *>()) {
        auto PSV = MPI.V.get<const PseudoSourceValue *>();
        if (const ExternalSymbolPseudoSourceValue *EPSV =
                dyn_cast<ExternalSymbolPseudoSourceValue>(PSV))
          if (StringRef(EPSV->getSymbol()) == "__stack_pointer")
            StackPointer = true;
      }
    }
  } else if (MI.hasOrderedMemoryRef()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instruction have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however hasOrderedMemoryRef() interprets this plus their lack
      // of memoperands as having a potential unknown memory reference.
      break;
    default:
      // Record volatile accesses, unless it's a call, as calls are handled
      // specially below.
      if (!MI.isCall()) {
        Write = true;
        Effects = true;
      }
      break;
    }
  }

  // Check for side effects.
  if (MI.hasUnmodeledSideEffects()) {
    switch (MI.getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid so they can't be arbitrarily
      // moved, however in the specific case of register stackifying, it is safe
      // to move them because overflow and invalid are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Analyze calls. The callee-operand index differs between the void
  // call forms (operand 0) and the value-returning forms (operand 1,
  // since operand 0 is the result register).
  if (MI.isCall()) {
    switch (MI.getOpcode()) {
    case WebAssembly::CALL_VOID:
    case WebAssembly::CALL_INDIRECT_VOID:
      QueryCallee(MI, 0, Read, Write, Effects, StackPointer);
      break;
    case WebAssembly::CALL_I32: case WebAssembly::CALL_I64:
    case WebAssembly::CALL_F32: case WebAssembly::CALL_F64:
    case WebAssembly::CALL_INDIRECT_I32: case WebAssembly::CALL_INDIRECT_I64:
    case WebAssembly::CALL_INDIRECT_F32: case WebAssembly::CALL_INDIRECT_F64:
      QueryCallee(MI, 1, Read, Write, Effects, StackPointer);
      break;
    default:
      llvm_unreachable("unexpected call opcode");
    }
  }
}
218
219// Test whether Def is safe and profitable to rematerialize.
Duncan P. N. Exon Smith9cfc75c2016-06-30 00:01:54 +0000220static bool ShouldRematerialize(const MachineInstr &Def, AliasAnalysis &AA,
Dan Gohman2644d742016-05-17 04:05:31 +0000221 const WebAssemblyInstrInfo *TII) {
Duncan P. N. Exon Smith9cfc75c2016-06-30 00:01:54 +0000222 return Def.isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
Dan Gohman2644d742016-05-17 04:05:31 +0000223}
224
Dan Gohman12de0b92016-05-17 20:19:47 +0000225// Identify the definition for this register at this point. This is a
226// generalization of MachineRegisterInfo::getUniqueVRegDef that uses
227// LiveIntervals to handle complex cases.
Dan Gohman2644d742016-05-17 04:05:31 +0000228static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
229 const MachineRegisterInfo &MRI,
230 const LiveIntervals &LIS)
231{
232 // Most registers are in SSA form here so we try a quick MRI query first.
233 if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
234 return Def;
235
236 // MRI doesn't know what the Def is. Try asking LIS.
237 if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
238 LIS.getInstructionIndex(*Insert)))
239 return LIS.getInstructionFromIndex(ValNo->def);
240
241 return nullptr;
242}
243
// Test whether Reg, as defined at Def, has exactly one use. This is a
// generalization of MachineRegisterInfo::hasOneUse that uses LiveIntervals
// to handle complex cases.
//
// NOTE(review): MDT is currently unused in this body — presumably kept for
// signature symmetry with callers; confirm before removing.
static bool HasOneUse(unsigned Reg, MachineInstr *Def,
                      MachineRegisterInfo &MRI, MachineDominatorTree &MDT,
                      LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MRI.hasOneUse(Reg))
    return true;

  bool HasOne = false;
  const LiveInterval &LI = LIS.getInterval(Reg);
  const VNInfo *DefVNI = LI.getVNInfoAt(
      LIS.getInstructionIndex(*Def).getRegSlot());
  assert(DefVNI);
  // Count only the uses that see the value defined at Def, ignoring uses
  // of other defs of the same virtual register.
  for (auto &I : MRI.use_nodbg_operands(Reg)) {
    const auto &Result = LI.Query(LIS.getInstructionIndex(*I.getParent()));
    if (Result.valueIn() == DefVNI) {
      // A non-killing use keeps the value live past this instruction,
      // implying at least one more use downstream.
      if (!Result.isKill())
        return false;
      if (HasOne)
        return false;
      HasOne = true;
    }
  }
  return HasOne;
}
271
// Test whether it's safe to move Def to just before Insert.
// Both instructions must be in the same basic block.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  SmallVector<unsigned, 4> MutableRegisters;
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // If one of the operands isn't in SSA form, it has different values at
    // different times, and we need to make sure we don't move our use across
    // a different def.
    if (!MO.isDef() && !MRI.hasOneDef(Reg))
      MutableRegisters.push_back(Reg);
  }

  // Classify Def's memory behavior and side effects.
  bool Read = false, Write = false, Effects = false, StackPointer = false;
  Query(*Def, AA, Read, Write, Effects, StackPointer);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  bool HasMutableRegisters = !MutableRegisters.empty();
  if (!Read && !Write && !Effects && !StackPointer && !HasMutableRegisters)
    return true;

  // Scan through the intervening instructions between Def and Insert,
  // rejecting the move on any ordering conflict: effect/effect,
  // read-after-write, write-after-read/write, stack-pointer clashes, or a
  // redefinition of a non-SSA register that Def uses.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    bool InterveningStackPointer = false;
    Query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
          InterveningStackPointer);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
    if (StackPointer && InterveningStackPointer)
      return false;

    for (unsigned Reg : MutableRegisters)
      for (const MachineOperand &MO : I->operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
          return false;
  }

  return true;
}
347
Dan Gohmanadf28172016-01-28 01:22:44 +0000348/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
349static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
350 const MachineBasicBlock &MBB,
351 const MachineRegisterInfo &MRI,
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000352 const MachineDominatorTree &MDT,
Dan Gohman10545702016-05-17 22:24:18 +0000353 LiveIntervals &LIS,
354 WebAssemblyFunctionInfo &MFI) {
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000355 const LiveInterval &LI = LIS.getInterval(Reg);
356
357 const MachineInstr *OneUseInst = OneUse.getParent();
358 VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));
359
Dominic Chena8a63822016-08-17 23:42:27 +0000360 for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
Dan Gohmanadf28172016-01-28 01:22:44 +0000361 if (&Use == &OneUse)
362 continue;
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000363
Dan Gohmanadf28172016-01-28 01:22:44 +0000364 const MachineInstr *UseInst = Use.getParent();
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000365 VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));
366
367 if (UseVNI != OneUseVNI)
368 continue;
369
Dan Gohmanadf28172016-01-28 01:22:44 +0000370 const MachineInstr *OneUseInst = OneUse.getParent();
Dan Gohman12de0b92016-05-17 20:19:47 +0000371 if (UseInst == OneUseInst) {
Dan Gohmanadf28172016-01-28 01:22:44 +0000372 // Another use in the same instruction. We need to ensure that the one
373 // selected use happens "before" it.
374 if (&OneUse > &Use)
375 return false;
376 } else {
377 // Test that the use is dominated by the one selected use.
Dan Gohman10545702016-05-17 22:24:18 +0000378 while (!MDT.dominates(OneUseInst, UseInst)) {
379 // Actually, dominating is over-conservative. Test that the use would
380 // happen after the one selected use in the stack evaluation order.
381 //
382 // This is needed as a consequence of using implicit get_locals for
383 // uses and implicit set_locals for defs.
Dominic Chen4173fff2016-08-11 04:10:56 +0000384 if (UseInst->getDesc().getNumDefs() == 0)
Dan Gohman10545702016-05-17 22:24:18 +0000385 return false;
386 const MachineOperand &MO = UseInst->getOperand(0);
387 if (!MO.isReg())
388 return false;
389 unsigned DefReg = MO.getReg();
390 if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
391 !MFI.isVRegStackified(DefReg))
392 return false;
393 assert(MRI.hasOneUse(DefReg));
394 const MachineOperand &NewUse = *MRI.use_begin(DefReg);
395 const MachineInstr *NewUseInst = NewUse.getParent();
396 if (NewUseInst == OneUseInst) {
397 if (&OneUse > &NewUse)
398 return false;
399 break;
400 }
401 UseInst = NewUseInst;
402 }
Dan Gohmanadf28172016-01-28 01:22:44 +0000403 }
404 }
405 return true;
406}
407
Dan Gohman4fc4e422016-10-24 19:49:43 +0000408/// Get the appropriate tee opcode for the given register class.
409static unsigned GetTeeOpcode(const TargetRegisterClass *RC) {
Dan Gohmanadf28172016-01-28 01:22:44 +0000410 if (RC == &WebAssembly::I32RegClass)
Dan Gohman4fc4e422016-10-24 19:49:43 +0000411 return WebAssembly::TEE_I32;
Dan Gohmanadf28172016-01-28 01:22:44 +0000412 if (RC == &WebAssembly::I64RegClass)
Dan Gohman4fc4e422016-10-24 19:49:43 +0000413 return WebAssembly::TEE_I64;
Dan Gohmanadf28172016-01-28 01:22:44 +0000414 if (RC == &WebAssembly::F32RegClass)
Dan Gohman4fc4e422016-10-24 19:49:43 +0000415 return WebAssembly::TEE_F32;
Dan Gohmanadf28172016-01-28 01:22:44 +0000416 if (RC == &WebAssembly::F64RegClass)
Dan Gohman4fc4e422016-10-24 19:49:43 +0000417 return WebAssembly::TEE_F64;
Derek Schuff39bf39f2016-08-02 23:16:09 +0000418 if (RC == &WebAssembly::V128RegClass)
Dan Gohman4fc4e422016-10-24 19:49:43 +0000419 return WebAssembly::TEE_V128;
Dan Gohmanadf28172016-01-28 01:22:44 +0000420 llvm_unreachable("Unexpected register class");
421}
422
Dan Gohman2644d742016-05-17 04:05:31 +0000423// Shrink LI to its uses, cleaning up LI.
424static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
425 if (LIS.shrinkToUses(&LI)) {
426 SmallVector<LiveInterval*, 4> SplitLIs;
427 LIS.splitSeparateComponents(LI, SplitLIs);
428 }
429}
430
/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
/// Returns Def, which becomes the new insertion point for the tree walk.
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand& Op,
                                      MachineInstr *Def,
                                      MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  DEBUG(dbgs() << "Move for single use: "; Def->dump());

  // Physically move Def to just before Insert and update LiveIntervals.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg) && MRI.hasOneUse(Reg)) {
    // No one else is using this register for anything so we can just stackify
    // it in place.
    MFI.stackifyVReg(Reg);
  } else {
    // The register may have unrelated uses or defs; create a new register for
    // just our one def and use so that we can stackify it.
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register: the segment
    // from Def to our use now belongs to NewReg, so remove it from Reg.
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeSegment(LIS.getInstructionIndex(*Def).getRegSlot(),
                     LIS.getInstructionIndex(*Op.getParent()).getRegSlot(),
                     /*RemoveDeadValNo=*/true);

    MFI.stackifyVReg(NewReg);

    DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}
472
/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction. The original Def is deleted if the clone made it
/// dead. Returns the clone, which becomes the new insertion point.
static MachineInstr *RematerializeCheapDef(
    unsigned Reg, MachineOperand &Op, MachineInstr &Def, MachineBasicBlock &MBB,
    MachineBasicBlock::instr_iterator Insert, LiveIntervals &LIS,
    WebAssemblyFunctionInfo &MFI, MachineRegisterInfo &MRI,
    const WebAssemblyInstrInfo *TII, const WebAssemblyRegisterInfo *TRI) {
  DEBUG(dbgs() << "Rematerializing cheap def: "; Def.dump());
  DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  // Clone Def just before Insert into a fresh register and redirect Op to it.
  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(Insert);
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval. The original def is dead if it has no remaining
  // uses and its interval no longer covers its own dead slot.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  if (IsDead) {
    DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(Def).getRegSlot();
    // Clear any ARGUMENTS phys-reg def recorded at this slot before erasing.
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(Def);
    Def.eraseFromParent();
  }

  return Clone;
}
514
/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee to satisfy the rest of the uses. As an illustration, rewrite
/// this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, Reg = TEE_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a get_local from the
/// resulting code. Returns Def, which becomes the new insertion point.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  DEBUG(dbgs() << "Move and tee for multi-use:"; Def->dump());

  // Move Def into place.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  // Create the Tee and attach the registers. The tee defines both TeeReg
  // (consumed by Insert) and the original Reg (for the remaining uses).
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeOpcode(RegClass)), TeeReg)
      .addReg(Reg, RegState::Define)
      .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  SlotIndex TeeIdx = LIS.InsertMachineInstrInMaps(*Tee).getRegSlot();
  SlotIndex DefIdx = LIS.getInstructionIndex(*Def).getRegSlot();

  // Tell LiveIntervals we moved the original vreg def from Def to Tee.
  LiveInterval &LI = LIS.getInterval(Reg);
  LiveInterval::iterator I = LI.FindSegmentContaining(DefIdx);
  VNInfo *ValNo = LI.getVNInfoAt(DefIdx);
  I->start = TeeIdx;
  ValNo->def = TeeIdx;
  ShrinkToUses(LI, LIS);

  // Finish stackifying the new regs.
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);

  DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  DEBUG(dbgs() << " - Tee instruction: "; Tee->dump());
  return Def;
}
579
580namespace {
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  // Each entry is the not-yet-visited portion of one instruction's explicit
  // uses, stored reversed so Pop() yields operands right-to-left. Empty
  // ranges are never kept on the worklist.
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Test whether every queued operand has been visited.
  bool Done() const { return Worklist.empty(); }

  /// Remove and return the next operand to visit; the back range is
  /// guaranteed non-empty by the worklist invariant.
  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Reseting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  ///
  /// This is needed as a consequence of using implicit get_locals for
  /// uses and implicit set_locals for defs.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};
649
650/// State to keep track of whether commuting is in flight or whether it's been
651/// tried for the current instruction and didn't work.
/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and are
  /// revisiting it, and the declined state where we've reverted the operands
  /// back to their original order and will no longer commute it further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the commuted
  /// operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}

  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      // Let the target pick which pair of operands to commute.
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(*Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(*Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
        Declined = false;
      }
    }
  }

  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
700} // end anonymous namespace
701
Dan Gohman1462faa2015-11-16 16:18:28 +0000702bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
703 DEBUG(dbgs() << "********** Register Stackifying **********\n"
704 "********** Function: "
705 << MF.getName() << '\n');
706
707 bool Changed = false;
708 MachineRegisterInfo &MRI = MF.getRegInfo();
709 WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
Dan Gohmanb6fd39a2016-01-19 16:59:23 +0000710 const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
711 const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
Dan Gohman81719f82015-11-25 16:55:01 +0000712 AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
Dan Gohmanadf28172016-01-28 01:22:44 +0000713 MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
Dan Gohman8887d1f2015-12-25 00:31:02 +0000714 LiveIntervals &LIS = getAnalysis<LiveIntervals>();
Dan Gohmand70e5902015-12-08 03:30:42 +0000715
Dan Gohman1462faa2015-11-16 16:18:28 +0000716 // Walk the instructions from the bottom up. Currently we don't look past
717 // block boundaries, and the blocks aren't ordered so the block visitation
718 // order isn't significant, but we may want to change this in the future.
719 for (MachineBasicBlock &MBB : MF) {
Dan Gohman8f59cf72016-01-06 18:29:35 +0000720 // Don't use a range-based for loop, because we modify the list as we're
721 // iterating over it and the end iterator may change.
722 for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
723 MachineInstr *Insert = &*MII;
Dan Gohman81719f82015-11-25 16:55:01 +0000724 // Don't nest anything inside an inline asm, because we don't have
725 // constraints for $push inputs.
726 if (Insert->getOpcode() == TargetOpcode::INLINEASM)
Dan Gohman595e8ab2016-02-22 17:45:20 +0000727 continue;
728
729 // Ignore debugging intrinsics.
730 if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
731 continue;
Dan Gohman81719f82015-11-25 16:55:01 +0000732
Dan Gohman1462faa2015-11-16 16:18:28 +0000733 // Iterate through the inputs in reverse order, since we'll be pulling
Dan Gohman53d13992015-12-02 18:08:49 +0000734 // operands off the stack in LIFO order.
Dan Gohmanadf28172016-01-28 01:22:44 +0000735 CommutingState Commuting;
736 TreeWalkerState TreeWalker(Insert);
737 while (!TreeWalker.Done()) {
738 MachineOperand &Op = TreeWalker.Pop();
739
Dan Gohman1462faa2015-11-16 16:18:28 +0000740 // We're only interested in explicit virtual register operands.
Dan Gohmanadf28172016-01-28 01:22:44 +0000741 if (!Op.isReg())
Dan Gohman1462faa2015-11-16 16:18:28 +0000742 continue;
743
744 unsigned Reg = Op.getReg();
Dan Gohmanadf28172016-01-28 01:22:44 +0000745 assert(Op.isUse() && "explicit_uses() should only iterate over uses");
746 assert(!Op.isImplicit() &&
747 "explicit_uses() should only iterate over explicit operands");
748 if (TargetRegisterInfo::isPhysicalRegister(Reg))
Dan Gohman1462faa2015-11-16 16:18:28 +0000749 continue;
750
Dan Gohmanffc184b2016-10-03 22:32:21 +0000751 // Identify the definition for this register at this point.
Dan Gohman2644d742016-05-17 04:05:31 +0000752 MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
753 if (!Def)
754 continue;
Dan Gohmanadf28172016-01-28 01:22:44 +0000755
Dan Gohman81719f82015-11-25 16:55:01 +0000756 // Don't nest an INLINE_ASM def into anything, because we don't have
757 // constraints for $pop outputs.
758 if (Def->getOpcode() == TargetOpcode::INLINEASM)
759 continue;
760
Dan Gohman4ba48162015-11-18 16:12:01 +0000761 // Argument instructions represent live-in registers and not real
762 // instructions.
Dan Gohman4fc4e422016-10-24 19:49:43 +0000763 if (WebAssembly::isArgument(*Def))
Dan Gohman4ba48162015-11-18 16:12:01 +0000764 continue;
765
Dan Gohmanadf28172016-01-28 01:22:44 +0000766 // Decide which strategy to take. Prefer to move a single-use value
Dan Gohman4fc4e422016-10-24 19:49:43 +0000767 // over cloning it, and prefer cloning over introducing a tee.
Dan Gohmanadf28172016-01-28 01:22:44 +0000768 // For moving, we require the def to be in the same block as the use;
769 // this makes things simpler (LiveIntervals' handleMove function only
770 // supports intra-block moves) and it's MachineSink's job to catch all
771 // the sinking opportunities anyway.
772 bool SameBlock = Def->getParent() == &MBB;
Derek Schuffe9e68912016-09-30 18:02:54 +0000773 bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, MRI) &&
Dan Gohmanfbfe5ec2016-01-28 03:59:09 +0000774 !TreeWalker.IsOnStack(Reg);
Dan Gohman12de0b92016-05-17 20:19:47 +0000775 if (CanMove && HasOneUse(Reg, Def, MRI, MDT, LIS)) {
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000776 Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
Duncan P. N. Exon Smith9cfc75c2016-06-30 00:01:54 +0000777 } else if (ShouldRematerialize(*Def, AA, TII)) {
778 Insert =
779 RematerializeCheapDef(Reg, Op, *Def, MBB, Insert->getIterator(),
780 LIS, MFI, MRI, TII, TRI);
Dan Gohmanadf28172016-01-28 01:22:44 +0000781 } else if (CanMove &&
Dan Gohman10545702016-05-17 22:24:18 +0000782 OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
Dan Gohmanadf28172016-01-28 01:22:44 +0000783 Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
784 MRI, TII);
785 } else {
786 // We failed to stackify the operand. If the problem was ordering
787 // constraints, Commuting may be able to help.
788 if (!CanMove && SameBlock)
789 Commuting.MaybeCommute(Insert, TreeWalker, TII);
790 // Proceed to the next operand.
791 continue;
Dan Gohmanb6fd39a2016-01-19 16:59:23 +0000792 }
Dan Gohmanadf28172016-01-28 01:22:44 +0000793
794 // We stackified an operand. Add the defining instruction's operands to
795 // the worklist stack now to continue to build an ever deeper tree.
796 Commuting.Reset();
797 TreeWalker.PushOperands(Insert);
Dan Gohman1462faa2015-11-16 16:18:28 +0000798 }
Dan Gohmanadf28172016-01-28 01:22:44 +0000799
800 // If we stackified any operands, skip over the tree to start looking for
801 // the next instruction we can build a tree on.
802 if (Insert != &*MII) {
Dan Gohman8f59cf72016-01-06 18:29:35 +0000803 ImposeStackOrdering(&*MII);
Eric Liuc7e5a9c2016-09-12 09:35:59 +0000804 MII = MachineBasicBlock::iterator(Insert).getReverse();
Dan Gohmanadf28172016-01-28 01:22:44 +0000805 Changed = true;
806 }
Dan Gohman1462faa2015-11-16 16:18:28 +0000807 }
808 }
809
Dan Gohmane0405332016-10-03 22:43:53 +0000810 // If we used VALUE_STACK anywhere, add it to the live-in sets everywhere so
Dan Gohmanadf28172016-01-28 01:22:44 +0000811 // that it never looks like a use-before-def.
Dan Gohmanb0992da2015-11-20 02:19:12 +0000812 if (Changed) {
Dan Gohmane0405332016-10-03 22:43:53 +0000813 MF.getRegInfo().addLiveIn(WebAssembly::VALUE_STACK);
Dan Gohmanb0992da2015-11-20 02:19:12 +0000814 for (MachineBasicBlock &MBB : MF)
Dan Gohmane0405332016-10-03 22:43:53 +0000815 MBB.addLiveIn(WebAssembly::VALUE_STACK);
Dan Gohmanb0992da2015-11-20 02:19:12 +0000816 }
817
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000818#ifndef NDEBUG
Dan Gohmanb6fd39a2016-01-19 16:59:23 +0000819 // Verify that pushes and pops are performed in LIFO order.
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000820 SmallVector<unsigned, 0> Stack;
821 for (MachineBasicBlock &MBB : MF) {
822 for (MachineInstr &MI : MBB) {
Dan Gohman0cfb5f82016-05-10 04:24:02 +0000823 if (MI.isDebugValue())
824 continue;
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000825 for (MachineOperand &MO : reverse(MI.explicit_operands())) {
Dan Gohman7a6b9822015-11-29 22:32:02 +0000826 if (!MO.isReg())
827 continue;
Dan Gohmanadf28172016-01-28 01:22:44 +0000828 unsigned Reg = MO.getReg();
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000829
Dan Gohmanadf28172016-01-28 01:22:44 +0000830 if (MFI.isVRegStackified(Reg)) {
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000831 if (MO.isDef())
Dan Gohmanadf28172016-01-28 01:22:44 +0000832 Stack.push_back(Reg);
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000833 else
Dan Gohmanadf28172016-01-28 01:22:44 +0000834 assert(Stack.pop_back_val() == Reg &&
835 "Register stack pop should be paired with a push");
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000836 }
837 }
838 }
839 // TODO: Generalize this code to support keeping values on the stack across
840 // basic block boundaries.
Dan Gohmanadf28172016-01-28 01:22:44 +0000841 assert(Stack.empty() &&
842 "Register stack pushes and pops should be balanced");
Dan Gohman7bafa0e2015-11-20 02:33:24 +0000843 }
844#endif
845
Dan Gohman1462faa2015-11-16 16:18:28 +0000846 return Changed;
847}