//===-- MipsConstantIslandPass.cpp - Emit PC-relative loads ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//
// This pass is used to make PC-relative loads of constants.
// For now, only Mips16 will use this.
//
// Loading constants inline is expensive on Mips16 and it's in general better
// to place the constant nearby in code space and then it can be loaded with a
// simple 16-bit load instruction.
//
// The constants are not just numbers; they can also be addresses of functions
// and labels. This can be particularly helpful in static relocation mode for
// embedded non-Linux targets.
//
//
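// As a rough illustration (the register name and label spelling below are
// placeholders, not the literal output of this pass), instead of synthesizing
// a 32-bit constant with a multi-instruction sequence, Mips16 code can fetch
// it with a single PC-relative load from an island placed in nearby code
// space:
//
//   lw    $2, $CPI0_0      # PC-relative load from the constant island
//   ...
//   $CPI0_0:
//   .word 305419896        # the constant, stored among the instructions
//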

#include "Mips.h"
#include "MCTargetDesc/MipsBaseInfo.h"
#include "Mips16InstrInfo.h"
#include "MipsMachineFunction.h"
#include "MipsTargetMachine.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include <algorithm>

using namespace llvm;

#define DEBUG_TYPE "mips-constant-islands"

STATISTIC(NumCPEs, "Number of constpool entries");
STATISTIC(NumSplit, "Number of uncond branches inserted");
STATISTIC(NumCBrFixed, "Number of cond branches fixed");
STATISTIC(NumUBrFixed, "Number of uncond branches fixed");

// FIXME: This option should be removed once it has received sufficient testing.
static cl::opt<bool>
AlignConstantIslands("mips-align-constant-islands", cl::Hidden, cl::init(true),
                     cl::desc("Align constant islands in code"));


// Rather than do make check tests with huge amounts of code, we force
// the tests to use this (small) offset amount instead.
//
static cl::opt<int> ConstantIslandsSmallOffset(
  "mips-constant-islands-small-offset",
  cl::init(0),
  cl::desc("Make small offsets be this amount for testing purposes"),
  cl::Hidden);

//
// For testing purposes we tell it to not use relaxed load forms so that it
// will split blocks.
//
static cl::opt<bool> NoLoadRelaxation(
  "mips-constant-islands-no-load-relaxation",
  cl::init(false),
  cl::desc("Don't relax loads to long loads - for testing purposes"),
  cl::Hidden);

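// Returns the index of the machine operand that holds the branch target for
// the given Mips16 branch opcode: operand 0 for the unconditional and
// T8-conditional forms, operand 1 for the forms that also name a compare
// register.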
static unsigned int branchTargetOperand(MachineInstr *MI) {
  switch (MI->getOpcode()) {
  case Mips::Bimm16:
  case Mips::BimmX16:
  case Mips::Bteqz16:
  case Mips::BteqzX16:
  case Mips::Btnez16:
  case Mips::BtnezX16:
  case Mips::JalB16:
    return 0;
  case Mips::BeqzRxImm16:
  case Mips::BeqzRxImmX16:
  case Mips::BnezRxImm16:
  case Mips::BnezRxImmX16:
    return 1;
  }
  llvm_unreachable("Unknown branch type");
}

static bool isUnconditionalBranch(unsigned int Opcode) {
  switch (Opcode) {
  default: return false;
  case Mips::Bimm16:
  case Mips::BimmX16:
  case Mips::JalB16:
    return true;
  }
}

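// Maps a Mips16 branch opcode to its extended ("X", 32-bit encoding) form,
// which carries a larger displacement field; opcodes with no shorter form
// (e.g. JalB16) map to themselves.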
static unsigned int longformBranchOpcode(unsigned int Opcode) {
  switch (Opcode) {
  case Mips::Bimm16:
  case Mips::BimmX16:
    return Mips::BimmX16;
  case Mips::Bteqz16:
  case Mips::BteqzX16:
    return Mips::BteqzX16;
  case Mips::Btnez16:
  case Mips::BtnezX16:
    return Mips::BtnezX16;
  case Mips::JalB16:
    return Mips::JalB16;
  case Mips::BeqzRxImm16:
  case Mips::BeqzRxImmX16:
    return Mips::BeqzRxImmX16;
  case Mips::BnezRxImm16:
  case Mips::BnezRxImmX16:
    return Mips::BnezRxImmX16;
  }
  llvm_unreachable("Unknown branch type");
}

//
// FIXME: need to go through this whole constant islands port, check the math
// for branch ranges, clean it up, and add helper functions for the
// calculations that are currently repeated in several places.
// Some of the existing code should be refactored to call this routine.
//
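// A Mips16 branch encodes a signed immediate of 'Bits' bits scaled by 'Scale'
// bytes, so the farthest forward target it can reach is
// ((1 << (Bits - 1)) - 1) * Scale bytes away; that is the value computed here.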
static unsigned int branchMaxOffsets(unsigned int Opcode) {
  unsigned Bits, Scale;
  switch (Opcode) {
  case Mips::Bimm16:
    Bits = 11;
    Scale = 2;
    break;
  case Mips::BimmX16:
    Bits = 16;
    Scale = 2;
    break;
  case Mips::BeqzRxImm16:
    Bits = 8;
    Scale = 2;
    break;
  case Mips::BeqzRxImmX16:
    Bits = 16;
    Scale = 2;
    break;
  case Mips::BnezRxImm16:
    Bits = 8;
    Scale = 2;
    break;
  case Mips::BnezRxImmX16:
    Bits = 16;
    Scale = 2;
    break;
  case Mips::Bteqz16:
    Bits = 8;
    Scale = 2;
    break;
  case Mips::BteqzX16:
    Bits = 16;
    Scale = 2;
    break;
  case Mips::Btnez16:
    Bits = 8;
    Scale = 2;
    break;
  case Mips::BtnezX16:
    Bits = 16;
    Scale = 2;
    break;
  default:
    llvm_unreachable("Unknown branch type");
  }
  unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
  return MaxOffs;
}

namespace {


  typedef MachineBasicBlock::iterator Iter;
  typedef MachineBasicBlock::reverse_iterator ReverseIter;

  /// MipsConstantIslands - Due to limited PC-relative displacements, Mips
  /// requires constant pool entries to be scattered among the instructions
  /// inside a function.  To do this, it completely ignores the normal LLVM
  /// constant pool; instead, it places constants wherever it feels like with
  /// special instructions.
  ///
  /// The terminology used in this pass includes:
  ///   Islands - Clumps of constants placed in the function.
  ///   Water   - Potential places where an island could be formed.
  ///   CPE     - A constant pool entry that has been placed somewhere, which
  ///             tracks a list of users.

  class MipsConstantIslands : public MachineFunctionPass {

    /// BasicBlockInfo - Information about the offset and size of a single
    /// basic block.
    struct BasicBlockInfo {
      /// Offset - Distance from the beginning of the function to the beginning
      /// of this basic block.
      ///
      /// Offsets are computed assuming worst case padding before an aligned
      /// block. This means that subtracting basic block offsets always gives a
      /// conservative estimate of the real distance which may be smaller.
      ///
      /// Because worst case padding is used, the computed offset of an aligned
      /// block may not actually be aligned.
      unsigned Offset;

      /// Size - Size of the basic block in bytes.  If the block contains
      /// inline assembly, this is a worst case estimate.
      ///
      /// The size does not include any alignment padding whether from the
      /// beginning of the block, or from an aligned jump table at the end.
      unsigned Size;

      // FIXME: ignore LogAlign for this patch
      //
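      // postOffset - Return the offset of the first byte after this block.
      // With LogAlign ignored (per the FIXME above), this is simply
      // Offset + Size; no alignment padding is added yet.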
      unsigned postOffset(unsigned LogAlign = 0) const {
        unsigned PO = Offset + Size;
        return PO;
      }

      BasicBlockInfo() : Offset(0), Size(0) {}

    };

    std::vector<BasicBlockInfo> BBInfo;

    /// WaterList - A sorted list of basic blocks where islands could be placed
    /// (i.e. blocks that don't fall through to the following block, due
    /// to a return, unreachable, or unconditional branch).
    std::vector<MachineBasicBlock*> WaterList;

    /// NewWaterList - The subset of WaterList that was created since the
    /// previous iteration by inserting unconditional branches.
    SmallSet<MachineBasicBlock*, 4> NewWaterList;

    typedef std::vector<MachineBasicBlock*>::iterator water_iterator;

    /// CPUser - One user of a constant pool, keeping the machine instruction
    /// pointer, the constant pool being referenced, and the max displacement
    /// allowed from the instruction to the CP.  The HighWaterMark records the
    /// highest basic block where a new CPEntry can be placed.  To ensure this
    /// pass terminates, the CP entries are initially placed at the end of the
    /// function and then move monotonically to lower addresses.  The
    /// exception to this rule is when the current CP entry for a particular
    /// CPUser is out of range, but there is another CP entry for the same
    /// constant value in range.  We want to use the existing in-range CP
    /// entry, but if it later moves out of range, the search for new water
    /// should resume where it left off.  The HighWaterMark is used to record
    /// that point.
    struct CPUser {
      MachineInstr *MI;
      MachineInstr *CPEMI;
      MachineBasicBlock *HighWaterMark;
    private:
      unsigned MaxDisp;
      unsigned LongFormMaxDisp; // mips16 has 16/32 bit instructions
                                // with different displacements
      unsigned LongFormOpcode;
    public:
      bool NegOk;
      CPUser(MachineInstr *mi, MachineInstr *cpemi, unsigned maxdisp,
             bool neg,
             unsigned longformmaxdisp, unsigned longformopcode)
        : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp),
          LongFormMaxDisp(longformmaxdisp), LongFormOpcode(longformopcode),
          NegOk(neg) {
        HighWaterMark = CPEMI->getParent();
      }
      /// getMaxDisp - Returns the maximum displacement supported by MI.
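      /// When the -mips-constant-islands-small-offset testing option is set
      /// to a non-zero value, that (smaller) value is returned instead of the
      /// instruction's real reach.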
      unsigned getMaxDisp() const {
        unsigned xMaxDisp = ConstantIslandsSmallOffset ?
                            ConstantIslandsSmallOffset : MaxDisp;
        return xMaxDisp;
      }
      void setMaxDisp(unsigned val) {
        MaxDisp = val;
      }
      unsigned getLongFormMaxDisp() const {
        return LongFormMaxDisp;
      }
      unsigned getLongFormOpcode() const {
        return LongFormOpcode;
      }
    };

    /// CPUsers - Keep track of all of the machine instructions that use various
    /// constant pools and their max displacement.
    std::vector<CPUser> CPUsers;

    /// CPEntry - One per constant pool entry, keeping the machine instruction
    /// pointer, the constpool index, and the number of CPUser's which
    /// reference this entry.
    struct CPEntry {
      MachineInstr *CPEMI;
      unsigned CPI;
      unsigned RefCount;
      CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 0)
        : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}
    };

    /// CPEntries - Keep track of all of the constant pool entry machine
    /// instructions. For each original constpool index (i.e. those that
    /// existed upon entry to this pass), it keeps a vector of entries.
    /// Original elements are cloned as we go along; the clones are
    /// put in the vector of the original element, but have distinct CPIs.
    std::vector<std::vector<CPEntry> > CPEntries;

    /// ImmBranch - One per immediate branch, keeping the machine instruction
    /// pointer, conditional or unconditional, the max displacement,
    /// and (if isCond is true) the corresponding unconditional branch
    /// opcode.
    struct ImmBranch {
      MachineInstr *MI;
      unsigned MaxDisp : 31;
      bool isCond : 1;
      int UncondBr;
      ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, int ubr)
        : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}
    };

    /// ImmBranches - Keep track of all the immediate branch instructions.
    ///
    std::vector<ImmBranch> ImmBranches;

    /// HasFarJump - True if any far jump instruction has been emitted during
    /// the branch fix up pass.
    bool HasFarJump;

    const TargetMachine &TM;
    bool IsPIC;
    unsigned ABI;
    const MipsSubtarget *STI;
    const Mips16InstrInfo *TII;
    MipsFunctionInfo *MFI;
    MachineFunction *MF;
    MachineConstantPool *MCP;

    unsigned PICLabelUId;
    bool PrescannedForConstants;

    void initPICLabelUId(unsigned UId) {
      PICLabelUId = UId;
    }


    unsigned createPICLabelUId() {
      return PICLabelUId++;
    }

  public:
    static char ID;
    MipsConstantIslands(TargetMachine &tm)
      : MachineFunctionPass(ID), TM(tm),
        IsPIC(TM.getRelocationModel() == Reloc::PIC_),
        ABI(TM.getSubtarget<MipsSubtarget>().getTargetABI()),
        STI(&TM.getSubtarget<MipsSubtarget>()), MF(nullptr), MCP(nullptr),
        PrescannedForConstants(false) {}

    virtual const char *getPassName() const {
      return "Mips Constant Islands";
    }

    bool runOnMachineFunction(MachineFunction &F);

    void doInitialPlacement(std::vector<MachineInstr*> &CPEMIs);
    CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);
    unsigned getCPELogAlign(const MachineInstr *CPEMI);
    void initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs);
    unsigned getOffsetOf(MachineInstr *MI) const;
    unsigned getUserOffset(CPUser&) const;
    void dumpBBs();

    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         unsigned Disp, bool NegativeOK);
    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         const CPUser &U);

    void computeBlockSize(MachineBasicBlock *MBB);
    MachineBasicBlock *splitBlockBeforeInstr(MachineInstr *MI);
    void updateForInsertedWaterBlock(MachineBasicBlock *NewBB);
    void adjustBBOffsetsAfter(MachineBasicBlock *BB);
    bool decrementCPEReferenceCount(unsigned CPI, MachineInstr* CPEMI);
    int findInRangeCPEntry(CPUser& U, unsigned UserOffset);
    int findLongFormInRangeCPEntry(CPUser& U, unsigned UserOffset);
    bool findAvailableWater(CPUser &U, unsigned UserOffset,
                            water_iterator &WaterIter);
    void createNewWater(unsigned CPUserIndex, unsigned UserOffset,
                        MachineBasicBlock *&NewMBB);
    bool handleConstantPoolUser(unsigned CPUserIndex);
    void removeDeadCPEMI(MachineInstr *CPEMI);
    bool removeUnusedCPEntries();
    bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                          MachineInstr *CPEMI, unsigned Disp, bool NegOk,
                          bool DoDump = false);
    bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,
                        CPUser &U, unsigned &Growth);
    bool isBBInRange(MachineInstr *MI, MachineBasicBlock *BB, unsigned Disp);
    bool fixupImmediateBr(ImmBranch &Br);
    bool fixupConditionalBr(ImmBranch &Br);
    bool fixupUnconditionalBr(ImmBranch &Br);

    void prescanForConstants();

  private:

  };

  char MipsConstantIslands::ID = 0;
} // end of anonymous namespace

bool MipsConstantIslands::isOffsetInRange
  (unsigned UserOffset, unsigned TrialOffset,
   const CPUser &U) {
  return isOffsetInRange(UserOffset, TrialOffset,
                         U.getMaxDisp(), U.NegOk);
}
/// print block size and offset information - debugging
void MipsConstantIslands::dumpBBs() {
  DEBUG({
    for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
      const BasicBlockInfo &BBI = BBInfo[J];
      dbgs() << format("%08x BB#%u\t", BBI.Offset, J)
             << format(" size=%#x\n", BBInfo[J].Size);
    }
  });
}
/// createMipsConstantIslandPass - Returns a pass that places Mips16 constants
/// in islands within the code.
FunctionPass *llvm::createMipsConstantIslandPass(MipsTargetMachine &tm) {
  return new MipsConstantIslands(tm);
}

bool MipsConstantIslands::runOnMachineFunction(MachineFunction &mf) {
  // The intention is for this to be a mips16 only pass for now
  // FIXME:
  MF = &mf;
  MCP = mf.getConstantPool();
  DEBUG(dbgs() << "constant island machine function " << "\n");
  if (!TM.getSubtarget<MipsSubtarget>().inMips16Mode() ||
      !MipsSubtarget::useConstantIslands()) {
    return false;
  }
  TII = (const Mips16InstrInfo*)MF->getTarget().getInstrInfo();
  MFI = MF->getInfo<MipsFunctionInfo>();
  DEBUG(dbgs() << "constant island processing " << "\n");
  //
  // We will need to predetermine whether there are any constants we need to
  // put in constant islands. TBD.
  //
  if (!PrescannedForConstants) prescanForConstants();

  HasFarJump = false;
  // This pass invalidates liveness information when it splits basic blocks.
  MF->getRegInfo().invalidateLiveness();

  // Renumber all of the machine basic blocks in the function, guaranteeing that
  // the numbers agree with the position of the block in the function.
  MF->RenumberBlocks();

  bool MadeChange = false;

  // Perform the initial placement of the constant pool entries.  To start with,
  // we put them all at the end of the function.
  std::vector<MachineInstr*> CPEMIs;
  if (!MCP->isEmpty())
    doInitialPlacement(CPEMIs);

  /// The next UID to take is the first unused one.
  initPICLabelUId(CPEMIs.size());

  // Do the initial scan of the function, building up information about the
  // sizes of each block, the location of all the water, and finding all of the
  // constant pool users.
  initializeFunctionInfo(CPEMIs);
  CPEMIs.clear();
  DEBUG(dumpBBs());

  /// Remove dead constant pool entries.
  MadeChange |= removeUnusedCPEntries();

  // Iteratively place constant pool entries and fix up branches until there
  // is no change.
  unsigned NoCPIters = 0, NoBRIters = 0;
  (void)NoBRIters;
  while (true) {
    DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');
    bool CPChange = false;
    for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)
      CPChange |= handleConstantPoolUser(i);
    if (CPChange && ++NoCPIters > 30)
      report_fatal_error("Constant Island pass failed to converge!");
    DEBUG(dumpBBs());

    // Clear NewWaterList now.  If we split a block for branches, it should
    // appear as "new water" for the next iteration of constant pool placement.
    NewWaterList.clear();

    DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");
    DEBUG(dumpBBs());
    if (!CPChange && !BRChange)
      break;
    MadeChange = true;
  }

  DEBUG(dbgs() << '\n'; dumpBBs());

  BBInfo.clear();
  WaterList.clear();
  CPUsers.clear();
  CPEntries.clear();
  ImmBranches.clear();
  return MadeChange;
}

/// doInitialPlacement - Perform the initial placement of the constant pool
/// entries.  To start with, we put them all at the end of the function.
void
MipsConstantIslands::doInitialPlacement(std::vector<MachineInstr*> &CPEMIs) {
  // Create the basic block to hold the CPE's.
  MachineBasicBlock *BB = MF->CreateMachineBasicBlock();
  MF->push_back(BB);


  // MachineConstantPool measures alignment in bytes. We measure in log2(bytes).
  unsigned MaxAlign = Log2_32(MCP->getConstantPoolAlignment());

  // Mark the basic block as required by the const-pool.
  // If AlignConstantIslands isn't set, use 4-byte alignment for everything.
  BB->setAlignment(AlignConstantIslands ? MaxAlign : 2);

  // The function needs to be as aligned as the basic blocks. The linker may
  // move functions around based on their alignment.
  MF->ensureAlignment(BB->getAlignment());

  // Order the entries in BB by descending alignment.  That ensures correct
  // alignment of all entries as long as BB is sufficiently aligned.  Keep
  // track of the insertion point for each alignment.  We are going to bucket
  // sort the entries as they are created.
  SmallVector<MachineBasicBlock::iterator, 8> InsPoint(MaxAlign + 1, BB->end());

  // Add all of the constants from the constant pool to the end block, use an
  // identity mapping of CPI's to CPE's.
  const std::vector<MachineConstantPoolEntry> &CPs = MCP->getConstants();

  const DataLayout &TD = *MF->getTarget().getDataLayout();
  for (unsigned i = 0, e = CPs.size(); i != e; ++i) {
    unsigned Size = TD.getTypeAllocSize(CPs[i].getType());
    assert(Size >= 4 && "Too small constant pool entry");
    unsigned Align = CPs[i].getAlignment();
    assert(isPowerOf2_32(Align) && "Invalid alignment");
    // Verify that all constant pool entries are a multiple of their alignment.
    // If not, we would have to pad them out so that instructions stay aligned.
    assert((Size % Align) == 0 && "CP Entry not multiple of 4 bytes!");

    // Insert CONSTPOOL_ENTRY before entries with a smaller alignment.
    unsigned LogAlign = Log2_32(Align);
    MachineBasicBlock::iterator InsAt = InsPoint[LogAlign];

    MachineInstr *CPEMI =
      BuildMI(*BB, InsAt, DebugLoc(), TII->get(Mips::CONSTPOOL_ENTRY))
        .addImm(i).addConstantPoolIndex(i).addImm(Size);

    CPEMIs.push_back(CPEMI);

    // Ensure that future entries with higher alignment get inserted before
    // CPEMI. This is bucket sort with iterators.
    for (unsigned a = LogAlign + 1; a <= MaxAlign; ++a)
      if (InsPoint[a] == InsAt)
        InsPoint[a] = CPEMI;
    // Add a new CPEntry, but no corresponding CPUser yet.
    std::vector<CPEntry> CPEs;
    CPEs.push_back(CPEntry(CPEMI, i));
    CPEntries.push_back(CPEs);
    ++NumCPEs;
    DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "
                 << Size << ", align = " << Align << '\n');
  }
  DEBUG(BB->dump());
}

/// BBHasFallthrough - Return true if the specified basic block can fallthrough
/// into the block immediately after it.
static bool BBHasFallthrough(MachineBasicBlock *MBB) {
  // Get the next machine basic block in the function.
  MachineFunction::iterator MBBI = MBB;
  // Can't fall off end of function.
  if (std::next(MBBI) == MBB->getParent()->end())
    return false;

  MachineBasicBlock *NextBB = std::next(MBBI);
  for (MachineBasicBlock::succ_iterator I = MBB->succ_begin(),
       E = MBB->succ_end(); I != E; ++I)
    if (*I == NextBB)
      return true;

  return false;
}

/// findConstPoolEntry - Given the constpool index and CONSTPOOL_ENTRY MI,
/// look up the corresponding CPEntry.
MipsConstantIslands::CPEntry
*MipsConstantIslands::findConstPoolEntry(unsigned CPI,
                                         const MachineInstr *CPEMI) {
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  // Number of entries per constpool index should be small, just do a
  // linear search.
  for (unsigned i = 0, e = CPEs.size(); i != e; ++i) {
    if (CPEs[i].CPEMI == CPEMI)
      return &CPEs[i];
  }
  return nullptr;
}

/// getCPELogAlign - Returns the required alignment of the constant pool entry
/// represented by CPEMI.  Alignment is measured in log2(bytes) units.
unsigned MipsConstantIslands::getCPELogAlign(const MachineInstr *CPEMI) {
  assert(CPEMI && CPEMI->getOpcode() == Mips::CONSTPOOL_ENTRY);

  // Everything is 4-byte aligned unless AlignConstantIslands is set.
  if (!AlignConstantIslands)
    return 2;

  unsigned CPI = CPEMI->getOperand(1).getIndex();
  assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");
  unsigned Align = MCP->getConstants()[CPI].getAlignment();
  assert(isPowerOf2_32(Align) && "Invalid CPE alignment");
  return Log2_32(Align);
}

/// initializeFunctionInfo - Do the initial scan of the function, building up
/// information about the sizes of each block, the location of all the water,
/// and finding all of the constant pool users.
void MipsConstantIslands::
initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {
  BBInfo.clear();
  BBInfo.resize(MF->getNumBlockIDs());

  // First thing, compute the size of all basic blocks, and see if the function
  // has any inline assembly in it. If so, we have to be conservative about
  // alignment assumptions, as we don't know for sure the size of any
  // instructions in the inline assembly.
  for (MachineFunction::iterator I = MF->begin(), E = MF->end(); I != E; ++I)
    computeBlockSize(I);


  // Compute block offsets.
  adjustBBOffsetsAfter(MF->begin());

  // Now go back through the instructions and build up our data structures.
  for (MachineFunction::iterator MBBI = MF->begin(), E = MF->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock &MBB = *MBBI;

    // If this block doesn't fall through into the next MBB, then this is
    // 'water' where a constant pool island could be placed.
    if (!BBHasFallthrough(&MBB))
      WaterList.push_back(&MBB);
    for (MachineBasicBlock::iterator I = MBB.begin(), E = MBB.end();
         I != E; ++I) {
      if (I->isDebugValue())
        continue;

      int Opc = I->getOpcode();
      if (I->isBranch()) {
        bool isCond = false;
        unsigned Bits = 0;
        unsigned Scale = 1;
        int UOpc = Opc;
        switch (Opc) {
        default:
          continue;  // Ignore other branches for now
        case Mips::Bimm16:
          Bits = 11;
          Scale = 2;
          isCond = false;
          break;
        case Mips::BimmX16:
          Bits = 16;
          Scale = 2;
          isCond = false;
          break;
        case Mips::BeqzRxImm16:
          UOpc = Mips::Bimm16;
          Bits = 8;
          Scale = 2;
          isCond = true;
          break;
        case Mips::BeqzRxImmX16:
          UOpc = Mips::Bimm16;
          Bits = 16;
          Scale = 2;
          isCond = true;
          break;
        case Mips::BnezRxImm16:
          UOpc = Mips::Bimm16;
          Bits = 8;
          Scale = 2;
          isCond = true;
          break;
        case Mips::BnezRxImmX16:
          UOpc = Mips::Bimm16;
          Bits = 16;
          Scale = 2;
          isCond = true;
          break;
        case Mips::Bteqz16:
          UOpc = Mips::Bimm16;
          Bits = 8;
          Scale = 2;
          isCond = true;
          break;
        case Mips::BteqzX16:
          UOpc = Mips::Bimm16;
          Bits = 16;
          Scale = 2;
          isCond = true;
          break;
        case Mips::Btnez16:
          UOpc = Mips::Bimm16;
          Bits = 8;
          Scale = 2;
          isCond = true;
          break;
        case Mips::BtnezX16:
          UOpc = Mips::Bimm16;
          Bits = 16;
          Scale = 2;
          isCond = true;
          break;
        }
        // Record this immediate branch.
        unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
        ImmBranches.push_back(ImmBranch(I, MaxOffs, isCond, UOpc));
      }

      if (Opc == Mips::CONSTPOOL_ENTRY)
        continue;


      // Scan the instructions for constant pool operands.
      for (unsigned op = 0, e = I->getNumOperands(); op != e; ++op)
        if (I->getOperand(op).isCPI()) {

          // We found one.  The addressing mode tells us the max displacement
          // from the PC that this instruction permits.

          // Basic size info comes from the TSFlags field.
          unsigned Bits = 0;
          unsigned Scale = 1;
          bool NegOk = false;
          unsigned LongFormBits = 0;
          unsigned LongFormScale = 0;
          unsigned LongFormOpcode = 0;
          switch (Opc) {
          default:
            llvm_unreachable("Unknown addressing mode for CP reference!");
          case Mips::LwRxPcTcp16:
            Bits = 8;
            Scale = 4;
            LongFormOpcode = Mips::LwRxPcTcpX16;
            LongFormBits = 14;
            LongFormScale = 1;
            break;
          case Mips::LwRxPcTcpX16:
            Bits = 14;
            Scale = 1;
            NegOk = true;
            break;
          }
          // Remember that this is a user of a CP entry.
          unsigned CPI = I->getOperand(op).getIndex();
          MachineInstr *CPEMI = CPEMIs[CPI];
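          // The maximum reach of each load form is ((1 << Bits) - 1) * Scale
          // bytes; whether a negative (backwards) displacement is also
          // allowed is tracked separately via NegOk.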
          unsigned MaxOffs = ((1 << Bits)-1) * Scale;
          unsigned LongFormMaxOffs = ((1 << LongFormBits)-1) * LongFormScale;
          CPUsers.push_back(CPUser(I, CPEMI, MaxOffs, NegOk,
                                   LongFormMaxOffs, LongFormOpcode));

          // Increment corresponding CPEntry reference count.
          CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
          assert(CPE && "Cannot find a corresponding CPEntry!");
          CPE->RefCount++;

          // Instructions can only use one CP entry, don't bother scanning the
          // rest of the operands.
          break;

        }

    }
  }

}

/// computeBlockSize - Compute the size and some alignment information for MBB.
/// This function updates BBInfo directly.
void MipsConstantIslands::computeBlockSize(MachineBasicBlock *MBB) {
  BasicBlockInfo &BBI = BBInfo[MBB->getNumber()];
  BBI.Size = 0;

  for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end(); I != E;
       ++I)
    BBI.Size += TII->GetInstSizeInBytes(I);

}

/// getOffsetOf - Return the current offset of the specified machine instruction
/// from the start of the function.  This offset changes as stuff is moved
/// around inside the function.
unsigned MipsConstantIslands::getOffsetOf(MachineInstr *MI) const {
  MachineBasicBlock *MBB = MI->getParent();

  // The offset is composed of two things: the sum of the sizes of all MBB's
  // before this instruction's block, and the offset from the start of the block
  // it is in.
  unsigned Offset = BBInfo[MBB->getNumber()].Offset;

  // Sum instructions before MI in MBB.
  for (MachineBasicBlock::iterator I = MBB->begin(); &*I != MI; ++I) {
    assert(I != MBB->end() && "Didn't find MI in its own basic block?");
    Offset += TII->GetInstSizeInBytes(I);
  }
  return Offset;
}

/// CompareMBBNumbers - Little predicate function to sort the WaterList by MBB
/// ID.
static bool CompareMBBNumbers(const MachineBasicBlock *LHS,
                              const MachineBasicBlock *RHS) {
  return LHS->getNumber() < RHS->getNumber();
}

/// updateForInsertedWaterBlock - When a block is newly inserted into the
/// machine function, it upsets all of the block numbers.  Renumber the blocks
/// and update the arrays that parallel this numbering.
void MipsConstantIslands::updateForInsertedWaterBlock
  (MachineBasicBlock *NewBB) {
  // Renumber the MBB's to keep them consecutive.
  NewBB->getParent()->RenumberBlocks(NewBB);

  // Insert an entry into BBInfo to align it properly with the (newly
  // renumbered) block numbers.
  BBInfo.insert(BBInfo.begin() + NewBB->getNumber(), BasicBlockInfo());

  // Next, update WaterList.  Specifically, we need to add NewMBB as having
  // available water after it.
  water_iterator IP =
    std::lower_bound(WaterList.begin(), WaterList.end(), NewBB,
                     CompareMBBNumbers);
  WaterList.insert(IP, NewBB);
}

unsigned MipsConstantIslands::getUserOffset(CPUser &U) const {
  return getOffsetOf(U.MI);
}

/// Split the basic block containing MI into two blocks, which are joined by
/// an unconditional branch.  Update data structures and renumber blocks to
/// account for this change and returns the newly created block.
MachineBasicBlock *MipsConstantIslands::splitBlockBeforeInstr
  (MachineInstr *MI) {
  MachineBasicBlock *OrigBB = MI->getParent();

  // Create a new MBB for the code after the OrigBB.
  MachineBasicBlock *NewBB =
    MF->CreateMachineBasicBlock(OrigBB->getBasicBlock());
  MachineFunction::iterator MBBI = OrigBB; ++MBBI;
  MF->insert(MBBI, NewBB);

  // Splice the instructions starting with MI over to NewBB.
  NewBB->splice(NewBB->end(), OrigBB, MI, OrigBB->end());

  // Add an unconditional branch from OrigBB to NewBB.
  // Note the new unconditional branch is not being recorded.
  // There doesn't seem to be meaningful DebugInfo available; this doesn't
  // correspond to anything in the source.
  BuildMI(OrigBB, DebugLoc(), TII->get(Mips::Bimm16)).addMBB(NewBB);
  ++NumSplit;

  // Update the CFG.  All succs of OrigBB are now succs of NewBB.
  NewBB->transferSuccessors(OrigBB);

  // OrigBB branches to NewBB.
  OrigBB->addSuccessor(NewBB);

  // Update internal data structures to account for the newly inserted MBB.
  // This is almost the same as updateForInsertedWaterBlock, except that
  // the Water goes after OrigBB, not NewBB.
  MF->RenumberBlocks(NewBB);

  // Insert an entry into BBInfo to align it properly with the (newly
  // renumbered) block numbers.
  BBInfo.insert(BBInfo.begin() + NewBB->getNumber(), BasicBlockInfo());

  // Next, update WaterList.  Specifically, we need to add OrigMBB as having
  // available water after it (but not if it's already there, which happens
  // when splitting before a conditional branch that is followed by an
  // unconditional branch - in that case we want to insert NewBB).
  water_iterator IP =
    std::lower_bound(WaterList.begin(), WaterList.end(), OrigBB,
                     CompareMBBNumbers);
  MachineBasicBlock* WaterBB = *IP;
  if (WaterBB == OrigBB)
    WaterList.insert(std::next(IP), NewBB);
  else
    WaterList.insert(IP, OrigBB);
  NewWaterList.insert(OrigBB);

  // Figure out how large the OrigBB is.  As the first half of the original
  // block, it cannot contain a tablejump.  The size includes
  // the new jump we added.  (It should be possible to do this without
  // recounting everything, but it's very confusing, and this is rarely
  // executed.)
  computeBlockSize(OrigBB);

  // Figure out how large the NewMBB is.  As the second half of the original
  // block, it may contain a tablejump.
  computeBlockSize(NewBB);

  // All BBOffsets following these blocks must be modified.
  adjustBBOffsetsAfter(OrigBB);

  return NewBB;
}



/// isOffsetInRange - Checks whether UserOffset (the location of a constant pool
/// reference) is within MaxDisp of TrialOffset (a proposed location of a
/// constant pool entry).
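/// For instance, with MaxDisp == 1020 and NegativeOK == false, a user at
/// offset 0x100 can reach trial offsets from 0x100 up to 0x4FC inclusive,
/// but nothing below its own offset.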
bool MipsConstantIslands::isOffsetInRange(unsigned UserOffset,
                                          unsigned TrialOffset, unsigned MaxDisp,
                                          bool NegativeOK) {
  if (UserOffset <= TrialOffset) {
    // User before the Trial.
    if (TrialOffset - UserOffset <= MaxDisp)
      return true;
  } else if (NegativeOK) {
    if (UserOffset - TrialOffset <= MaxDisp)
      return true;
  }
  return false;
}

/// isWaterInRange - Returns true if a CPE placed after the specified
/// Water (a basic block) will be in range for the specific MI.
///
/// Compute how much the function will grow by inserting a CPE after Water.
bool MipsConstantIslands::isWaterInRange(unsigned UserOffset,
                                         MachineBasicBlock* Water, CPUser &U,
                                         unsigned &Growth) {
  unsigned CPELogAlign = getCPELogAlign(U.CPEMI);
  unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPELogAlign);
  unsigned NextBlockOffset, NextBlockAlignment;
  MachineFunction::const_iterator NextBlock = Water;
  if (++NextBlock == MF->end()) {
    NextBlockOffset = BBInfo[Water->getNumber()].postOffset();
    NextBlockAlignment = 0;
  } else {
    NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
    NextBlockAlignment = NextBlock->getAlignment();
  }
  unsigned Size = U.CPEMI->getOperand(2).getImm();
  unsigned CPEEnd = CPEOffset + Size;

  // The CPE may be able to hide in the alignment padding before the next
  // block. It may also cause more padding to be required if it is more aligned
  // than the next block.
  if (CPEEnd > NextBlockOffset) {
    Growth = CPEEnd - NextBlockOffset;
    // Compute the padding that would go at the end of the CPE to align the next
    // block.
    Growth += OffsetToAlignment(CPEEnd, 1u << NextBlockAlignment);

    // If the CPE is to be inserted before the instruction, that will raise
    // the offset of the instruction. Also account for unknown alignment padding
    // in blocks between CPE and the user.
    if (CPEOffset < UserOffset)
      UserOffset += Growth;
  } else
    // CPE fits in existing padding.
    Growth = 0;

  return isOffsetInRange(UserOffset, CPEOffset, U);
}

/// isCPEntryInRange - Returns true if the distance between specific MI and
/// specific ConstPool entry instruction can fit in MI's displacement field.
bool MipsConstantIslands::isCPEntryInRange
  (MachineInstr *MI, unsigned UserOffset,
   MachineInstr *CPEMI, unsigned MaxDisp,
   bool NegOk, bool DoDump) {
  unsigned CPEOffset = getOffsetOf(CPEMI);

  if (DoDump) {
    DEBUG({
      unsigned Block = MI->getParent()->getNumber();
      const BasicBlockInfo &BBI = BBInfo[Block];
      dbgs() << "User of CPE#" << CPEMI->getOperand(0).getImm()
             << " max delta=" << MaxDisp
             << format(" insn address=%#x", UserOffset)
             << " in BB#" << Block << ": "
             << format("%#x-%x\t", BBI.Offset, BBI.postOffset()) << *MI
             << format("CPE address=%#x offset=%+d: ", CPEOffset,
                       int(CPEOffset-UserOffset));
    });
  }

  return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
}

#ifndef NDEBUG
/// BBIsJumpedOver - Return true if the specified basic block's only predecessor
/// unconditionally branches to its only successor.
static bool BBIsJumpedOver(MachineBasicBlock *MBB) {
  if (MBB->pred_size() != 1 || MBB->succ_size() != 1)
    return false;
  MachineBasicBlock *Succ = *MBB->succ_begin();
  MachineBasicBlock *Pred = *MBB->pred_begin();
  MachineInstr *PredMI = &Pred->back();
  if (PredMI->getOpcode() == Mips::Bimm16)
    return PredMI->getOperand(0).getMBB() == Succ;
  return false;
}
#endif

void MipsConstantIslands::adjustBBOffsetsAfter(MachineBasicBlock *BB) {
  unsigned BBNum = BB->getNumber();
  for (unsigned i = BBNum + 1, e = MF->getNumBlockIDs(); i < e; ++i) {
    // Get the offset and known bits at the end of the layout predecessor.
    // Include the alignment of the current block.
    unsigned Offset = BBInfo[i - 1].Offset + BBInfo[i - 1].Size;
    BBInfo[i].Offset = Offset;
  }
}

/// decrementCPEReferenceCount - find the constant pool entry with index CPI
/// and instruction CPEMI, and decrement its refcount.  If the refcount
/// becomes 0 remove the entry and instruction.  Returns true if we removed
/// the entry, false if we didn't.

bool MipsConstantIslands::decrementCPEReferenceCount(unsigned CPI,
                                                     MachineInstr *CPEMI) {
  // Find the old entry. Eliminate it if it is no longer used.
  CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
  assert(CPE && "Unexpected!");
  if (--CPE->RefCount == 0) {
    removeDeadCPEMI(CPEMI);
    CPE->CPEMI = nullptr;
    --NumCPEs;
    return true;
  }
  return false;
}

/// findInRangeCPEntry - see if the currently referenced CPE is in range;
/// if not, see if an in-range clone of the CPE is in range, and if so,
/// change the data structures so the user references the clone.  Returns:
/// 0 = no existing entry found
/// 1 = entry found, and there were no code insertions or deletions
/// 2 = entry found, and there were code insertions or deletions
int MipsConstantIslands::findInRangeCPEntry(CPUser& U, unsigned UserOffset)
{
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,
                       true)) {
    DEBUG(dbgs() << "In range\n");
    return 1;
  }

  // No.  Look for previously created clones of the CPE that are in range.
  unsigned CPI = CPEMI->getOperand(1).getIndex();
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (unsigned i = 0, e = CPEs.size(); i != e; ++i) {
    // We already tried this one
    if (CPEs[i].CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip
    if (CPEs[i].CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPEs[i].CPEMI, U.getMaxDisp(),
                         U.NegOk)) {
      DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#"
                   << CPEs[i].CPI << "\n");
      // Point the CPUser node to the replacement
      U.CPEMI = CPEs[i].CPEMI;
      // Change the CPI in the instruction operand to refer to the clone.
      for (unsigned j = 0, e = UserMI->getNumOperands(); j != e; ++j)
        if (UserMI->getOperand(j).isCPI()) {
          UserMI->getOperand(j).setIndex(CPEs[i].CPI);
          break;
        }
      // Adjust the refcount of the clone...
      CPEs[i].RefCount++;
      // ...and the original.  If we didn't remove the old entry, none of the
      // addresses changed, so we don't need another pass.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}

/// findLongFormInRangeCPEntry - see if the currently referenced CPE is in
/// range; this version checks whether the longer form of the instruction can
/// be used to satisfy things.
/// If not, see if an in-range clone of the CPE is in range, and if so,
/// change the data structures so the user references the clone.  Returns:
/// 0 = no existing entry found
/// 1 = entry found, and there were no code insertions or deletions
/// 2 = entry found, and there were code insertions or deletions
int MipsConstantIslands::findLongFormInRangeCPEntry
  (CPUser& U, unsigned UserOffset)
{
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI,
                       U.getLongFormMaxDisp(), U.NegOk,
                       true)) {
    DEBUG(dbgs() << "In range\n");
    UserMI->setDesc(TII->get(U.getLongFormOpcode()));
    U.setMaxDisp(U.getLongFormMaxDisp());
    return 2;  // instruction is longer length now
  }

  // No.  Look for previously created clones of the CPE that are in range.
  unsigned CPI = CPEMI->getOperand(1).getIndex();
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (unsigned i = 0, e = CPEs.size(); i != e; ++i) {
    // We already tried this one
    if (CPEs[i].CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip
    if (CPEs[i].CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPEs[i].CPEMI,
                         U.getLongFormMaxDisp(), U.NegOk)) {
      DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#"
                   << CPEs[i].CPI << "\n");
      // Point the CPUser node to the replacement
      U.CPEMI = CPEs[i].CPEMI;
      // Change the CPI in the instruction operand to refer to the clone.
      for (unsigned j = 0, e = UserMI->getNumOperands(); j != e; ++j)
        if (UserMI->getOperand(j).isCPI()) {
          UserMI->getOperand(j).setIndex(CPEs[i].CPI);
          break;
        }
      // Adjust the refcount of the clone...
      CPEs[i].RefCount++;
      // ...and the original.  If we didn't remove the old entry, none of the
      // addresses changed, so we don't need another pass.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}

/// getUnconditionalBrDisp - Returns the maximum displacement that can fit in
/// the specific unconditional branch instruction.
static inline unsigned getUnconditionalBrDisp(int Opc) {
  switch (Opc) {
  case Mips::Bimm16:
    return ((1<<10)-1)*2;
  case Mips::BimmX16:
    return ((1<<16)-1)*2;
  default:
    break;
  }
  return ((1<<16)-1)*2;
}

/// findAvailableWater - Look for an existing entry in the WaterList in which
/// we can place the CPE referenced from U so it's within range of U's MI.
/// Returns true if found, false if not.  If it returns true, WaterIter
/// is set to the WaterList entry.
/// To ensure that this pass
/// terminates, the CPE location for a particular CPUser is only allowed to
/// move to a lower address, so search backward from the end of the list and
/// prefer the first water that is in range.
bool MipsConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,
                                             water_iterator &WaterIter) {
  if (WaterList.empty())
    return false;

  unsigned BestGrowth = ~0u;
  for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;
       --IP) {
    MachineBasicBlock* WaterBB = *IP;
    // Check if water is in range and is either at a lower address than the
    // current "high water mark" or a new water block that was created since
    // the previous iteration by inserting an unconditional branch.  In the
    // latter case, we want to allow resetting the high water mark back to
    // this new water since we haven't seen it before.  Inserting branches
    // should be relatively uncommon and when it does happen, we want to be
    // sure to take advantage of it for all the CPEs near that block, so that
    // we don't insert more branches than necessary.
    unsigned Growth;
    if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
        (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||
         NewWaterList.count(WaterBB)) && Growth < BestGrowth) {
      // This is the least amount of required padding seen so far.
      BestGrowth = Growth;
      WaterIter = IP;
      DEBUG(dbgs() << "Found water after BB#" << WaterBB->getNumber()
                   << " Growth=" << Growth << '\n');

      // Keep looking unless it is perfect.
      if (BestGrowth == 0)
        return true;
    }
    if (IP == B)
      break;
  }
  return BestGrowth != ~0u;
}
1239
1240/// createNewWater - No existing WaterList entry will work for
1241/// CPUsers[CPUserIndex], so create a place to put the CPE. The end of the
1242/// block is used if in range, and the conditional branch munged so control
1243/// flow is correct. Otherwise the block is split to create a hole with an
1244/// unconditional branch around it. In either case NewMBB is set to a
1245/// block following which the new island can be inserted (the WaterList
1246/// is not adjusted).
1247void MipsConstantIslands::createNewWater(unsigned CPUserIndex,
1248 unsigned UserOffset,
1249 MachineBasicBlock *&NewMBB) {
1250 CPUser &U = CPUsers[CPUserIndex];
1251 MachineInstr *UserMI = U.MI;
1252 MachineInstr *CPEMI = U.CPEMI;
1253 unsigned CPELogAlign = getCPELogAlign(CPEMI);
1254 MachineBasicBlock *UserMBB = UserMI->getParent();
1255 const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];
1256
1257 // If the block does not end in an unconditional branch already, and if the
Reed Kotler4d0313d2013-11-05 12:04:37 +00001258 // end of the block is within range, make new water there.
Reed Kotler0f007fc2013-11-05 08:14:14 +00001259 if (BBHasFallthrough(UserMBB)) {
1260 // Size of branch to insert.
1261 unsigned Delta = 2;
1262 // Compute the offset where the CPE will begin.
1263 unsigned CPEOffset = UserBBI.postOffset(CPELogAlign) + Delta;
1264
1265 if (isOffsetInRange(UserOffset, CPEOffset, U)) {
1266 DEBUG(dbgs() << "Split at end of BB#" << UserMBB->getNumber()
1267 << format(", expected CPE offset %#x\n", CPEOffset));
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001268 NewMBB = std::next(MachineFunction::iterator(UserMBB));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001269 // Add an unconditional branch from UserMBB to fallthrough block. Record
1270 // it for branch lengthening; this new branch will not get out of range,
1271 // but if the preceding conditional branch is out of range, the targets
1272 // will be exchanged, and the altered branch may be out of range, so the
1273 // machinery has to know about it.
Reed Kotlerf0e69682013-11-12 02:27:12 +00001274 int UncondBr = Mips::Bimm16;
Reed Kotler0f007fc2013-11-05 08:14:14 +00001275 BuildMI(UserMBB, DebugLoc(), TII->get(UncondBr)).addMBB(NewMBB);
1276 unsigned MaxDisp = getUnconditionalBrDisp(UncondBr);
1277 ImmBranches.push_back(ImmBranch(&UserMBB->back(),
1278 MaxDisp, false, UncondBr));
1279 BBInfo[UserMBB->getNumber()].Size += Delta;
1280 adjustBBOffsetsAfter(UserMBB);
1281 return;
1282 }
1283 }
1284
Reed Kotler4d0313d2013-11-05 12:04:37 +00001285 // What a big block. Find a place within the block to split it.
Reed Kotler0f007fc2013-11-05 08:14:14 +00001286
1287 // Try to split the block so it's fully aligned. Compute the latest split
1288 // point where we can add a 4-byte branch instruction, and then align to
1289 // LogAlign which is the largest possible alignment in the function.
1290 unsigned LogAlign = MF->getAlignment();
1291 assert(LogAlign >= CPELogAlign && "Over-aligned constant pool entry");
Reed Kotler7ded5b62013-11-05 23:36:58 +00001292 unsigned BaseInsertOffset = UserOffset + U.getMaxDisp();
Reed Kotler0f007fc2013-11-05 08:14:14 +00001293 DEBUG(dbgs() << format("Split in middle of big block before %#x",
1294 BaseInsertOffset));
1295
1296 // The 4 in the following is for the unconditional branch we'll be inserting
Reed Kotler4d0313d2013-11-05 12:04:37 +00001297 // Alignment of the island is handled
Reed Kotler0f007fc2013-11-05 08:14:14 +00001298 // inside isOffsetInRange.
1299 BaseInsertOffset -= 4;
1300
1301 DEBUG(dbgs() << format(", adjusted to %#x", BaseInsertOffset)
Reed Kotler7ded5b62013-11-05 23:36:58 +00001302 << " la=" << LogAlign << '\n');
Reed Kotler0f007fc2013-11-05 08:14:14 +00001303
1304 // This could point off the end of the block if we've already got constant
1305 // pool entries following this block; only the last one is in the water list.
1306 // Back past any possible branches (allow for a conditional and a maximally
1307 // long unconditional).
1308 if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {
Reed Kotler7ded5b62013-11-05 23:36:58 +00001309 BaseInsertOffset = UserBBI.postOffset() - 8;
Reed Kotler0f007fc2013-11-05 08:14:14 +00001310 DEBUG(dbgs() << format("Move inside block: %#x\n", BaseInsertOffset));
1311 }
Reed Kotler7ded5b62013-11-05 23:36:58 +00001312 unsigned EndInsertOffset = BaseInsertOffset + 4 +
Reed Kotler0f007fc2013-11-05 08:14:14 +00001313 CPEMI->getOperand(2).getImm();
1314 MachineBasicBlock::iterator MI = UserMI;
1315 ++MI;
1316 unsigned CPUIndex = CPUserIndex+1;
1317 unsigned NumCPUsers = CPUsers.size();
1318 //MachineInstr *LastIT = 0;
1319 for (unsigned Offset = UserOffset+TII->GetInstSizeInBytes(UserMI);
1320 Offset < BaseInsertOffset;
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001321 Offset += TII->GetInstSizeInBytes(MI), MI = std::next(MI)) {
Reed Kotler0f007fc2013-11-05 08:14:14 +00001322 assert(MI != UserMBB->end() && "Fell off end of block");
1323 if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == MI) {
1324 CPUser &U = CPUsers[CPUIndex];
1325 if (!isOffsetInRange(Offset, EndInsertOffset, U)) {
 1326          // Shift insertion point by one unit of alignment so it is within reach.
1327 BaseInsertOffset -= 1u << LogAlign;
1328 EndInsertOffset -= 1u << LogAlign;
1329 }
1330 // This is overly conservative, as we don't account for CPEMIs being
1331 // reused within the block, but it doesn't matter much. Also assume CPEs
1332 // are added in order with alignment padding. We may eventually be able
1333 // to pack the aligned CPEs better.
1334 EndInsertOffset += U.CPEMI->getOperand(2).getImm();
1335 CPUIndex++;
1336 }
1337 }
1338
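  // MI now points at the first instruction at or beyond BaseInsertOffset;
  // back up one and split the block before it so the island can go there.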
1339 --MI;
1340 NewMBB = splitBlockBeforeInstr(MI);
1341}
1342
1343/// handleConstantPoolUser - Analyze the specified user, checking to see if it
1344/// is out-of-range. If so, pick up the constant pool value and move it some
1345/// place in-range. Return true if we changed any addresses (thus must run
1346/// another pass of branch lengthening), false otherwise.
1347bool MipsConstantIslands::handleConstantPoolUser(unsigned CPUserIndex) {
1348 CPUser &U = CPUsers[CPUserIndex];
1349 MachineInstr *UserMI = U.MI;
1350 MachineInstr *CPEMI = U.CPEMI;
1351 unsigned CPI = CPEMI->getOperand(1).getIndex();
1352 unsigned Size = CPEMI->getOperand(2).getImm();
1353 // Compute this only once, it's expensive.
1354 unsigned UserOffset = getUserOffset(U);
1355
1356 // See if the current entry is within range, or there is a clone of it
1357 // in range.
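  // (A result of 1 means the entry is already usable and nothing changed;
  // 2 means addresses changed, so another pass will be needed.)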
1358 int result = findInRangeCPEntry(U, UserOffset);
1359 if (result==1) return false;
1360 else if (result==2) return true;
1361
1362
1363 // Look for water where we can place this CPE.
1364 MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
1365 MachineBasicBlock *NewMBB;
1366 water_iterator IP;
1367 if (findAvailableWater(U, UserOffset, IP)) {
1368 DEBUG(dbgs() << "Found water in range\n");
1369 MachineBasicBlock *WaterBB = *IP;
1370
1371 // If the original WaterList entry was "new water" on this iteration,
1372 // propagate that to the new island. This is just keeping NewWaterList
1373 // updated to match the WaterList, which will be updated below.
1374 if (NewWaterList.erase(WaterBB))
1375 NewWaterList.insert(NewIsland);
1376
1377 // The new CPE goes before the following block (NewMBB).
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001378 NewMBB = std::next(MachineFunction::iterator(WaterBB));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001379
1380 } else {
1381 // No water found.
 1382    // We first see if a longer form of the instruction could have reached
 1383    // the constant. In that case we won't bother to split.
Reed Kotler45c59272013-11-10 00:09:26 +00001384 if (!NoLoadRelaxation) {
1385 result = findLongFormInRangeCPEntry(U, UserOffset);
1386 if (result != 0) return true;
1387 }
Reed Kotler0f007fc2013-11-05 08:14:14 +00001388 DEBUG(dbgs() << "No water found\n");
1389 createNewWater(CPUserIndex, UserOffset, NewMBB);
1390
1391 // splitBlockBeforeInstr adds to WaterList, which is important when it is
1392 // called while handling branches so that the water will be seen on the
1393 // next iteration for constant pools, but in this context, we don't want
1394 // it. Check for this so it will be removed from the WaterList.
1395 // Also remove any entry from NewWaterList.
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001396 MachineBasicBlock *WaterBB = std::prev(MachineFunction::iterator(NewMBB));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001397 IP = std::find(WaterList.begin(), WaterList.end(), WaterBB);
1398 if (IP != WaterList.end())
1399 NewWaterList.erase(WaterBB);
1400
1401 // We are adding new water. Update NewWaterList.
1402 NewWaterList.insert(NewIsland);
1403 }
1404
1405 // Remove the original WaterList entry; we want subsequent insertions in
1406 // this vicinity to go after the one we're about to insert. This
1407 // considerably reduces the number of times we have to move the same CPE
1408 // more than once and is also important to ensure the algorithm terminates.
1409 if (IP != WaterList.end())
1410 WaterList.erase(IP);
1411
1412 // Okay, we know we can put an island before NewMBB now, do it!
1413 MF->insert(NewMBB, NewIsland);
1414
1415 // Update internal data structures to account for the newly inserted MBB.
1416 updateForInsertedWaterBlock(NewIsland);
1417
1418 // Decrement the old entry, and remove it if refcount becomes 0.
1419 decrementCPEReferenceCount(CPI, CPEMI);
1420
Reed Kotlerd3b28eb2013-11-24 02:53:09 +00001421 // No existing clone of this CPE is within range.
1422 // We will be generating a new clone. Get a UID for it.
1423 unsigned ID = createPICLabelUId();
1424
Reed Kotler0f007fc2013-11-05 08:14:14 +00001425 // Now that we have an island to add the CPE to, clone the original CPE and
1426 // add it to the island.
1427 U.HighWaterMark = NewIsland;
1428 U.CPEMI = BuildMI(NewIsland, DebugLoc(), TII->get(Mips::CONSTPOOL_ENTRY))
1429 .addImm(ID).addConstantPoolIndex(CPI).addImm(Size);
1430 CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));
1431 ++NumCPEs;
1432
1433 // Mark the basic block as aligned as required by the const-pool entry.
1434 NewIsland->setAlignment(getCPELogAlign(U.CPEMI));
1435
1436 // Increase the size of the island block to account for the new entry.
1437 BBInfo[NewIsland->getNumber()].Size += Size;
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001438 adjustBBOffsetsAfter(std::prev(MachineFunction::iterator(NewIsland)));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001439
1442 // Finally, change the CPI in the instruction operand to be ID.
1443 for (unsigned i = 0, e = UserMI->getNumOperands(); i != e; ++i)
1444 if (UserMI->getOperand(i).isCPI()) {
1445 UserMI->getOperand(i).setIndex(ID);
1446 break;
1447 }
1448
1449 DEBUG(dbgs() << " Moved CPE to #" << ID << " CPI=" << CPI
1450 << format(" offset=%#x\n", BBInfo[NewIsland->getNumber()].Offset));
1451
1452 return true;
1453}
1454
1455/// removeDeadCPEMI - Remove a dead constant pool entry instruction. Update
1456/// sizes and offsets of impacted basic blocks.
1457void MipsConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
1458 MachineBasicBlock *CPEBB = CPEMI->getParent();
1459 unsigned Size = CPEMI->getOperand(2).getImm();
1460 CPEMI->eraseFromParent();
1461 BBInfo[CPEBB->getNumber()].Size -= Size;
1462 // All succeeding offsets have the current size value added in, fix this.
1463 if (CPEBB->empty()) {
1464 BBInfo[CPEBB->getNumber()].Size = 0;
1465
1466 // This block no longer needs to be aligned.
1467 CPEBB->setAlignment(0);
1468 } else
1469 // Entries are sorted by descending alignment, so realign from the front.
1470 CPEBB->setAlignment(getCPELogAlign(CPEBB->begin()));
1471
1472 adjustBBOffsetsAfter(CPEBB);
1473 // An island has only one predecessor BB and one successor BB. Check if
1474 // this BB's predecessor jumps directly to this BB's successor. This
1475 // shouldn't happen currently.
1476 assert(!BBIsJumpedOver(CPEBB) && "How did this happen?");
1477 // FIXME: remove the empty blocks after all the work is done?
1478}
1479
1480/// removeUnusedCPEntries - Remove constant pool entries whose refcounts
1481/// are zero.
1482bool MipsConstantIslands::removeUnusedCPEntries() {
 1483  bool MadeChange = false;
1484 for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
1485 std::vector<CPEntry> &CPEs = CPEntries[i];
1486 for (unsigned j = 0, ee = CPEs.size(); j != ee; ++j) {
1487 if (CPEs[j].RefCount == 0 && CPEs[j].CPEMI) {
1488 removeDeadCPEMI(CPEs[j].CPEMI);
Craig Topper062a2ba2014-04-25 05:30:21 +00001489 CPEs[j].CPEMI = nullptr;
Reed Kotler0f007fc2013-11-05 08:14:14 +00001490 MadeChange = true;
1491 }
1492 }
1493 }
1494 return MadeChange;
1495}
1496
1497/// isBBInRange - Returns true if the distance between specific MI and
1498/// specific BB can fit in MI's displacement field.
1499bool MipsConstantIslands::isBBInRange
 1500  (MachineInstr *MI, MachineBasicBlock *DestBB, unsigned MaxDisp) {
1501
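  // Branch displacements are measured from PCAdj bytes past the start of
  // the branch instruction.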
 1502  unsigned PCAdj = 4;
1503
1504 unsigned BrOffset = getOffsetOf(MI) + PCAdj;
1505 unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;
1506
 1507  DEBUG(dbgs() << "Branch to destination BB#" << DestBB->getNumber()
1508 << " from BB#" << MI->getParent()->getNumber()
1509 << " max delta=" << MaxDisp
1510 << " from " << getOffsetOf(MI) << " to " << DestOffset
1511 << " offset " << int(DestOffset-BrOffset) << "\t" << *MI);
1512
1513 if (BrOffset <= DestOffset) {
1514 // Branch before the Dest.
1515 if (DestOffset-BrOffset <= MaxDisp)
1516 return true;
1517 } else {
1518 if (BrOffset-DestOffset <= MaxDisp)
1519 return true;
1520 }
1521 return false;
1522}
1523
1524/// fixupImmediateBr - Fix up an immediate branch whose destination is too far
1525/// away to fit in its displacement field.
1526bool MipsConstantIslands::fixupImmediateBr(ImmBranch &Br) {
1527 MachineInstr *MI = Br.MI;
Reed Kotler0d409e22013-11-28 00:56:37 +00001528 unsigned TargetOperand = branchTargetOperand(MI);
1529 MachineBasicBlock *DestBB = MI->getOperand(TargetOperand).getMBB();
Reed Kotler0f007fc2013-11-05 08:14:14 +00001530
1531 // Check to see if the DestBB is already in-range.
1532 if (isBBInRange(MI, DestBB, Br.MaxDisp))
1533 return false;
1534
1535 if (!Br.isCond)
1536 return fixupUnconditionalBr(Br);
1537 return fixupConditionalBr(Br);
1538}
1539
1540/// fixupUnconditionalBr - Fix up an unconditional branch whose destination is
 1541/// too far away to fit in its displacement field. If the extended form of the
 1542/// branch (BimmX16) can reach the destination, use it; otherwise use a
 1543/// jal-based far branch (JalB16), which relies on RA being saved.
1544bool
1545MipsConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
1546 MachineInstr *MI = Br.MI;
1547 MachineBasicBlock *MBB = MI->getParent();
Reed Kotler2fc05be2013-11-21 05:13:23 +00001548 MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();
Reed Kotler0f007fc2013-11-05 08:14:14 +00001549  // Try the extended branch form first; fall back to a jal-based far jump.
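  // Displacement limit of the extended (BimmX16) form: a 16-bit offset
  // counted in 2-byte units.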
Reed Kotler2fc05be2013-11-21 05:13:23 +00001550 unsigned BimmX16MaxDisp = ((1 << 16)-1) * 2;
1551 if (isBBInRange(MI, DestBB, BimmX16MaxDisp)) {
1552 Br.MaxDisp = BimmX16MaxDisp;
1553 MI->setDesc(TII->get(Mips::BimmX16));
1554 }
1555 else {
 1556      // FIXME: the math here needs a more careful look. This is really a
 1557      // segment address and not a PC-relative address, but I think that
 1558      // just reducing the bits by 1, as done here, is correct.
 1559      // The basic block we are branching to must be longword aligned.
 1560      // We know that RA is saved because we always save it right now.
 1561      // This requirement will be relaxed later, but we also have an
 1562      // alternate way to implement this, to be done later, that does
 1563      // not need jal.
 1564      // We should have a way to back out this alignment restriction if
 1565      // we can later, but it is not harmful.
 1566      //
1567 DestBB->setAlignment(2);
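    // Reach of the jal-based branch, kept one bit smaller than the
    // instruction's actual range (see the FIXME above); offsets are in
    // 2-byte units.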
1568 Br.MaxDisp = ((1<<24)-1) * 2;
Reed Kotlerad450f22013-11-29 22:32:56 +00001569 MI->setDesc(TII->get(Mips::JalB16));
Reed Kotler2fc05be2013-11-21 05:13:23 +00001570 }
Reed Kotler0f007fc2013-11-05 08:14:14 +00001571 BBInfo[MBB->getNumber()].Size += 2;
1572 adjustBBOffsetsAfter(MBB);
1573 HasFarJump = true;
1574 ++NumUBrFixed;
1575
1576 DEBUG(dbgs() << " Changed B to long jump " << *MI);
1577
1578 return true;
1579}
1580
Reed Kotler0d409e22013-11-28 00:56:37 +00001581
Reed Kotler0f007fc2013-11-05 08:14:14 +00001582/// fixupConditionalBr - Fix up a conditional branch whose destination is too
1583/// far away to fit in its displacement field. It is converted to an inverse
1584/// conditional branch + an unconditional branch to the destination.
1585bool
1586MipsConstantIslands::fixupConditionalBr(ImmBranch &Br) {
1587 MachineInstr *MI = Br.MI;
Reed Kotler0d409e22013-11-28 00:56:37 +00001588 unsigned TargetOperand = branchTargetOperand(MI);
1589 MachineBasicBlock *DestBB = MI->getOperand(TargetOperand).getMBB();
1590 unsigned Opcode = MI->getOpcode();
1591 unsigned LongFormOpcode = longformBranchOpcode(Opcode);
1592 unsigned LongFormMaxOff = branchMaxOffsets(LongFormOpcode);
1593
 1594  // See if the long (extended) form of this branch can reach the destination.
1595 if (isBBInRange(MI, DestBB, LongFormMaxOff)) {
1596 Br.MaxDisp = LongFormMaxOff;
1597 MI->setDesc(TII->get(LongFormOpcode));
1598 return true;
1599 }
Reed Kotler0f007fc2013-11-05 08:14:14 +00001600
1601 // Add an unconditional branch to the destination and invert the branch
1602 // condition to jump over it:
Reed Kotlerad450f22013-11-29 22:32:56 +00001603 // bteqz L1
Reed Kotler0f007fc2013-11-05 08:14:14 +00001604 // =>
Reed Kotlerad450f22013-11-29 22:32:56 +00001605  // btnez L2
Reed Kotler0f007fc2013-11-05 08:14:14 +00001606 // b L1
1607 // L2:
Reed Kotler0f007fc2013-11-05 08:14:14 +00001608
1609 // If the branch is at the end of its MBB and that has a fall-through block,
1610 // direct the updated conditional branch to the fall-through block. Otherwise,
1611 // split the MBB before the next instruction.
1612 MachineBasicBlock *MBB = MI->getParent();
1613 MachineInstr *BMI = &MBB->back();
1614 bool NeedSplit = (BMI != MI) || !BBHasFallthrough(MBB);
Reed Kotler47f3c64a2013-12-19 00:43:08 +00001615 unsigned OppositeBranchOpcode = TII->getOppositeBranchOpc(Opcode);
Reed Kotlerad450f22013-11-29 22:32:56 +00001616
Reed Kotler0f007fc2013-11-05 08:14:14 +00001617 ++NumCBrFixed;
1618 if (BMI != MI) {
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001619 if (std::next(MachineBasicBlock::iterator(MI)) == std::prev(MBB->end()) &&
Reed Kotlerad450f22013-11-29 22:32:56 +00001620 isUnconditionalBranch(BMI->getOpcode())) {
Reed Kotler0f007fc2013-11-05 08:14:14 +00001621 // Last MI in the BB is an unconditional branch. Can we simply invert the
1622 // condition and swap destinations:
Reed Kotlerad450f22013-11-29 22:32:56 +00001623 // beqz L1
Reed Kotler0f007fc2013-11-05 08:14:14 +00001624 // b L2
1625 // =>
Reed Kotlerad450f22013-11-29 22:32:56 +00001626 // bnez L2
Reed Kotler0f007fc2013-11-05 08:14:14 +00001627 // b L1
Reed Kotlerad450f22013-11-29 22:32:56 +00001628 unsigned BMITargetOperand = branchTargetOperand(BMI);
1629 MachineBasicBlock *NewDest =
1630 BMI->getOperand(BMITargetOperand).getMBB();
Reed Kotler0f007fc2013-11-05 08:14:14 +00001631 if (isBBInRange(MI, NewDest, Br.MaxDisp)) {
1632 DEBUG(dbgs() << " Invert Bcc condition and swap its destination with "
1633 << *BMI);
Reed Kotler59975c22013-12-03 23:42:51 +00001634 MI->setDesc(TII->get(OppositeBranchOpcode));
Reed Kotlerad450f22013-11-29 22:32:56 +00001635 BMI->getOperand(BMITargetOperand).setMBB(DestBB);
1636 MI->getOperand(TargetOperand).setMBB(NewDest);
Reed Kotler0f007fc2013-11-05 08:14:14 +00001637 return true;
1638 }
1639 }
1640 }
1641
Reed Kotlerad450f22013-11-29 22:32:56 +00001642
Reed Kotler0f007fc2013-11-05 08:14:14 +00001643 if (NeedSplit) {
1644 splitBlockBeforeInstr(MI);
1645 // No need for the branch to the next block. We're adding an unconditional
1646 // branch to the destination.
1647 int delta = TII->GetInstSizeInBytes(&MBB->back());
1648 BBInfo[MBB->getNumber()].Size -= delta;
1649 MBB->back().eraseFromParent();
1650 // BBInfo[SplitBB].Offset is wrong temporarily, fixed below
1651 }
Benjamin Kramerb6d0bd42014-03-02 12:27:27 +00001652 MachineBasicBlock *NextBB = std::next(MachineFunction::iterator(MBB));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001653
1654 DEBUG(dbgs() << " Insert B to BB#" << DestBB->getNumber()
1655 << " also invert condition and change dest. to BB#"
1656 << NextBB->getNumber() << "\n");
1657
1658 // Insert a new conditional branch and a new unconditional branch.
1659 // Also update the ImmBranch as well as adding a new entry for the new branch.
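  // Branches such as beqz carry an explicit register operand; T-register
  // branches (bteqz/btnez) do not, hence the check on the operand count.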
Reed Kotler59975c22013-12-03 23:42:51 +00001660 if (MI->getNumExplicitOperands() == 2) {
1661 BuildMI(MBB, DebugLoc(), TII->get(OppositeBranchOpcode))
1662 .addReg(MI->getOperand(0).getReg())
1663 .addMBB(NextBB);
Reed Kotler47f3c64a2013-12-19 00:43:08 +00001664 } else {
1665 BuildMI(MBB, DebugLoc(), TII->get(OppositeBranchOpcode))
1666 .addMBB(NextBB);
Reed Kotler59975c22013-12-03 23:42:51 +00001667 }
Reed Kotler0f007fc2013-11-05 08:14:14 +00001668 Br.MI = &MBB->back();
1669 BBInfo[MBB->getNumber()].Size += TII->GetInstSizeInBytes(&MBB->back());
1670 BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr)).addMBB(DestBB);
1671 BBInfo[MBB->getNumber()].Size += TII->GetInstSizeInBytes(&MBB->back());
1672 unsigned MaxDisp = getUnconditionalBrDisp(Br.UncondBr);
1673 ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp, false, Br.UncondBr));
1674
1675 // Remove the old conditional branch. It may or may not still be in MBB.
1676 BBInfo[MI->getParent()->getNumber()].Size -= TII->GetInstSizeInBytes(MI);
1677 MI->eraseFromParent();
1678 adjustBBOffsetsAfter(MBB);
1679 return true;
1680}
1681
Reed Kotler91ae9822013-10-27 21:57:36 +00001682
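/// prescanForConstants - Find LwConstant32 pseudo instructions, give each
/// 32-bit literal a constant-pool entry, and rewrite the instruction as a
/// PC-relative load (LwRxPcTcp16) of that entry.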
1683void MipsConstantIslands::prescanForConstants() {
Reed Kotler0f007fc2013-11-05 08:14:14 +00001684 unsigned J = 0;
1685 (void)J;
Reed Kotler91ae9822013-10-27 21:57:36 +00001686 for (MachineFunction::iterator B =
1687 MF->begin(), E = MF->end(); B != E; ++B) {
1688 for (MachineBasicBlock::instr_iterator I =
1689 B->instr_begin(), EB = B->instr_end(); I != EB; ++I) {
1690 switch(I->getDesc().getOpcode()) {
1691 case Mips::LwConstant32: {
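        // Record the literal in the constant pool, then rewrite the operands
        // so the instruction references the new pool entry instead.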
Reed Kotlera787aa22013-11-24 06:18:50 +00001692 PrescannedForConstants = true;
Reed Kotler91ae9822013-10-27 21:57:36 +00001693 DEBUG(dbgs() << "constant island constant " << *I << "\n");
1694 J = I->getNumOperands();
1695 DEBUG(dbgs() << "num operands " << J << "\n");
1696 MachineOperand& Literal = I->getOperand(1);
1697 if (Literal.isImm()) {
1698 int64_t V = Literal.getImm();
1699 DEBUG(dbgs() << "literal " << V << "\n");
1700 Type *Int32Ty =
1701 Type::getInt32Ty(MF->getFunction()->getContext());
1702 const Constant *C = ConstantInt::get(Int32Ty, V);
1703 unsigned index = MCP->getConstantPoolIndex(C, 4);
1704 I->getOperand(2).ChangeToImmediate(index);
1705 DEBUG(dbgs() << "constant island constant " << *I << "\n");
Reed Kotler0f007fc2013-11-05 08:14:14 +00001706 I->setDesc(TII->get(Mips::LwRxPcTcp16));
Reed Kotler91ae9822013-10-27 21:57:36 +00001707 I->RemoveOperand(1);
1708 I->RemoveOperand(1);
1709 I->addOperand(MachineOperand::CreateCPI(index, 0));
Reed Kotler0f007fc2013-11-05 08:14:14 +00001710 I->addOperand(MachineOperand::CreateImm(4));
Reed Kotler91ae9822013-10-27 21:57:36 +00001711 }
1712 break;
1713 }
1714 default:
1715 break;
1716 }
1717 }
1718 }
1719}
Reed Kotler0f007fc2013-11-05 08:14:14 +00001720