//===-- Thumb2SizeReduction.cpp - Thumb2 code size reduction pass -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ARM.h"
#include "ARMBaseInstrInfo.h"
#include "ARMSubtarget.h"
#include "MCTargetDesc/ARMAddressingModes.h"
#include "Thumb2InstrInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/IR/Function.h" // To access Function attributes
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
using namespace llvm;

#define DEBUG_TYPE "t2-reduce-size"

STATISTIC(NumNarrows,  "Number of 32-bit instrs reduced to 16-bit ones");
STATISTIC(Num2Addrs,   "Number of 32-bit instrs reduced to 2addr 16-bit ones");
STATISTIC(NumLdSts,    "Number of 32-bit load / store reduced to 16-bit ones");

static cl::opt<int> ReduceLimit("t2-reduce-limit",
                                cl::init(-1), cl::Hidden);
static cl::opt<int> ReduceLimit2Addr("t2-reduce-limit2",
                                     cl::init(-1), cl::Hidden);
static cl::opt<int> ReduceLimitLdSt("t2-reduce-limit3",
                                    cl::init(-1), cl::Hidden);

namespace {
  /// ReduceTable - A static table with information on mapping from wide
  /// opcodes to narrow ones.
  struct ReduceEntry {
    uint16_t WideOpc;      // Wide opcode
    uint16_t NarrowOpc1;   // Narrow opcode to transform to
    uint16_t NarrowOpc2;   // Narrow opcode when it's two-address
    uint8_t  Imm1Limit;    // Limit of immediate field (bits)
    uint8_t  Imm2Limit;    // Limit of immediate field when it's two-address
    unsigned LowRegs1 : 1; // Only possible if low-registers are used
    unsigned LowRegs2 : 1; // Only possible if low-registers are used (2addr)
    unsigned PredCC1  : 2; // 0 - If predicated, cc is on and vice versa.
                           // 1 - No cc field.
                           // 2 - Always set CPSR.
    unsigned PredCC2  : 2;
    unsigned PartFlag : 1; // 16-bit instruction does partial flag update
    unsigned Special  : 1; // Needs to be dealt with specially
    unsigned AvoidMovs: 1; // Avoid movs with shifter operand (for Swift)
  };

  static const ReduceEntry ReduceTable[] = {
  // Wide,           Narrow1,         Narrow2,      imm1,imm2, lo1, lo2, P/C,PF,S,AM
  { ARM::t2ADCrr,    0,               ARM::tADC,      0,   0,   0,   1,  0,0, 0,0,0 },
  { ARM::t2ADDri,    ARM::tADDi3,     ARM::tADDi8,    3,   8,   1,   1,  0,0, 0,1,0 },
  { ARM::t2ADDrr,    ARM::tADDrr,     ARM::tADDhirr,  0,   0,   1,   0,  0,1, 0,0,0 },
  { ARM::t2ADDSri,   ARM::tADDi3,     ARM::tADDi8,    3,   8,   1,   1,  2,2, 0,1,0 },
  { ARM::t2ADDSrr,   ARM::tADDrr,     0,              0,   0,   1,   0,  2,0, 0,1,0 },
  { ARM::t2ANDrr,    0,               ARM::tAND,      0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2ASRri,    ARM::tASRri,     0,              5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2ASRrr,    0,               ARM::tASRrr,    0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2BICrr,    0,               ARM::tBIC,      0,   0,   0,   1,  0,0, 1,0,0 },
  // FIXME: Disable CMN, as CCodes are backwards from compare expectations
  //{ ARM::t2CMNrr,  ARM::tCMN,       0,              0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMNzrr,   ARM::tCMNz,      0,              0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMPri,    ARM::tCMPi8,     0,              8,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMPrr,    ARM::tCMPhir,    0,              0,   0,   0,   0,  2,0, 0,1,0 },
  { ARM::t2EORrr,    0,               ARM::tEOR,      0,   0,   0,   1,  0,0, 1,0,0 },
  // FIXME: adr.n immediate offset must be multiple of 4.
  //{ ARM::t2LEApcrelJT, ARM::tLEApcrelJT, 0,         0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2LSLri,    ARM::tLSLri,     0,              5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2LSLrr,    0,               ARM::tLSLrr,    0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2LSRri,    ARM::tLSRri,     0,              5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2LSRrr,    0,               ARM::tLSRrr,    0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2MOVi,     ARM::tMOVi8,     0,              8,   0,   1,   0,  0,0, 1,0,0 },
  { ARM::t2MOVi16,   ARM::tMOVi8,     0,              8,   0,   1,   0,  0,0, 1,1,0 },
  // FIXME: Do we need the 16-bit 'S' variant?
  { ARM::t2MOVr,     ARM::tMOVr,      0,              0,   0,   0,   0,  1,0, 0,0,0 },
  { ARM::t2MUL,      0,               ARM::tMUL,      0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2MVNr,     ARM::tMVN,       0,              0,   0,   1,   0,  0,0, 0,0,0 },
  { ARM::t2ORRrr,    0,               ARM::tORR,      0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2REV,      ARM::tREV,       0,              0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2REV16,    ARM::tREV16,     0,              0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2REVSH,    ARM::tREVSH,     0,              0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2RORrr,    0,               ARM::tROR,      0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2RSBri,    ARM::tRSB,       0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2RSBSri,   ARM::tRSB,       0,              0,   0,   1,   0,  2,0, 0,1,0 },
  { ARM::t2SBCrr,    0,               ARM::tSBC,      0,   0,   0,   1,  0,0, 0,0,0 },
  { ARM::t2SUBri,    ARM::tSUBi3,     ARM::tSUBi8,    3,   8,   1,   1,  0,0, 0,0,0 },
  { ARM::t2SUBrr,    ARM::tSUBrr,     0,              0,   0,   1,   0,  0,0, 0,0,0 },
  { ARM::t2SUBSri,   ARM::tSUBi3,     ARM::tSUBi8,    3,   8,   1,   1,  2,2, 0,0,0 },
  { ARM::t2SUBSrr,   ARM::tSUBrr,     0,              0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2SXTB,     ARM::tSXTB,      0,              0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2SXTH,     ARM::tSXTH,      0,              0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2TSTrr,    ARM::tTST,       0,              0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2UXTB,     ARM::tUXTB,      0,              0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2UXTH,     ARM::tUXTH,      0,              0,   0,   1,   0,  1,0, 0,1,0 },

  // FIXME: Clean this up after splitting each Thumb load / store opcode
  // into multiple ones.
  { ARM::t2LDRi12,   ARM::tLDRi,      ARM::tLDRspi,   5,   8,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRs,     ARM::tLDRr,      0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRBi12,  ARM::tLDRBi,     0,              5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRBs,    ARM::tLDRBr,     0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRHi12,  ARM::tLDRHi,     0,              5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRHs,    ARM::tLDRHr,     0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRSBs,   ARM::tLDRSB,     0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRSHs,   ARM::tLDRSH,     0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRi12,   ARM::tSTRi,      ARM::tSTRspi,   5,   8,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRs,     ARM::tSTRr,      0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRBi12,  ARM::tSTRBi,     0,              5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRBs,    ARM::tSTRBr,     0,              0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRHi12,  ARM::tSTRHi,     0,              5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRHs,    ARM::tSTRHr,     0,              0,   0,   1,   0,  0,0, 0,1,0 },

  { ARM::t2LDMIA,    ARM::tLDMIA,     0,              0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2LDMIA_RET,0,               ARM::tPOP_RET,  0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2LDMIA_UPD,ARM::tLDMIA_UPD, ARM::tPOP,      0,   0,   1,   1,  1,1, 0,1,0 },
  // ARM::t2STM (with no basereg writeback) has no Thumb1 equivalent
  { ARM::t2STMIA_UPD,ARM::tSTMIA_UPD, 0,              0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2STMDB_UPD,0,               ARM::tPUSH,     0,   0,   1,   1,  1,1, 0,1,0 }
  };

  class Thumb2SizeReduce : public MachineFunctionPass {
  public:
    static char ID;
    Thumb2SizeReduce();

    const Thumb2InstrInfo *TII;
    const ARMSubtarget *STI;

    bool runOnMachineFunction(MachineFunction &MF) override;

    const char *getPassName() const override {
      return "Thumb2 instruction size reduction pass";
    }

  private:
    /// ReduceOpcodeMap - Maps wide opcode to index of entry in ReduceTable.
    DenseMap<unsigned, unsigned> ReduceOpcodeMap;

    bool canAddPseudoFlagDep(MachineInstr *Use, bool IsSelfLoop);

    bool VerifyPredAndCC(MachineInstr *MI, const ReduceEntry &Entry,
                         bool is2Addr, ARMCC::CondCodes Pred,
                         bool LiveCPSR, bool &HasCC, bool &CCDead);

    bool ReduceLoadStore(MachineBasicBlock &MBB, MachineInstr *MI,
                         const ReduceEntry &Entry);

    bool ReduceSpecial(MachineBasicBlock &MBB, MachineInstr *MI,
                       const ReduceEntry &Entry, bool LiveCPSR, bool IsSelfLoop);

    /// ReduceTo2Addr - Reduce a 32-bit instruction to a 16-bit two-address
    /// instruction.
    bool ReduceTo2Addr(MachineBasicBlock &MBB, MachineInstr *MI,
                       const ReduceEntry &Entry, bool LiveCPSR,
                       bool IsSelfLoop);

    /// ReduceToNarrow - Reduce a 32-bit instruction to a 16-bit
    /// non-two-address instruction.
    bool ReduceToNarrow(MachineBasicBlock &MBB, MachineInstr *MI,
                        const ReduceEntry &Entry, bool LiveCPSR,
                        bool IsSelfLoop);

    /// ReduceMI - Attempt to reduce MI, return true on success.
    bool ReduceMI(MachineBasicBlock &MBB, MachineInstr *MI,
                  bool LiveCPSR, bool IsSelfLoop);

    /// ReduceMBB - Reduce width of instructions in the specified basic block.
    bool ReduceMBB(MachineBasicBlock &MBB);

    bool OptimizeSize;
    bool MinimizeSize;

    // Last instruction to define CPSR in the current block.
    MachineInstr *CPSRDef;
    // Was CPSR last defined by a high latency instruction?
    // When CPSRDef is null, this refers to CPSR defs in predecessors.
    bool HighLatencyCPSR;

    struct MBBInfo {
      // The flags leaving this block have high latency.
      bool HighLatencyCPSR;
      // Has this block been visited yet?
      bool Visited;

      MBBInfo() : HighLatencyCPSR(false), Visited(false) {}
    };

    SmallVector<MBBInfo, 8> BlockInfo;
  };
  char Thumb2SizeReduce::ID = 0;
}

Thumb2SizeReduce::Thumb2SizeReduce() : MachineFunctionPass(ID) {
  OptimizeSize = MinimizeSize = false;
  for (unsigned i = 0, e = array_lengthof(ReduceTable); i != e; ++i) {
    unsigned FromOpc = ReduceTable[i].WideOpc;
    if (!ReduceOpcodeMap.insert(std::make_pair(FromOpc, i)).second)
      assert(false && "Duplicated entries?");
  }
}

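// HasImplicitCPSRDef - Return true if the instruction description lists CPSR
// among its implicit defs (e.g. compares, which always write the flags).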
static bool HasImplicitCPSRDef(const MCInstrDesc &MCID) {
  for (const uint16_t *Regs = MCID.getImplicitDefs(); *Regs; ++Regs)
    if (*Regs == ARM::CPSR)
      return true;
  return false;
}

// Check for a likely high-latency flag def.
static bool isHighLatencyCPSR(MachineInstr *Def) {
  switch(Def->getOpcode()) {
  case ARM::FMSTAT:
  case ARM::tMUL:
    return true;
  }
  return false;
}

/// canAddPseudoFlagDep - For A9 (and other out-of-order) implementations,
/// the 's' variants of the 16-bit instructions only partially update CPSR.
/// Abort the transformation to avoid adding a false dependency on the last
/// CPSR-setting instruction, which hurts the out-of-order execution engine's
/// ability to do register renaming.
/// This function checks if there is a read-after-write dependency between the
/// last instruction that defines CPSR and the current instruction. If there
/// is, then there is no harm done since the instruction cannot be retired
/// before the CPSR-setting instruction anyway.
/// Note, we are not doing full dependency analysis here for the sake of compile
/// time. We're not looking for cases like:
/// r0 = muls ...
/// r1 = add.w r0, ...
/// ...
///    = mul.w r1
/// In this case it would have been ok to narrow the mul.w to muls since there
/// is an indirect RAW dependency between the muls and the mul.w.
bool
Thumb2SizeReduce::canAddPseudoFlagDep(MachineInstr *Use, bool FirstInSelfLoop) {
  // Disable the check for -Oz (aka OptimizeForSizeHarder).
  if (MinimizeSize || !STI->avoidCPSRPartialUpdate())
    return false;

  if (!CPSRDef)
    // If this BB loops back to itself, conservatively avoid narrowing the
    // first instruction that does partial flag update.
    return HighLatencyCPSR || FirstInSelfLoop;

  SmallSet<unsigned, 2> Defs;
  for (const MachineOperand &MO : CPSRDef->operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == ARM::CPSR)
      continue;
    Defs.insert(Reg);
  }

  for (const MachineOperand &MO : Use->operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (Defs.count(Reg))
      return false;
  }

  // If the current CPSR has high latency, try to avoid the false dependency.
  if (HighLatencyCPSR)
    return true;

  // tMOVi8 usually doesn't start long dependency chains, and there are a lot
  // of them, so always shrink them when CPSR doesn't have high latency.
  if (Use->getOpcode() == ARM::t2MOVi ||
      Use->getOpcode() == ARM::t2MOVi16)
    return false;

  // No read-after-write dependency. The narrowing will add false dependency.
  return true;
}

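/// VerifyPredAndCC - Check whether the predicate and CPSR-update behaviour of
/// MI can be expressed by the candidate narrow opcode, using the PredCC1 /
/// PredCC2 constraints from the table entry. On success, HasCC / CCDead are
/// updated to describe the optional CPSR def the 16-bit instruction will get.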
bool
Thumb2SizeReduce::VerifyPredAndCC(MachineInstr *MI, const ReduceEntry &Entry,
                                  bool is2Addr, ARMCC::CondCodes Pred,
                                  bool LiveCPSR, bool &HasCC, bool &CCDead) {
  if ((is2Addr  && Entry.PredCC2 == 0) ||
      (!is2Addr && Entry.PredCC1 == 0)) {
    if (Pred == ARMCC::AL) {
      // Not predicated, must set CPSR.
      if (!HasCC) {
        // Original instruction was not setting CPSR, but CPSR is not
        // currently live anyway. It's ok to set it. The CPSR def is
        // dead though.
        if (!LiveCPSR) {
          HasCC = true;
          CCDead = true;
          return true;
        }
        return false;
      }
    } else {
      // Predicated, must not set CPSR.
      if (HasCC)
        return false;
    }
  } else if ((is2Addr  && Entry.PredCC2 == 2) ||
             (!is2Addr && Entry.PredCC1 == 2)) {
    /// Old opcode has an optional def of CPSR.
    if (HasCC)
      return true;
    // If old opcode does not implicitly define CPSR, then it's not ok since
    // these new opcodes' CPSR def is not meant to be thrown away. e.g. CMP.
    if (!HasImplicitCPSRDef(MI->getDesc()))
      return false;
    HasCC = true;
  } else {
    // 16-bit instruction does not set CPSR.
    if (HasCC)
      return false;
  }

  return true;
}

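// VerifyLowRegs - Return true if all explicit register operands of MI are
// low registers (r0-r7), allowing SP, LR or PC only where the corresponding
// 16-bit encoding can accept them (e.g. pop {..., pc}, push {..., lr}, or
// SP-relative ldr / str).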
static bool VerifyLowRegs(MachineInstr *MI) {
  unsigned Opc = MI->getOpcode();
  bool isPCOk = (Opc == ARM::t2LDMIA_RET || Opc == ARM::t2LDMIA_UPD);
  bool isLROk = (Opc == ARM::t2STMDB_UPD);
  bool isSPOk = isPCOk || isLROk;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isImplicit())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == ARM::CPSR)
      continue;
    if (isPCOk && Reg == ARM::PC)
      continue;
    if (isLROk && Reg == ARM::LR)
      continue;
    if (Reg == ARM::SP) {
      if (isSPOk)
        continue;
      if (i == 1 && (Opc == ARM::t2LDRi12 || Opc == ARM::t2STRi12))
        // Special case for these ldr / str with sp as base register.
        continue;
    }
    if (!isARMLowRegister(Reg))
      return false;
  }
  return true;
}

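/// ReduceLoadStore - Reduce a 32-bit load / store (including load / store
/// multiple) to its 16-bit equivalent, checking the addressing mode, offset
/// range and register constraints of the narrow encoding first.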
bool
Thumb2SizeReduce::ReduceLoadStore(MachineBasicBlock &MBB, MachineInstr *MI,
                                  const ReduceEntry &Entry) {
  if (ReduceLimitLdSt != -1 && ((int)NumLdSts >= ReduceLimitLdSt))
    return false;

  unsigned Scale = 1;
  bool HasImmOffset = false;
  bool HasShift = false;
  bool HasOffReg = true;
  bool isLdStMul = false;
  unsigned Opc = Entry.NarrowOpc1;
  unsigned OpNum = 3; // First 'rest' of operands.
  uint8_t  ImmLimit = Entry.Imm1Limit;

  switch (Entry.WideOpc) {
  default:
    llvm_unreachable("Unexpected Thumb2 load / store opcode!");
  case ARM::t2LDRi12:
  case ARM::t2STRi12:
    if (MI->getOperand(1).getReg() == ARM::SP) {
      Opc = Entry.NarrowOpc2;
      ImmLimit = Entry.Imm2Limit;
    }

    Scale = 4;
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRBi12:
  case ARM::t2STRBi12:
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRHi12:
  case ARM::t2STRHi12:
    Scale = 2;
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRs:
  case ARM::t2LDRBs:
  case ARM::t2LDRHs:
  case ARM::t2LDRSBs:
  case ARM::t2LDRSHs:
  case ARM::t2STRs:
  case ARM::t2STRBs:
  case ARM::t2STRHs:
    HasShift = true;
    OpNum = 4;
    break;
  case ARM::t2LDMIA: {
    unsigned BaseReg = MI->getOperand(0).getReg();
    assert(isARMLowRegister(BaseReg));

    // For the non-writeback version (this one), the base register must be
    // one of the registers being loaded.
    bool isOK = false;
    for (unsigned i = 3; i < MI->getNumOperands(); ++i) {
      if (MI->getOperand(i).getReg() == BaseReg) {
        isOK = true;
        break;
      }
    }

    if (!isOK)
      return false;

    OpNum = 0;
    isLdStMul = true;
    break;
  }
  case ARM::t2LDMIA_RET: {
    unsigned BaseReg = MI->getOperand(1).getReg();
    if (BaseReg != ARM::SP)
      return false;
    Opc = Entry.NarrowOpc2; // tPOP_RET
    OpNum = 2;
    isLdStMul = true;
    break;
  }
  case ARM::t2LDMIA_UPD:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD: {
    OpNum = 0;

    unsigned BaseReg = MI->getOperand(1).getReg();
    if (BaseReg == ARM::SP &&
        (Entry.WideOpc == ARM::t2LDMIA_UPD ||
         Entry.WideOpc == ARM::t2STMDB_UPD)) {
      Opc = Entry.NarrowOpc2; // tPOP or tPUSH
      OpNum = 2;
    } else if (!isARMLowRegister(BaseReg) ||
               (Entry.WideOpc != ARM::t2LDMIA_UPD &&
                Entry.WideOpc != ARM::t2STMIA_UPD)) {
      return false;
    }

    isLdStMul = true;
    break;
  }
  }

  unsigned OffsetReg = 0;
  bool OffsetKill = false;
  bool OffsetInternal = false;
  if (HasShift) {
    OffsetReg  = MI->getOperand(2).getReg();
    OffsetKill = MI->getOperand(2).isKill();
    OffsetInternal = MI->getOperand(2).isInternalRead();

    if (MI->getOperand(3).getImm())
      // Thumb1 addressing mode doesn't support shift.
      return false;
  }

  unsigned OffsetImm = 0;
  if (HasImmOffset) {
    OffsetImm = MI->getOperand(2).getImm();
    unsigned MaxOffset = ((1 << ImmLimit) - 1) * Scale;

    if ((OffsetImm & (Scale - 1)) || OffsetImm > MaxOffset)
      // Make sure the immediate field fits.
      return false;
  }

  // Add the 16-bit load / store instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, TII->get(Opc));
  if (!isLdStMul) {
    MIB.addOperand(MI->getOperand(0));
    MIB.addOperand(MI->getOperand(1));

    if (HasImmOffset)
      MIB.addImm(OffsetImm / Scale);

    assert((!HasShift || OffsetReg) && "Invalid so_reg load / store address!");

    if (HasOffReg)
      MIB.addReg(OffsetReg, getKillRegState(OffsetKill) |
                            getInternalReadRegState(OffsetInternal));
  }

  // Transfer the rest of operands.
  for (unsigned e = MI->getNumOperands(); OpNum != e; ++OpNum)
    MIB.addOperand(MI->getOperand(OpNum));

  // Transfer memoperands.
  MIB->setMemRefs(MI->memoperands_begin(), MI->memoperands_end());

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++NumLdSts;
  return true;
}

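/// ReduceSpecial - Handle the table entries marked Special: SP-relative
/// t2ADDri (which may become tADDrSPi), loads / stores, ADDS / SUBS and
/// other opcodes that need extra operand or predicate checks before a
/// normal narrowing can be attempted.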
bool
Thumb2SizeReduce::ReduceSpecial(MachineBasicBlock &MBB, MachineInstr *MI,
                                const ReduceEntry &Entry,
                                bool LiveCPSR, bool IsSelfLoop) {
  unsigned Opc = MI->getOpcode();
  if (Opc == ARM::t2ADDri) {
    // If the source register is SP, try to reduce to tADDrSPi, otherwise
    // it's a normal reduce.
    if (MI->getOperand(1).getReg() != ARM::SP) {
      if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
        return true;
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    }
    // Try to reduce to tADDrSPi.
    unsigned Imm = MI->getOperand(2).getImm();
538 // The immediate must be in range, the destination register must be a low
Jim Grosbached5134a2011-06-30 02:22:49 +0000539 // reg, the predicate must be "always" and the condition flags must not
540 // be being set.
    if (Imm & 3 || Imm > 1020)
      return false;
    if (!isARMLowRegister(MI->getOperand(0).getReg()))
      return false;
    if (MI->getOperand(3).getImm() != ARMCC::AL)
      return false;
    const MCInstrDesc &MCID = MI->getDesc();
    if (MCID.hasOptionalDef() &&
        MI->getOperand(MCID.getNumOperands()-1).getReg() == ARM::CPSR)
      return false;

    MachineInstrBuilder MIB = BuildMI(MBB, MI, MI->getDebugLoc(),
                                      TII->get(ARM::tADDrSPi))
      .addOperand(MI->getOperand(0))
      .addOperand(MI->getOperand(1))
      .addImm(Imm / 4); // The tADDrSPi has an implied scale by four.
    AddDefaultPred(MIB);

    // Transfer MI flags.
    MIB.setMIFlags(MI->getFlags());

    DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

    MBB.erase_instr(MI);
    ++NumNarrows;
    return true;
  }

  if (Entry.LowRegs1 && !VerifyLowRegs(MI))
    return false;

  if (MI->mayLoad() || MI->mayStore())
    return ReduceLoadStore(MBB, MI, Entry);

  switch (Opc) {
  default: break;
  case ARM::t2ADDSri:
  case ARM::t2ADDSrr: {
    unsigned PredReg = 0;
    if (getInstrPredicate(MI, PredReg) == ARMCC::AL) {
      switch (Opc) {
      default: break;
      case ARM::t2ADDSri: {
        if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
          return true;
        // fallthrough
      }
      case ARM::t2ADDSrr:
        return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
      }
    }
    break;
  }
  case ARM::t2RSBri:
  case ARM::t2RSBSri:
  case ARM::t2SXTB:
  case ARM::t2SXTH:
  case ARM::t2UXTB:
  case ARM::t2UXTH:
    if (MI->getOperand(2).getImm() == 0)
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    break;
  case ARM::t2MOVi16:
    // Can convert only 'pure' immediate operands, not immediates obtained as
    // globals' addresses.
    if (MI->getOperand(1).isImm())
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    break;
  case ARM::t2CMPrr: {
    // Try to reduce to the lo-reg only version first. Why there are two
    // versions of the instruction is a mystery.
    // It would be nice to just have two entries in the master table that
    // are prioritized, but the table assumes a unique entry for each
    // source insn opcode. So for now, we hack a local entry record to use.
    static const ReduceEntry NarrowEntry =
      { ARM::t2CMPrr, ARM::tCMPr, 0, 0, 0, 1, 1, 2,0, 0,1,0 };
    if (ReduceToNarrow(MBB, MI, NarrowEntry, LiveCPSR, IsSelfLoop))
      return true;
    return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
  }
  }
  return false;
}

bool
Thumb2SizeReduce::ReduceTo2Addr(MachineBasicBlock &MBB, MachineInstr *MI,
                                const ReduceEntry &Entry,
                                bool LiveCPSR, bool IsSelfLoop) {
  if (ReduceLimit2Addr != -1 && ((int)Num2Addrs >= ReduceLimit2Addr))
    return false;

  if (!MinimizeSize && !OptimizeSize && Entry.AvoidMovs &&
      STI->avoidMOVsShifterOperand())
    // Don't issue movs with shifter operand for some CPUs unless we
    // are optimizing / minimizing for size.
    return false;

  unsigned Reg0 = MI->getOperand(0).getReg();
  unsigned Reg1 = MI->getOperand(1).getReg();
  // t2MUL is "special". The tied source operand is second, not first.
  if (MI->getOpcode() == ARM::t2MUL) {
    unsigned Reg2 = MI->getOperand(2).getReg();
    // Early exit if the regs aren't all low regs.
    if (!isARMLowRegister(Reg0) || !isARMLowRegister(Reg1)
        || !isARMLowRegister(Reg2))
      return false;
    if (Reg0 != Reg2) {
      // If the other operand also isn't the same as the destination, we
      // can't reduce.
      if (Reg1 != Reg0)
        return false;
      // Try to commute the operands to make it a 2-address instruction.
      MachineInstr *CommutedMI = TII->commuteInstruction(MI);
      if (!CommutedMI)
        return false;
    }
  } else if (Reg0 != Reg1) {
    // Try to commute the operands to make it a 2-address instruction.
    unsigned CommOpIdx1, CommOpIdx2;
    if (!TII->findCommutedOpIndices(MI, CommOpIdx1, CommOpIdx2) ||
        CommOpIdx1 != 1 || MI->getOperand(CommOpIdx2).getReg() != Reg0)
      return false;
    MachineInstr *CommutedMI = TII->commuteInstruction(MI);
    if (!CommutedMI)
      return false;
  }
  if (Entry.LowRegs2 && !isARMLowRegister(Reg0))
    return false;
  if (Entry.Imm2Limit) {
    unsigned Imm = MI->getOperand(2).getImm();
    unsigned Limit = (1 << Entry.Imm2Limit) - 1;
    if (Imm > Limit)
      return false;
  } else {
    unsigned Reg2 = MI->getOperand(2).getReg();
    if (Entry.LowRegs2 && !isARMLowRegister(Reg2))
      return false;
  }

  // Check if it's possible / necessary to transfer the predicate.
  const MCInstrDesc &NewMCID = TII->get(Entry.NarrowOpc2);
  unsigned PredReg = 0;
  ARMCC::CondCodes Pred = getInstrPredicate(MI, PredReg);
  bool SkipPred = false;
  if (Pred != ARMCC::AL) {
    if (!NewMCID.isPredicable())
      // Can't transfer predicate, fail.
      return false;
  } else {
    SkipPred = !NewMCID.isPredicable();
  }

  bool HasCC = false;
  bool CCDead = false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.hasOptionalDef()) {
    unsigned NumOps = MCID.getNumOperands();
    HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
    if (HasCC && MI->getOperand(NumOps-1).isDead())
      CCDead = true;
  }
  if (!VerifyPredAndCC(MI, Entry, true, Pred, LiveCPSR, HasCC, CCDead))
    return false;

  // Avoid adding a false dependency on partial flag update by some 16-bit
  // instructions which have the 's' bit set.
  if (Entry.PartFlag && NewMCID.hasOptionalDef() && HasCC &&
      canAddPseudoFlagDep(MI, IsSelfLoop))
    return false;

  // Add the 16-bit instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, NewMCID);
  MIB.addOperand(MI->getOperand(0));
  if (NewMCID.hasOptionalDef()) {
    if (HasCC)
      AddDefaultT1CC(MIB, CCDead);
    else
      AddNoT1CC(MIB);
  }

  // Transfer the rest of operands.
  unsigned NumOps = MCID.getNumOperands();
  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.OpInfo[i].isOptionalDef())
      continue;
    if (SkipPred && MCID.OpInfo[i].isPredicate())
      continue;
    MIB.addOperand(MI->getOperand(i));
  }

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++Num2Addrs;
  return true;
}

bool
Thumb2SizeReduce::ReduceToNarrow(MachineBasicBlock &MBB, MachineInstr *MI,
                                 const ReduceEntry &Entry,
                                 bool LiveCPSR, bool IsSelfLoop) {
  if (ReduceLimit != -1 && ((int)NumNarrows >= ReduceLimit))
    return false;

  if (!MinimizeSize && !OptimizeSize && Entry.AvoidMovs &&
      STI->avoidMOVsShifterOperand())
    // Don't issue movs with shifter operand for some CPUs unless we
    // are optimizing / minimizing for size.
    return false;

  unsigned Limit = ~0U;
  if (Entry.Imm1Limit)
    Limit = (1 << Entry.Imm1Limit) - 1;

  const MCInstrDesc &MCID = MI->getDesc();
  for (unsigned i = 0, e = MCID.getNumOperands(); i != e; ++i) {
    if (MCID.OpInfo[i].isPredicate())
      continue;
    const MachineOperand &MO = MI->getOperand(i);
    if (MO.isReg()) {
      unsigned Reg = MO.getReg();
      if (!Reg || Reg == ARM::CPSR)
        continue;
      if (Entry.LowRegs1 && !isARMLowRegister(Reg))
        return false;
    } else if (MO.isImm() &&
               !MCID.OpInfo[i].isPredicate()) {
      if (((unsigned)MO.getImm()) > Limit)
        return false;
    }
  }

  // Check if it's possible / necessary to transfer the predicate.
  const MCInstrDesc &NewMCID = TII->get(Entry.NarrowOpc1);
  unsigned PredReg = 0;
  ARMCC::CondCodes Pred = getInstrPredicate(MI, PredReg);
  bool SkipPred = false;
  if (Pred != ARMCC::AL) {
    if (!NewMCID.isPredicable())
      // Can't transfer predicate, fail.
      return false;
  } else {
    SkipPred = !NewMCID.isPredicable();
  }

  bool HasCC = false;
  bool CCDead = false;
  if (MCID.hasOptionalDef()) {
    unsigned NumOps = MCID.getNumOperands();
    HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
    if (HasCC && MI->getOperand(NumOps-1).isDead())
      CCDead = true;
  }
  if (!VerifyPredAndCC(MI, Entry, false, Pred, LiveCPSR, HasCC, CCDead))
    return false;

  // Avoid adding a false dependency on partial flag update by some 16-bit
  // instructions which have the 's' bit set.
  if (Entry.PartFlag && NewMCID.hasOptionalDef() && HasCC &&
      canAddPseudoFlagDep(MI, IsSelfLoop))
    return false;

  // Add the 16-bit instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, NewMCID);
  MIB.addOperand(MI->getOperand(0));
  if (NewMCID.hasOptionalDef()) {
    if (HasCC)
      AddDefaultT1CC(MIB, CCDead);
    else
      AddNoT1CC(MIB);
  }

  // Transfer the rest of operands.
  unsigned NumOps = MCID.getNumOperands();
  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.OpInfo[i].isOptionalDef())
      continue;
    if ((MCID.getOpcode() == ARM::t2RSBSri ||
         MCID.getOpcode() == ARM::t2RSBri ||
         MCID.getOpcode() == ARM::t2SXTB ||
         MCID.getOpcode() == ARM::t2SXTH ||
         MCID.getOpcode() == ARM::t2UXTB ||
         MCID.getOpcode() == ARM::t2UXTH) && i == 2)
      // Skip the zero immediate operand, it's now implicit.
      continue;
    bool isPred = (i < NumOps && MCID.OpInfo[i].isPredicate());
    if (SkipPred && isPred)
      continue;
    const MachineOperand &MO = MI->getOperand(i);
    if (MO.isReg() && MO.isImplicit() && MO.getReg() == ARM::CPSR)
      // Skip implicit def of CPSR. Either it's modeled as an optional
      // def now or it's already an implicit def on the new instruction.
      continue;
    MIB.addOperand(MO);
  }
  if (!MCID.isPredicable() && NewMCID.isPredicable())
    AddDefaultPred(MIB);

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++NumNarrows;
  return true;
}

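// UpdateCPSRDef - Update CPSR liveness after MI: set DefCPSR if MI defines
// CPSR at all, and return true if CPSR should be considered live afterwards
// (MI has a non-dead CPSR def, or CPSR was already live).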
static bool UpdateCPSRDef(MachineInstr &MI, bool LiveCPSR, bool &DefCPSR) {
  bool HasDef = false;
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isUse())
      continue;
    if (MO.getReg() != ARM::CPSR)
      continue;

    DefCPSR = true;
    if (!MO.isDead())
      HasDef = true;
  }

  return HasDef || LiveCPSR;
}

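// UpdateCPSRUse - Update CPSR liveness across MI's uses: if MI kills CPSR,
// it is no longer live afterwards.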
static bool UpdateCPSRUse(MachineInstr &MI, bool LiveCPSR) {
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isDef())
      continue;
    if (MO.getReg() != ARM::CPSR)
      continue;
    assert(LiveCPSR && "CPSR liveness tracking is wrong!");
    if (MO.isKill()) {
      LiveCPSR = false;
      break;
    }
  }

  return LiveCPSR;
}

bool Thumb2SizeReduce::ReduceMI(MachineBasicBlock &MBB, MachineInstr *MI,
                                bool LiveCPSR, bool IsSelfLoop) {
  unsigned Opcode = MI->getOpcode();
  DenseMap<unsigned, unsigned>::iterator OPI = ReduceOpcodeMap.find(Opcode);
  if (OPI == ReduceOpcodeMap.end())
    return false;
  const ReduceEntry &Entry = ReduceTable[OPI->second];

  // Don't attempt normal reductions on "special" cases for now.
  if (Entry.Special)
    return ReduceSpecial(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

  // Try to transform to a 16-bit two-address instruction.
  if (Entry.NarrowOpc2 &&
      ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
    return true;

  // Try to transform to a 16-bit non-two-address instruction.
  if (Entry.NarrowOpc1 &&
      ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
    return true;

  return false;
}

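// ReduceMBB walks the block's instructions (including those inside bundles),
// tracking CPSR liveness and the last CPSR-defining instruction so that each
// candidate can be narrowed without changing the observable flag behaviour.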
bool Thumb2SizeReduce::ReduceMBB(MachineBasicBlock &MBB) {
  bool Modified = false;

  // Yes, CPSR could be livein.
  bool LiveCPSR = MBB.isLiveIn(ARM::CPSR);
  MachineInstr *BundleMI = nullptr;

  CPSRDef = nullptr;
  HighLatencyCPSR = false;

  // Check predecessors for the latest CPSRDef.
  for (auto *Pred : MBB.predecessors()) {
    const MBBInfo &PInfo = BlockInfo[Pred->getNumber()];
    if (!PInfo.Visited) {
      // Since blocks are visited in RPO, this must be a back-edge.
      continue;
    }
    if (PInfo.HighLatencyCPSR) {
      HighLatencyCPSR = true;
      break;
    }
  }

  // If this BB loops back to itself, conservatively avoid narrowing the
  // first instruction that does partial flag update.
  bool IsSelfLoop = MBB.isSuccessor(&MBB);
  MachineBasicBlock::instr_iterator MII = MBB.instr_begin(), E = MBB.instr_end();
  MachineBasicBlock::instr_iterator NextMII;
  for (; MII != E; MII = NextMII) {
    NextMII = std::next(MII);

    MachineInstr *MI = &*MII;
    if (MI->isBundle()) {
      BundleMI = MI;
      continue;
    }
    if (MI->isDebugValue())
      continue;

    LiveCPSR = UpdateCPSRUse(*MI, LiveCPSR);

    // Does NextMII belong to the same bundle as MI?
    bool NextInSameBundle = NextMII != E && NextMII->isBundledWithPred();

    if (ReduceMI(MBB, MI, LiveCPSR, IsSelfLoop)) {
      Modified = true;
      MachineBasicBlock::instr_iterator I = std::prev(NextMII);
      MI = &*I;
      // Removing and reinserting the first instruction in a bundle will break
      // up the bundle. Fix the bundling if it was broken.
      if (NextInSameBundle && !NextMII->isBundledWithPred())
        NextMII->bundleWithPred();
    }

    if (!NextInSameBundle && MI->isInsideBundle()) {
      // FIXME: Since post-ra scheduler operates on bundles, the CPSR kill
      // marker is only on the BUNDLE instruction. Process the BUNDLE
      // instruction as we finish with the bundled instruction to work around
      // the inconsistency.
      if (BundleMI->killsRegister(ARM::CPSR))
        LiveCPSR = false;
      MachineOperand *MO = BundleMI->findRegisterDefOperand(ARM::CPSR);
      if (MO && !MO->isDead())
        LiveCPSR = true;
      MO = BundleMI->findRegisterUseOperand(ARM::CPSR);
      if (MO && !MO->isKill())
        LiveCPSR = true;
    }

    bool DefCPSR = false;
    LiveCPSR = UpdateCPSRDef(*MI, LiveCPSR, DefCPSR);
    if (MI->isCall()) {
      // Calls don't really set CPSR.
      CPSRDef = nullptr;
      HighLatencyCPSR = false;
      IsSelfLoop = false;
    } else if (DefCPSR) {
      // This is the last CPSR defining instruction.
      CPSRDef = MI;
      HighLatencyCPSR = isHighLatencyCPSR(CPSRDef);
      IsSelfLoop = false;
    }
  }

  MBBInfo &Info = BlockInfo[MBB.getNumber()];
  Info.HighLatencyCPSR = HighLatencyCPSR;
  Info.Visited = true;
  return Modified;
}

bool Thumb2SizeReduce::runOnMachineFunction(MachineFunction &MF) {
  STI = &static_cast<const ARMSubtarget &>(MF.getSubtarget());
  if (STI->isThumb1Only() || STI->prefers32BitThumb())
    return false;

  TII = static_cast<const Thumb2InstrInfo *>(STI->getInstrInfo());

  // Optimizing / minimizing size?
  OptimizeSize = MF.getFunction()->hasFnAttribute(Attribute::OptimizeForSize);
  MinimizeSize = MF.getFunction()->hasFnAttribute(Attribute::MinSize);

  BlockInfo.clear();
  BlockInfo.resize(MF.getNumBlockIDs());

  // Visit blocks in reverse post-order so LastCPSRDef is known for all
  // predecessors.
  ReversePostOrderTraversal<MachineFunction*> RPOT(&MF);
  bool Modified = false;
  for (ReversePostOrderTraversal<MachineFunction*>::rpo_iterator
       I = RPOT.begin(), E = RPOT.end(); I != E; ++I)
    Modified |= ReduceMBB(**I);
  return Modified;
}

/// createThumb2SizeReductionPass - Returns an instance of the Thumb2 size
/// reduction pass.
FunctionPass *llvm::createThumb2SizeReductionPass() {
  return new Thumb2SizeReduce();
}