//===-- Thumb2SizeReduction.cpp - Thumb2 code size reduction pass -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "ARM.h"
#include "ARMBaseInstrInfo.h"
#include "ARMSubtarget.h"
#include "MCTargetDesc/ARMAddressingModes.h"
#include "Thumb2InstrInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/IR/Function.h" // To access Function attributes
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
using namespace llvm;

#define DEBUG_TYPE "t2-reduce-size"

STATISTIC(NumNarrows, "Number of 32-bit instrs reduced to 16-bit ones");
STATISTIC(Num2Addrs,  "Number of 32-bit instrs reduced to 2addr 16-bit ones");
STATISTIC(NumLdSts,   "Number of 32-bit load / store reduced to 16-bit ones");

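// Hidden options that cap how many reductions of each kind the pass will
// perform. The default of -1 means "no limit"; presumably these exist as
// debugging aids (e.g. for bisecting a bad reduction) rather than for tuning.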
static cl::opt<int> ReduceLimit("t2-reduce-limit",
                                cl::init(-1), cl::Hidden);
static cl::opt<int> ReduceLimit2Addr("t2-reduce-limit2",
                                     cl::init(-1), cl::Hidden);
static cl::opt<int> ReduceLimitLdSt("t2-reduce-limit3",
                                    cl::init(-1), cl::Hidden);

namespace {
  /// ReduceTable - A static table with information on mapping from wide
  /// opcodes to narrow ones.
  struct ReduceEntry {
    uint16_t WideOpc;      // Wide opcode
    uint16_t NarrowOpc1;   // Narrow opcode to transform to
    uint16_t NarrowOpc2;   // Narrow opcode when it's two-address
    uint8_t  Imm1Limit;    // Limit of immediate field (bits)
    uint8_t  Imm2Limit;    // Limit of immediate field when it's two-address
    unsigned LowRegs1 : 1; // Only possible if low-registers are used
    unsigned LowRegs2 : 1; // Only possible if low-registers are used (2addr)
    unsigned PredCC1  : 2; // 0 - If predicated, cc is on and vice versa.
                           // 1 - No cc field.
                           // 2 - Always set CPSR.
    unsigned PredCC2  : 2;
    unsigned PartFlag : 1; // 16-bit instruction does partial flag update
    unsigned Special  : 1; // Needs to be dealt with specially
    unsigned AvoidMovs: 1; // Avoid movs with shifter operand (for Swift)
  };

  static const ReduceEntry ReduceTable[] = {
  // Wide,        Narrow1,      Narrow2,     imm1,imm2, lo1, lo2, P/C,PF,S,AM
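  // How to read a row, using ARM::t2ADDri as an example: the wide add with
  // immediate can become tADDi3 (3-bit immediate) or, in its two-address form,
  // tADDi8 (8-bit immediate); both require low registers, and the entry is
  // marked Special because an ADD whose source is SP is handled separately in
  // ReduceSpecial (it may become tADDrSPi instead).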
  { ARM::t2ADCrr, 0,            ARM::tADC,     0,   0,   0,   1,  0,0, 0,0,0 },
  { ARM::t2ADDri, ARM::tADDi3,  ARM::tADDi8,   3,   8,   1,   1,  0,0, 0,1,0 },
  { ARM::t2ADDrr, ARM::tADDrr,  ARM::tADDhirr, 0,   0,   1,   0,  0,1, 0,0,0 },
  { ARM::t2ADDSri,ARM::tADDi3,  ARM::tADDi8,   3,   8,   1,   1,  2,2, 0,1,0 },
  { ARM::t2ADDSrr,ARM::tADDrr,  0,             0,   0,   1,   0,  2,0, 0,1,0 },
  { ARM::t2ANDrr, 0,            ARM::tAND,     0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2ASRri, ARM::tASRri,  0,             5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2ASRrr, 0,            ARM::tASRrr,   0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2BICrr, 0,            ARM::tBIC,     0,   0,   0,   1,  0,0, 1,0,0 },
  //FIXME: Disable CMN, as CCodes are backwards from compare expectations
  //{ ARM::t2CMNrr, ARM::tCMN,  0,             0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMNzrr, ARM::tCMNz,  0,             0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMPri, ARM::tCMPi8,  0,             8,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2CMPrr, ARM::tCMPhir, 0,             0,   0,   0,   0,  2,0, 0,1,0 },
  { ARM::t2EORrr, 0,            ARM::tEOR,     0,   0,   0,   1,  0,0, 1,0,0 },
  // FIXME: adr.n immediate offset must be multiple of 4.
  //{ ARM::t2LEApcrelJT,ARM::tLEApcrelJT, 0,   0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2LSLri, ARM::tLSLri,  0,             5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2LSLrr, 0,            ARM::tLSLrr,   0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2LSRri, ARM::tLSRri,  0,             5,   0,   1,   0,  0,0, 1,0,1 },
  { ARM::t2LSRrr, 0,            ARM::tLSRrr,   0,   0,   0,   1,  0,0, 1,0,1 },
  { ARM::t2MOVi,  ARM::tMOVi8,  0,             8,   0,   1,   0,  0,0, 1,0,0 },
  { ARM::t2MOVi16,ARM::tMOVi8,  0,             8,   0,   1,   0,  0,0, 1,1,0 },
  // FIXME: Do we need the 16-bit 'S' variant?
  { ARM::t2MOVr,  ARM::tMOVr,   0,             0,   0,   0,   0,  1,0, 0,0,0 },
  { ARM::t2MUL,   0,            ARM::tMUL,     0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2MVNr,  ARM::tMVN,    0,             0,   0,   1,   0,  0,0, 0,0,0 },
  { ARM::t2ORRrr, 0,            ARM::tORR,     0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2REV,   ARM::tREV,    0,             0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2REV16, ARM::tREV16,  0,             0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2REVSH, ARM::tREVSH,  0,             0,   0,   1,   0,  1,0, 0,0,0 },
  { ARM::t2RORrr, 0,            ARM::tROR,     0,   0,   0,   1,  0,0, 1,0,0 },
  { ARM::t2RSBri, ARM::tRSB,    0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2RSBSri,ARM::tRSB,    0,             0,   0,   1,   0,  2,0, 0,1,0 },
  { ARM::t2SBCrr, 0,            ARM::tSBC,     0,   0,   0,   1,  0,0, 0,0,0 },
  { ARM::t2SUBri, ARM::tSUBi3,  ARM::tSUBi8,   3,   8,   1,   1,  0,0, 0,0,0 },
  { ARM::t2SUBrr, ARM::tSUBrr,  0,             0,   0,   1,   0,  0,0, 0,0,0 },
  { ARM::t2SUBSri,ARM::tSUBi3,  ARM::tSUBi8,   3,   8,   1,   1,  2,2, 0,0,0 },
  { ARM::t2SUBSrr,ARM::tSUBrr,  0,             0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2SXTB,  ARM::tSXTB,   0,             0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2SXTH,  ARM::tSXTH,   0,             0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2TSTrr, ARM::tTST,    0,             0,   0,   1,   0,  2,0, 0,0,0 },
  { ARM::t2UXTB,  ARM::tUXTB,   0,             0,   0,   1,   0,  1,0, 0,1,0 },
  { ARM::t2UXTH,  ARM::tUXTH,   0,             0,   0,   1,   0,  1,0, 0,1,0 },

  // FIXME: Clean this up after splitting each Thumb load / store opcode
  // into multiple ones.
  { ARM::t2LDRi12,ARM::tLDRi,   ARM::tLDRspi,  5,   8,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRs,  ARM::tLDRr,   0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRBi12,ARM::tLDRBi, 0,             5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRBs, ARM::tLDRBr,  0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRHi12,ARM::tLDRHi, 0,             5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRHs, ARM::tLDRHr,  0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRSBs,ARM::tLDRSB,  0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2LDRSHs,ARM::tLDRSH,  0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRi12,ARM::tSTRi,   ARM::tSTRspi,  5,   8,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRs,  ARM::tSTRr,   0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRBi12,ARM::tSTRBi, 0,             5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRBs, ARM::tSTRBr,  0,             0,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRHi12,ARM::tSTRHi, 0,             5,   0,   1,   0,  0,0, 0,1,0 },
  { ARM::t2STRHs, ARM::tSTRHr,  0,             0,   0,   1,   0,  0,0, 0,1,0 },

  { ARM::t2LDMIA, ARM::tLDMIA,  0,             0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2LDMIA_RET,0,         ARM::tPOP_RET, 0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2LDMIA_UPD,ARM::tLDMIA_UPD,ARM::tPOP,0,   0,   1,   1,  1,1, 0,1,0 },
  // ARM::t2STM (with no basereg writeback) has no Thumb1 equivalent
  { ARM::t2STMIA_UPD,ARM::tSTMIA_UPD, 0,       0,   0,   1,   1,  1,1, 0,1,0 },
  { ARM::t2STMDB_UPD, 0,        ARM::tPUSH,    0,   0,   1,   1,  1,1, 0,1,0 }
  };

  class Thumb2SizeReduce : public MachineFunctionPass {
  public:
    static char ID;
    Thumb2SizeReduce();

    const Thumb2InstrInfo *TII;
    const ARMSubtarget *STI;

    bool runOnMachineFunction(MachineFunction &MF) override;

    const char *getPassName() const override {
      return "Thumb2 instruction size reduction pass";
    }

  private:
    /// ReduceOpcodeMap - Maps wide opcode to index of entry in ReduceTable.
    DenseMap<unsigned, unsigned> ReduceOpcodeMap;

    bool canAddPseudoFlagDep(MachineInstr *Use, bool IsSelfLoop);

    bool VerifyPredAndCC(MachineInstr *MI, const ReduceEntry &Entry,
                         bool is2Addr, ARMCC::CondCodes Pred,
                         bool LiveCPSR, bool &HasCC, bool &CCDead);

    bool ReduceLoadStore(MachineBasicBlock &MBB, MachineInstr *MI,
                         const ReduceEntry &Entry);

    bool ReduceSpecial(MachineBasicBlock &MBB, MachineInstr *MI,
                       const ReduceEntry &Entry, bool LiveCPSR, bool IsSelfLoop);

    /// ReduceTo2Addr - Reduce a 32-bit instruction to a 16-bit two-address
    /// instruction.
    bool ReduceTo2Addr(MachineBasicBlock &MBB, MachineInstr *MI,
                       const ReduceEntry &Entry, bool LiveCPSR,
                       bool IsSelfLoop);

    /// ReduceToNarrow - Reduce a 32-bit instruction to a 16-bit
    /// non-two-address instruction.
    bool ReduceToNarrow(MachineBasicBlock &MBB, MachineInstr *MI,
                        const ReduceEntry &Entry, bool LiveCPSR,
                        bool IsSelfLoop);

    /// ReduceMI - Attempt to reduce MI, return true on success.
    bool ReduceMI(MachineBasicBlock &MBB, MachineInstr *MI,
                  bool LiveCPSR, bool IsSelfLoop);

    /// ReduceMBB - Reduce width of instructions in the specified basic block.
    bool ReduceMBB(MachineBasicBlock &MBB);

    bool OptimizeSize;
    bool MinimizeSize;

    // Last instruction to define CPSR in the current block.
    MachineInstr *CPSRDef;
    // Was CPSR last defined by a high latency instruction?
    // When CPSRDef is null, this refers to CPSR defs in predecessors.
    bool HighLatencyCPSR;

    struct MBBInfo {
      // The flags leaving this block have high latency.
      bool HighLatencyCPSR;
      // Has this block been visited yet?
      bool Visited;

      MBBInfo() : HighLatencyCPSR(false), Visited(false) {}
    };

    SmallVector<MBBInfo, 8> BlockInfo;
  };
  char Thumb2SizeReduce::ID = 0;
}

Thumb2SizeReduce::Thumb2SizeReduce() : MachineFunctionPass(ID) {
  OptimizeSize = MinimizeSize = false;
  for (unsigned i = 0, e = array_lengthof(ReduceTable); i != e; ++i) {
    unsigned FromOpc = ReduceTable[i].WideOpc;
    if (!ReduceOpcodeMap.insert(std::make_pair(FromOpc, i)).second)
      assert(false && "Duplicated entries?");
  }
}

static bool HasImplicitCPSRDef(const MCInstrDesc &MCID) {
  for (const uint16_t *Regs = MCID.getImplicitDefs(); *Regs; ++Regs)
    if (*Regs == ARM::CPSR)
      return true;
  return false;
}

// Check for a likely high-latency flag def.
static bool isHighLatencyCPSR(MachineInstr *Def) {
  switch(Def->getOpcode()) {
  case ARM::FMSTAT:
  case ARM::tMUL:
    return true;
  }
  return false;
}

/// canAddPseudoFlagDep - On A9 (and other out-of-order) implementations, the
/// 16-bit 's'-suffixed instructions only partially update CPSR. Abort the
/// transformation to avoid adding a false dependency on the last CPSR-setting
/// instruction, which hurts the out-of-order engine's ability to do register
/// renaming.
/// This function checks whether there is a read-after-write dependency between
/// the last instruction that defines CPSR and the current instruction. If
/// there is, narrowing does no harm since the instruction cannot be retired
/// before the CPSR-setting instruction anyway.
/// Note that we are not doing full dependency analysis here for the sake of
/// compile time. We're not looking for cases like:
///   r0 = muls ...
///   r1 = add.w r0, ...
///   ...
///      = mul.w r1
/// In this case it would have been ok to narrow the mul.w to muls since there
/// is an indirect RAW dependency between the muls and the mul.w.
bool
Thumb2SizeReduce::canAddPseudoFlagDep(MachineInstr *Use, bool FirstInSelfLoop) {
  // Disable the check for -Oz (aka OptimizeForSizeHarder).
  if (MinimizeSize || !STI->avoidCPSRPartialUpdate())
    return false;

  if (!CPSRDef)
    // If this BB loops back to itself, conservatively avoid narrowing the
    // first instruction that does partial flag update.
    return HighLatencyCPSR || FirstInSelfLoop;

  SmallSet<unsigned, 2> Defs;
  for (const MachineOperand &MO : CPSRDef->operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == ARM::CPSR)
      continue;
    Defs.insert(Reg);
  }

  for (const MachineOperand &MO : Use->operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (Defs.count(Reg))
      return false;
  }

  // If the current CPSR has high latency, try to avoid the false dependency.
  if (HighLatencyCPSR)
    return true;

  // tMOVi8 usually doesn't start long dependency chains, and there are a lot
  // of them, so always shrink them when CPSR doesn't have high latency.
  if (Use->getOpcode() == ARM::t2MOVi ||
      Use->getOpcode() == ARM::t2MOVi16)
    return false;

  // No read-after-write dependency. The narrowing will add false dependency.
  return true;
}

bool
Thumb2SizeReduce::VerifyPredAndCC(MachineInstr *MI, const ReduceEntry &Entry,
                                  bool is2Addr, ARMCC::CondCodes Pred,
                                  bool LiveCPSR, bool &HasCC, bool &CCDead) {
  if ((is2Addr  && Entry.PredCC2 == 0) ||
      (!is2Addr && Entry.PredCC1 == 0)) {
    if (Pred == ARMCC::AL) {
      // Not predicated, must set CPSR.
      if (!HasCC) {
        // Original instruction was not setting CPSR, but CPSR is not
        // currently live anyway. It's ok to set it. The CPSR def is
        // dead though.
        if (!LiveCPSR) {
          HasCC = true;
          CCDead = true;
          return true;
        }
        return false;
      }
    } else {
      // Predicated, must not set CPSR.
      if (HasCC)
        return false;
    }
  } else if ((is2Addr  && Entry.PredCC2 == 2) ||
             (!is2Addr && Entry.PredCC1 == 2)) {
    // Old opcode has an optional def of CPSR.
    if (HasCC)
      return true;
    // If old opcode does not implicitly define CPSR, then it's not ok since
    // these new opcodes' CPSR def is not meant to be thrown away. e.g. CMP.
    if (!HasImplicitCPSRDef(MI->getDesc()))
      return false;
    HasCC = true;
  } else {
    // 16-bit instruction does not set CPSR.
    if (HasCC)
      return false;
  }

  return true;
}

static bool VerifyLowRegs(MachineInstr *MI) {
  unsigned Opc = MI->getOpcode();
  bool isPCOk = (Opc == ARM::t2LDMIA_RET || Opc == ARM::t2LDMIA ||
                 Opc == ARM::t2LDMDB || Opc == ARM::t2LDMIA_UPD ||
                 Opc == ARM::t2LDMDB_UPD);
  bool isLROk = (Opc == ARM::t2STMDB_UPD);
  bool isSPOk = isPCOk || isLROk;
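  // PC, LR and SP are not low registers, but the narrow pop/push/ldm forms can
  // legitimately involve them (pop may load PC, push may store LR, and both
  // implicitly address through SP), so they are tolerated for those opcodes.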
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isImplicit())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == ARM::CPSR)
      continue;
    if (isPCOk && Reg == ARM::PC)
      continue;
    if (isLROk && Reg == ARM::LR)
      continue;
    if (Reg == ARM::SP) {
      if (isSPOk)
        continue;
      if (i == 1 && (Opc == ARM::t2LDRi12 || Opc == ARM::t2STRi12))
        // Special case for these ldr / str with sp as base register.
        continue;
    }
    if (!isARMLowRegister(Reg))
      return false;
  }
  return true;
}

bool
Thumb2SizeReduce::ReduceLoadStore(MachineBasicBlock &MBB, MachineInstr *MI,
                                  const ReduceEntry &Entry) {
  if (ReduceLimitLdSt != -1 && ((int)NumLdSts >= ReduceLimitLdSt))
    return false;

  unsigned Scale = 1;
  bool HasImmOffset = false;
  bool HasShift = false;
  bool HasOffReg = true;
  bool isLdStMul = false;
  unsigned Opc = Entry.NarrowOpc1;
  unsigned OpNum = 3; // First 'rest' of operands.
  uint8_t  ImmLimit = Entry.Imm1Limit;

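  // Classify the wide opcode: decide which narrow opcode to use, whether the
  // narrow form takes an immediate or register offset (or is a load/store
  // multiple), and at which operand index the remaining operands start.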
  switch (Entry.WideOpc) {
  default:
    llvm_unreachable("Unexpected Thumb2 load / store opcode!");
  case ARM::t2LDRi12:
  case ARM::t2STRi12:
    if (MI->getOperand(1).getReg() == ARM::SP) {
      Opc = Entry.NarrowOpc2;
      ImmLimit = Entry.Imm2Limit;
    }

    Scale = 4;
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRBi12:
  case ARM::t2STRBi12:
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRHi12:
  case ARM::t2STRHi12:
    Scale = 2;
    HasImmOffset = true;
    HasOffReg = false;
    break;
  case ARM::t2LDRs:
  case ARM::t2LDRBs:
  case ARM::t2LDRHs:
  case ARM::t2LDRSBs:
  case ARM::t2LDRSHs:
  case ARM::t2STRs:
  case ARM::t2STRBs:
  case ARM::t2STRHs:
    HasShift = true;
    OpNum = 4;
    break;
  case ARM::t2LDMIA:
  case ARM::t2LDMDB: {
    unsigned BaseReg = MI->getOperand(0).getReg();
    if (!isARMLowRegister(BaseReg) || Entry.WideOpc != ARM::t2LDMIA)
      return false;

    // For the non-writeback version (this one), the base register must be
    // one of the registers being loaded.
    bool isOK = false;
    for (unsigned i = 4; i < MI->getNumOperands(); ++i) {
      if (MI->getOperand(i).getReg() == BaseReg) {
        isOK = true;
        break;
      }
    }

    if (!isOK)
      return false;

    OpNum = 0;
    isLdStMul = true;
    break;
  }
  case ARM::t2LDMIA_RET: {
    unsigned BaseReg = MI->getOperand(1).getReg();
    if (BaseReg != ARM::SP)
      return false;
    Opc = Entry.NarrowOpc2; // tPOP_RET
    OpNum = 2;
    isLdStMul = true;
    break;
  }
  case ARM::t2LDMIA_UPD:
  case ARM::t2LDMDB_UPD:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD: {
    OpNum = 0;

    unsigned BaseReg = MI->getOperand(1).getReg();
    if (BaseReg == ARM::SP &&
        (Entry.WideOpc == ARM::t2LDMIA_UPD ||
         Entry.WideOpc == ARM::t2STMDB_UPD)) {
      Opc = Entry.NarrowOpc2; // tPOP or tPUSH
      OpNum = 2;
    } else if (!isARMLowRegister(BaseReg) ||
               (Entry.WideOpc != ARM::t2LDMIA_UPD &&
                Entry.WideOpc != ARM::t2STMIA_UPD)) {
      return false;
    }

    isLdStMul = true;
    break;
  }
  }

  unsigned OffsetReg = 0;
  bool OffsetKill = false;
  bool OffsetInternal = false;
  if (HasShift) {
    OffsetReg  = MI->getOperand(2).getReg();
    OffsetKill = MI->getOperand(2).isKill();
    OffsetInternal = MI->getOperand(2).isInternalRead();

    if (MI->getOperand(3).getImm())
      // Thumb1 addressing mode doesn't support shift.
      return false;
  }

  unsigned OffsetImm = 0;
  if (HasImmOffset) {
    OffsetImm = MI->getOperand(2).getImm();
    unsigned MaxOffset = ((1 << ImmLimit) - 1) * Scale;

    if ((OffsetImm & (Scale - 1)) || OffsetImm > MaxOffset)
      // Make sure the immediate field fits.
      return false;
  }

  // Add the 16-bit load / store instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, TII->get(Opc));
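  // For single loads and stores the value, base and offset operands are
  // rebuilt explicitly below; for load/store-multiple forms everything from
  // OpNum onwards (register list, predicate, etc.) is copied verbatim.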
  if (!isLdStMul) {
    MIB.addOperand(MI->getOperand(0));
    MIB.addOperand(MI->getOperand(1));

    if (HasImmOffset)
      MIB.addImm(OffsetImm / Scale);

    assert((!HasShift || OffsetReg) && "Invalid so_reg load / store address!");

    if (HasOffReg)
      MIB.addReg(OffsetReg, getKillRegState(OffsetKill) |
                            getInternalReadRegState(OffsetInternal));
  }

  // Transfer the rest of operands.
  for (unsigned e = MI->getNumOperands(); OpNum != e; ++OpNum)
    MIB.addOperand(MI->getOperand(OpNum));

  // Transfer memoperands.
  MIB->setMemRefs(MI->memoperands_begin(), MI->memoperands_end());

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++NumLdSts;
  return true;
}

bool
Thumb2SizeReduce::ReduceSpecial(MachineBasicBlock &MBB, MachineInstr *MI,
                                const ReduceEntry &Entry,
                                bool LiveCPSR, bool IsSelfLoop) {
  unsigned Opc = MI->getOpcode();
  if (Opc == ARM::t2ADDri) {
    // If the source register is SP, try to reduce to tADDrSPi, otherwise
    // it's a normal reduce.
    if (MI->getOperand(1).getReg() != ARM::SP) {
      if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
        return true;
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    }
    // Try to reduce to tADDrSPi.
    unsigned Imm = MI->getOperand(2).getImm();
    // The immediate must be in range, the destination register must be a low
    // reg, the predicate must be "always" and the condition flags must not
    // be being set.
    if (Imm & 3 || Imm > 1020)
      return false;
    if (!isARMLowRegister(MI->getOperand(0).getReg()))
      return false;
    if (MI->getOperand(3).getImm() != ARMCC::AL)
      return false;
    const MCInstrDesc &MCID = MI->getDesc();
    if (MCID.hasOptionalDef() &&
        MI->getOperand(MCID.getNumOperands()-1).getReg() == ARM::CPSR)
      return false;

    MachineInstrBuilder MIB = BuildMI(MBB, MI, MI->getDebugLoc(),
                                      TII->get(ARM::tADDrSPi))
      .addOperand(MI->getOperand(0))
      .addOperand(MI->getOperand(1))
      .addImm(Imm / 4); // The tADDrSPi has an implied scale by four.
    AddDefaultPred(MIB);

    // Transfer MI flags.
    MIB.setMIFlags(MI->getFlags());

    DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

    MBB.erase_instr(MI);
    ++NumNarrows;
    return true;
  }

  if (Entry.LowRegs1 && !VerifyLowRegs(MI))
    return false;

  if (MI->mayLoad() || MI->mayStore())
    return ReduceLoadStore(MBB, MI, Entry);

  switch (Opc) {
  default: break;
  case ARM::t2ADDSri:
  case ARM::t2ADDSrr: {
    unsigned PredReg = 0;
    if (getInstrPredicate(MI, PredReg) == ARMCC::AL) {
      switch (Opc) {
      default: break;
      case ARM::t2ADDSri: {
        if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
          return true;
        // fallthrough
      }
      case ARM::t2ADDSrr:
        return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
      }
    }
    break;
  }
  case ARM::t2RSBri:
  case ARM::t2RSBSri:
  case ARM::t2SXTB:
  case ARM::t2SXTH:
  case ARM::t2UXTB:
  case ARM::t2UXTH:
    if (MI->getOperand(2).getImm() == 0)
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    break;
  case ARM::t2MOVi16:
    // Can convert only 'pure' immediate operands, not immediates obtained as
    // globals' addresses.
    if (MI->getOperand(1).isImm())
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
    break;
  case ARM::t2CMPrr: {
    // Try to reduce to the lo-reg only version first. Why there are two
    // versions of the instruction is a mystery.
    // It would be nice to just have two entries in the master table that
    // are prioritized, but the table assumes a unique entry for each
    // source insn opcode. So for now, we hack a local entry record to use.
    static const ReduceEntry NarrowEntry =
      { ARM::t2CMPrr,ARM::tCMPr, 0, 0, 0, 1, 1,2, 0, 0,1,0 };
    if (ReduceToNarrow(MBB, MI, NarrowEntry, LiveCPSR, IsSelfLoop))
      return true;
    return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
  }
  }
  return false;
}

bool
Thumb2SizeReduce::ReduceTo2Addr(MachineBasicBlock &MBB, MachineInstr *MI,
                                const ReduceEntry &Entry,
                                bool LiveCPSR, bool IsSelfLoop) {

  if (ReduceLimit2Addr != -1 && ((int)Num2Addrs >= ReduceLimit2Addr))
    return false;

  if (!MinimizeSize && !OptimizeSize && Entry.AvoidMovs &&
      STI->avoidMOVsShifterOperand())
    // Don't issue movs with shifter operand for some CPUs unless we
    // are optimizing / minimizing for size.
    return false;

  unsigned Reg0 = MI->getOperand(0).getReg();
  unsigned Reg1 = MI->getOperand(1).getReg();
  // t2MUL is "special". The tied source operand is second, not first.
  if (MI->getOpcode() == ARM::t2MUL) {
    unsigned Reg2 = MI->getOperand(2).getReg();
    // Early exit if the regs aren't all low regs.
    if (!isARMLowRegister(Reg0) || !isARMLowRegister(Reg1)
        || !isARMLowRegister(Reg2))
      return false;
    if (Reg0 != Reg2) {
      // If the other operand also isn't the same as the destination, we
      // can't reduce.
      if (Reg1 != Reg0)
        return false;
      // Try to commute the operands to make it a 2-address instruction.
      MachineInstr *CommutedMI = TII->commuteInstruction(MI);
      if (!CommutedMI)
        return false;
    }
  } else if (Reg0 != Reg1) {
    // Try to commute the operands to make it a 2-address instruction.
    unsigned CommOpIdx1, CommOpIdx2;
    if (!TII->findCommutedOpIndices(MI, CommOpIdx1, CommOpIdx2) ||
        CommOpIdx1 != 1 || MI->getOperand(CommOpIdx2).getReg() != Reg0)
      return false;
    MachineInstr *CommutedMI = TII->commuteInstruction(MI);
    if (!CommutedMI)
      return false;
  }
  if (Entry.LowRegs2 && !isARMLowRegister(Reg0))
    return false;
  if (Entry.Imm2Limit) {
    unsigned Imm = MI->getOperand(2).getImm();
    unsigned Limit = (1 << Entry.Imm2Limit) - 1;
    if (Imm > Limit)
      return false;
  } else {
    unsigned Reg2 = MI->getOperand(2).getReg();
    if (Entry.LowRegs2 && !isARMLowRegister(Reg2))
      return false;
  }

  // Check if it's possible / necessary to transfer the predicate.
  const MCInstrDesc &NewMCID = TII->get(Entry.NarrowOpc2);
  unsigned PredReg = 0;
  ARMCC::CondCodes Pred = getInstrPredicate(MI, PredReg);
  bool SkipPred = false;
  if (Pred != ARMCC::AL) {
    if (!NewMCID.isPredicable())
      // Can't transfer predicate, fail.
      return false;
  } else {
    SkipPred = !NewMCID.isPredicable();
  }

  bool HasCC = false;
  bool CCDead = false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.hasOptionalDef()) {
    unsigned NumOps = MCID.getNumOperands();
    HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
    if (HasCC && MI->getOperand(NumOps-1).isDead())
      CCDead = true;
  }
  if (!VerifyPredAndCC(MI, Entry, true, Pred, LiveCPSR, HasCC, CCDead))
    return false;

  // Avoid adding a false dependency on partial flag update by some 16-bit
  // instructions which have the 's' bit set.
  if (Entry.PartFlag && NewMCID.hasOptionalDef() && HasCC &&
      canAddPseudoFlagDep(MI, IsSelfLoop))
    return false;

  // Add the 16-bit instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, NewMCID);
  MIB.addOperand(MI->getOperand(0));
  if (NewMCID.hasOptionalDef()) {
    if (HasCC)
      AddDefaultT1CC(MIB, CCDead);
    else
      AddNoT1CC(MIB);
  }

  // Transfer the rest of operands.
  unsigned NumOps = MCID.getNumOperands();
  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.OpInfo[i].isOptionalDef())
      continue;
    if (SkipPred && MCID.OpInfo[i].isPredicate())
      continue;
    MIB.addOperand(MI->getOperand(i));
  }

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++Num2Addrs;
  return true;
}

bool
Thumb2SizeReduce::ReduceToNarrow(MachineBasicBlock &MBB, MachineInstr *MI,
                                 const ReduceEntry &Entry,
                                 bool LiveCPSR, bool IsSelfLoop) {
  if (ReduceLimit != -1 && ((int)NumNarrows >= ReduceLimit))
    return false;

  if (!MinimizeSize && !OptimizeSize && Entry.AvoidMovs &&
      STI->avoidMOVsShifterOperand())
    // Don't issue movs with shifter operand for some CPUs unless we
    // are optimizing / minimizing for size.
    return false;

  unsigned Limit = ~0U;
  if (Entry.Imm1Limit)
    Limit = (1 << Entry.Imm1Limit) - 1;

  const MCInstrDesc &MCID = MI->getDesc();
  for (unsigned i = 0, e = MCID.getNumOperands(); i != e; ++i) {
    if (MCID.OpInfo[i].isPredicate())
      continue;
    const MachineOperand &MO = MI->getOperand(i);
    if (MO.isReg()) {
      unsigned Reg = MO.getReg();
      if (!Reg || Reg == ARM::CPSR)
        continue;
      if (Entry.LowRegs1 && !isARMLowRegister(Reg))
        return false;
    } else if (MO.isImm() &&
               !MCID.OpInfo[i].isPredicate()) {
      if (((unsigned)MO.getImm()) > Limit)
        return false;
    }
  }

  // Check if it's possible / necessary to transfer the predicate.
  const MCInstrDesc &NewMCID = TII->get(Entry.NarrowOpc1);
  unsigned PredReg = 0;
  ARMCC::CondCodes Pred = getInstrPredicate(MI, PredReg);
  bool SkipPred = false;
  if (Pred != ARMCC::AL) {
    if (!NewMCID.isPredicable())
      // Can't transfer predicate, fail.
      return false;
  } else {
    SkipPred = !NewMCID.isPredicable();
  }

  bool HasCC = false;
  bool CCDead = false;
  if (MCID.hasOptionalDef()) {
    unsigned NumOps = MCID.getNumOperands();
    HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
    if (HasCC && MI->getOperand(NumOps-1).isDead())
      CCDead = true;
  }
  if (!VerifyPredAndCC(MI, Entry, false, Pred, LiveCPSR, HasCC, CCDead))
    return false;

  // Avoid adding a false dependency on partial flag update by some 16-bit
  // instructions which have the 's' bit set.
  if (Entry.PartFlag && NewMCID.hasOptionalDef() && HasCC &&
      canAddPseudoFlagDep(MI, IsSelfLoop))
    return false;

  // Add the 16-bit instruction.
  DebugLoc dl = MI->getDebugLoc();
  MachineInstrBuilder MIB = BuildMI(MBB, MI, dl, NewMCID);
  MIB.addOperand(MI->getOperand(0));
  if (NewMCID.hasOptionalDef()) {
    if (HasCC)
      AddDefaultT1CC(MIB, CCDead);
    else
      AddNoT1CC(MIB);
  }

  // Transfer the rest of operands.
  unsigned NumOps = MCID.getNumOperands();
  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.OpInfo[i].isOptionalDef())
      continue;
    if ((MCID.getOpcode() == ARM::t2RSBSri ||
         MCID.getOpcode() == ARM::t2RSBri ||
         MCID.getOpcode() == ARM::t2SXTB ||
         MCID.getOpcode() == ARM::t2SXTH ||
         MCID.getOpcode() == ARM::t2UXTB ||
         MCID.getOpcode() == ARM::t2UXTH) && i == 2)
      // Skip the zero immediate operand, it's now implicit.
      continue;
    bool isPred = (i < NumOps && MCID.OpInfo[i].isPredicate());
    if (SkipPred && isPred)
      continue;
    const MachineOperand &MO = MI->getOperand(i);
    if (MO.isReg() && MO.isImplicit() && MO.getReg() == ARM::CPSR)
      // Skip implicit def of CPSR. Either it's modeled as an optional
      // def now or it's already an implicit def on the new instruction.
      continue;
    MIB.addOperand(MO);
  }
  if (!MCID.isPredicable() && NewMCID.isPredicable())
    AddDefaultPred(MIB);

  // Transfer MI flags.
  MIB.setMIFlags(MI->getFlags());

  DEBUG(errs() << "Converted 32-bit: " << *MI << " to 16-bit: " << *MIB);

  MBB.erase_instr(MI);
  ++NumNarrows;
  return true;
}

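// The two helpers below keep the LiveCPSR flag up to date while a block is
// scanned forward: UpdateCPSRDef makes CPSR live after a non-dead def (and
// reports whether the instruction defines CPSR at all), while UpdateCPSRUse
// clears liveness once a use marked as a kill is seen.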
static bool UpdateCPSRDef(MachineInstr &MI, bool LiveCPSR, bool &DefCPSR) {
  bool HasDef = false;
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isUse())
      continue;
    if (MO.getReg() != ARM::CPSR)
      continue;

    DefCPSR = true;
    if (!MO.isDead())
      HasDef = true;
  }

  return HasDef || LiveCPSR;
}

static bool UpdateCPSRUse(MachineInstr &MI, bool LiveCPSR) {
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || MO.isUndef() || MO.isDef())
      continue;
    if (MO.getReg() != ARM::CPSR)
      continue;
    assert(LiveCPSR && "CPSR liveness tracking is wrong!");
    if (MO.isKill()) {
      LiveCPSR = false;
      break;
    }
  }

  return LiveCPSR;
}

bool Thumb2SizeReduce::ReduceMI(MachineBasicBlock &MBB, MachineInstr *MI,
                                bool LiveCPSR, bool IsSelfLoop) {
  unsigned Opcode = MI->getOpcode();
  DenseMap<unsigned, unsigned>::iterator OPI = ReduceOpcodeMap.find(Opcode);
  if (OPI == ReduceOpcodeMap.end())
    return false;
  const ReduceEntry &Entry = ReduceTable[OPI->second];

  // Don't attempt normal reductions on "special" cases for now.
  if (Entry.Special)
    return ReduceSpecial(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

  // Try to transform to a 16-bit two-address instruction.
  if (Entry.NarrowOpc2 &&
      ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
    return true;

  // Try to transform to a 16-bit non-two-address instruction.
  if (Entry.NarrowOpc1 &&
      ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
    return true;

  return false;
}

bool Thumb2SizeReduce::ReduceMBB(MachineBasicBlock &MBB) {
  bool Modified = false;

  // Yes, CPSR could be livein.
  bool LiveCPSR = MBB.isLiveIn(ARM::CPSR);
  MachineInstr *BundleMI = nullptr;

  CPSRDef = nullptr;
  HighLatencyCPSR = false;

  // Check predecessors for the latest CPSRDef.
  for (auto *Pred : MBB.predecessors()) {
    const MBBInfo &PInfo = BlockInfo[Pred->getNumber()];
    if (!PInfo.Visited) {
      // Since blocks are visited in RPO, this must be a back-edge.
      continue;
    }
    if (PInfo.HighLatencyCPSR) {
      HighLatencyCPSR = true;
      break;
    }
  }

  // If this BB loops back to itself, conservatively avoid narrowing the
  // first instruction that does partial flag update.
  bool IsSelfLoop = MBB.isSuccessor(&MBB);
  MachineBasicBlock::instr_iterator MII = MBB.instr_begin(), E = MBB.instr_end();
  MachineBasicBlock::instr_iterator NextMII;
  for (; MII != E; MII = NextMII) {
    NextMII = std::next(MII);

    MachineInstr *MI = &*MII;
    if (MI->isBundle()) {
      BundleMI = MI;
      continue;
    }
    if (MI->isDebugValue())
      continue;

    LiveCPSR = UpdateCPSRUse(*MI, LiveCPSR);

    // Does NextMII belong to the same bundle as MI?
    bool NextInSameBundle = NextMII != E && NextMII->isBundledWithPred();

    if (ReduceMI(MBB, MI, LiveCPSR, IsSelfLoop)) {
      Modified = true;
      MachineBasicBlock::instr_iterator I = std::prev(NextMII);
      MI = &*I;
      // Removing and reinserting the first instruction in a bundle will break
      // up the bundle. Fix the bundling if it was broken.
      if (NextInSameBundle && !NextMII->isBundledWithPred())
        NextMII->bundleWithPred();
    }

    if (!NextInSameBundle && MI->isInsideBundle()) {
      // FIXME: Since post-ra scheduler operates on bundles, the CPSR kill
      // marker is only on the BUNDLE instruction. Process the BUNDLE
      // instruction as we finish with the bundled instruction to work around
      // the inconsistency.
      if (BundleMI->killsRegister(ARM::CPSR))
        LiveCPSR = false;
      MachineOperand *MO = BundleMI->findRegisterDefOperand(ARM::CPSR);
      if (MO && !MO->isDead())
        LiveCPSR = true;
      MO = BundleMI->findRegisterUseOperand(ARM::CPSR);
      if (MO && !MO->isKill())
        LiveCPSR = true;
    }

    bool DefCPSR = false;
    LiveCPSR = UpdateCPSRDef(*MI, LiveCPSR, DefCPSR);
    if (MI->isCall()) {
      // Calls don't really set CPSR.
      CPSRDef = nullptr;
      HighLatencyCPSR = false;
      IsSelfLoop = false;
    } else if (DefCPSR) {
      // This is the last CPSR defining instruction.
      CPSRDef = MI;
      HighLatencyCPSR = isHighLatencyCPSR(CPSRDef);
      IsSelfLoop = false;
    }
  }

  MBBInfo &Info = BlockInfo[MBB.getNumber()];
  Info.HighLatencyCPSR = HighLatencyCPSR;
  Info.Visited = true;
  return Modified;
}

bool Thumb2SizeReduce::runOnMachineFunction(MachineFunction &MF) {
  STI = &static_cast<const ARMSubtarget &>(MF.getSubtarget());
  if (STI->isThumb1Only() || STI->prefers32BitThumb())
    return false;

  TII = static_cast<const Thumb2InstrInfo *>(STI->getInstrInfo());

  // Optimizing / minimizing size?
  OptimizeSize = MF.getFunction()->hasFnAttribute(Attribute::OptimizeForSize);
  MinimizeSize = MF.getFunction()->hasFnAttribute(Attribute::MinSize);
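  // Note: when optimizing or minimizing for size, the "avoid movs with shifter
  // operand" restriction is ignored, and under minsize the partial-flag-update
  // heuristic (canAddPseudoFlagDep) is disabled as well, so more instructions
  // become eligible for narrowing.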

  BlockInfo.clear();
  BlockInfo.resize(MF.getNumBlockIDs());

  // Visit blocks in reverse post-order so the CPSR state recorded in BlockInfo
  // is already known for all (non-back-edge) predecessors.
  ReversePostOrderTraversal<MachineFunction*> RPOT(&MF);
  bool Modified = false;
  for (ReversePostOrderTraversal<MachineFunction*>::rpo_iterator
       I = RPOT.begin(), E = RPOT.end(); I != E; ++I)
    Modified |= ReduceMBB(**I);
  return Modified;
}

/// createThumb2SizeReductionPass - Returns an instance of the Thumb2 size
/// reduction pass.
FunctionPass *llvm::createThumb2SizeReductionPass() {
  return new Thumb2SizeReduce();
}