/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* This file contains register allocation support. */

#include "mir_to_lir-inl.h"

#include "base/stringprintf.h"
#include "dex/compiler_ir.h"
#include "dex/dataflow_iterator-inl.h"
#include "dex/mir_graph.h"
#include "driver/compiler_driver.h"
#include "driver/dex_compilation_unit.h"
#include "utils/dex_cache_arrays_layout-inl.h"

namespace art {

/*
 * Free all allocated temps in the temp pools. Note that this does
 * not affect the "liveness" of a temp register, which will stay
 * live until it is either explicitly killed or reallocated.
 */
void Mir2Lir::ResetRegPool() {
  for (RegisterInfo* info : tempreg_info_) {
    info->MarkFree();
  }
  // Reset temp tracking sanity check.
  if (kIsDebugBuild) {
    live_sreg_ = INVALID_SREG;
  }
}

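/*
 * Storage is tracked as a bit mask, roughly one bit per 32-bit unit of the underlying
 * register (4 bytes -> one bit, 8 bytes -> two bits, and so on, saturating at 32 bits
 * for sizes 0 and 128). For a valid register the bits covering its own storage start
 * out clear in used_storage_ and liveness_ (free and dead); an invalid register is
 * marked fully used, presumably so it can never be handed out.
 */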
Mir2Lir::RegisterInfo::RegisterInfo(RegStorage r, const ResourceMask& mask)
  : reg_(r), is_temp_(false), wide_value_(false), dirty_(false), aliased_(false), partner_(r),
    s_reg_(INVALID_SREG), def_use_mask_(mask), master_(this), def_start_(nullptr),
    def_end_(nullptr), alias_chain_(nullptr) {
  switch (r.StorageSize()) {
    case 0: storage_mask_ = 0xffffffff; break;
    case 4: storage_mask_ = 0x00000001; break;
    case 8: storage_mask_ = 0x00000003; break;
    case 16: storage_mask_ = 0x0000000f; break;
    case 32: storage_mask_ = 0x000000ff; break;
    case 64: storage_mask_ = 0x0000ffff; break;
    case 128: storage_mask_ = 0xffffffff; break;
  }
  used_storage_ = r.Valid() ? ~storage_mask_ : storage_mask_;
  liveness_ = used_storage_;
}

Mir2Lir::RegisterPool::RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena,
                                    const ArrayRef<const RegStorage>& core_regs,
                                    const ArrayRef<const RegStorage>& core64_regs,
                                    const ArrayRef<const RegStorage>& sp_regs,
                                    const ArrayRef<const RegStorage>& dp_regs,
                                    const ArrayRef<const RegStorage>& reserved_regs,
                                    const ArrayRef<const RegStorage>& reserved64_regs,
                                    const ArrayRef<const RegStorage>& core_temps,
                                    const ArrayRef<const RegStorage>& core64_temps,
                                    const ArrayRef<const RegStorage>& sp_temps,
                                    const ArrayRef<const RegStorage>& dp_temps) :
    core_regs_(arena->Adapter()), next_core_reg_(0),
    core64_regs_(arena->Adapter()), next_core64_reg_(0),
    sp_regs_(arena->Adapter()), next_sp_reg_(0),
    dp_regs_(arena->Adapter()), next_dp_reg_(0), m2l_(m2l) {
  // Initialize the fast lookup map.
  m2l_->reginfo_map_.clear();
  m2l_->reginfo_map_.resize(RegStorage::kMaxRegs, nullptr);

  // Construct the register pool.
  core_regs_.reserve(core_regs.size());
  for (const RegStorage& reg : core_regs) {
    RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
    m2l_->reginfo_map_[reg.GetReg()] = info;
    core_regs_.push_back(info);
  }
  core64_regs_.reserve(core64_regs.size());
  for (const RegStorage& reg : core64_regs) {
    RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
    m2l_->reginfo_map_[reg.GetReg()] = info;
    core64_regs_.push_back(info);
  }
  sp_regs_.reserve(sp_regs.size());
  for (const RegStorage& reg : sp_regs) {
    RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
    m2l_->reginfo_map_[reg.GetReg()] = info;
    sp_regs_.push_back(info);
  }
  dp_regs_.reserve(dp_regs.size());
  for (const RegStorage& reg : dp_regs) {
    RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
    m2l_->reginfo_map_[reg.GetReg()] = info;
    dp_regs_.push_back(info);
  }

  // Keep special registers from being allocated.
  for (RegStorage reg : reserved_regs) {
    m2l_->MarkInUse(reg);
  }
  for (RegStorage reg : reserved64_regs) {
    m2l_->MarkInUse(reg);
  }

  // Mark temp regs - all others not in use can be used for promotion.
  for (RegStorage reg : core_temps) {
    m2l_->MarkTemp(reg);
  }
  for (RegStorage reg : core64_temps) {
    m2l_->MarkTemp(reg);
  }
  for (RegStorage reg : sp_temps) {
    m2l_->MarkTemp(reg);
  }
  for (RegStorage reg : dp_temps) {
    m2l_->MarkTemp(reg);
  }

  // Add an entry for InvalidReg with zero'd mask.
  RegisterInfo* invalid_reg = new (arena) RegisterInfo(RegStorage::InvalidReg(), kEncodeNone);
  m2l_->reginfo_map_[RegStorage::InvalidReg().GetReg()] = invalid_reg;

  // Existence of core64 registers implies wide references.
  if (core64_regs_.size() != 0) {
    ref_regs_ = &core64_regs_;
    next_ref_reg_ = &next_core64_reg_;
  } else {
    ref_regs_ = &core_regs_;
    next_ref_reg_ = &next_core_reg_;
  }
}

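/*
 * Dump format legend: R[reg:num:c|f] followed by T (temp), U (in use), W (wide),
 * p (partner reg), LV (live), D (dirty), SR (s_reg) and DEF (has a def-start LIR).
 */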
void Mir2Lir::DumpRegPool(ArenaVector<RegisterInfo*>* regs) {
  LOG(INFO) << "================================================";
  for (RegisterInfo* info : *regs) {
    LOG(INFO) << StringPrintf(
        "R[%d:%d:%c]: T:%d, U:%d, W:%d, p:%d, LV:%d, D:%d, SR:%d, DEF:%d",
        info->GetReg().GetReg(), info->GetReg().GetRegNum(), info->GetReg().IsFloat() ? 'f' : 'c',
        info->IsTemp(), info->InUse(), info->IsWide(), info->Partner().GetReg(), info->IsLive(),
        info->IsDirty(), info->SReg(), info->DefStart() != nullptr);
  }
  LOG(INFO) << "================================================";
}

void Mir2Lir::DumpCoreRegPool() {
  DumpRegPool(&reg_pool_->core_regs_);
  DumpRegPool(&reg_pool_->core64_regs_);
}

void Mir2Lir::DumpFpRegPool() {
  DumpRegPool(&reg_pool_->sp_regs_);
  DumpRegPool(&reg_pool_->dp_regs_);
}

void Mir2Lir::DumpRegPools() {
  LOG(INFO) << "Core registers";
  DumpCoreRegPool();
  LOG(INFO) << "FP registers";
  DumpFpRegPool();
}

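/*
 * Mark the contents of a temp register as no longer valid: the register itself, its
 * wide partner (if any), and any aliasing views that overlap its storage are all
 * clobbered. Non-temp (promoted) registers are left untouched.
 */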
void Mir2Lir::Clobber(RegStorage reg) {
  if (UNLIKELY(reg.IsPair())) {
    DCHECK(!GetRegInfo(reg.GetLow())->IsAliased());
    Clobber(reg.GetLow());
    DCHECK(!GetRegInfo(reg.GetHigh())->IsAliased());
    Clobber(reg.GetHigh());
  } else {
    RegisterInfo* info = GetRegInfo(reg);
    if (info->IsTemp() && !info->IsDead()) {
      if (info->GetReg().NotExactlyEquals(info->Partner())) {
        ClobberBody(GetRegInfo(info->Partner()));
      }
      ClobberBody(info);
      if (info->IsAliased()) {
        ClobberAliases(info, info->StorageMask());
      } else {
        RegisterInfo* master = info->Master();
        if (info != master) {
          ClobberBody(info->Master());
          ClobberAliases(info->Master(), info->StorageMask());
        }
      }
    }
  }
}

void Mir2Lir::ClobberAliases(RegisterInfo* info, uint32_t clobber_mask) {
  for (RegisterInfo* alias = info->GetAliasChain(); alias != nullptr;
       alias = alias->GetAliasChain()) {
    DCHECK(!alias->IsAliased());  // Only the master should be marked as aliased.
    // Only clobber if we have overlap.
    if ((alias->StorageMask() & clobber_mask) != 0) {
      ClobberBody(alias);
    }
  }
}

/*
 * Break the association between a Dalvik vreg and a physical temp register of either register
 * class.
 * TODO: Ideally, the public version of this code should not exist. Besides its local usage
 * in the register utilities, it is also used by code gen routines to work around a deficiency in
 * local register allocation, which fails to distinguish between the "in" and "out" identities
 * of Dalvik vregs. This can result in useless register copies when the same Dalvik vreg
 * is used both as the source and destination register of an operation in which the type
 * changes (for example: INT_TO_FLOAT v1, v1). Revisit when improved register allocation is
 * addressed.
 */
void Mir2Lir::ClobberSReg(int s_reg) {
  if (s_reg != INVALID_SREG) {
    if (kIsDebugBuild && s_reg == live_sreg_) {
      live_sreg_ = INVALID_SREG;
    }
    for (RegisterInfo* info : tempreg_info_) {
      if (info->SReg() == s_reg) {
        if (info->GetReg().NotExactlyEquals(info->Partner())) {
          // Dealing with a pair - clobber the other half.
          DCHECK(!info->IsAliased());
          ClobberBody(GetRegInfo(info->Partner()));
        }
        ClobberBody(info);
        if (info->IsAliased()) {
          ClobberAliases(info, info->StorageMask());
        }
      }
    }
  }
}

/*
 * SSA names associated with the initial definitions of Dalvik
 * registers are the same as the Dalvik register number (and
 * thus take the same position in the promotion_map). However,
 * the special Method* and compiler temp registers use negative
 * v_reg numbers to distinguish them and can have an arbitrary
 * ssa name (above the last original Dalvik register). This function
 * maps SSA names to positions in the promotion_map array.
 */
int Mir2Lir::SRegToPMap(int s_reg) {
  DCHECK_LT(s_reg, mir_graph_->GetNumSSARegs());
  DCHECK_GE(s_reg, 0);
  int v_reg = mir_graph_->SRegToVReg(s_reg);
  return v_reg;
}

// TODO: refactor following Alloc/Record routines - much commonality.
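/*
 * Record that s_reg has been promoted to a callee-save core register: mark the register
 * in use, add it to the spill mask, and append a vmap entry that packs the register
 * number (high bits) and the Dalvik v_reg (low VREG_NUM_WIDTH bits) into a single word.
 */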
void Mir2Lir::RecordCorePromotion(RegStorage reg, int s_reg) {
  int p_map_idx = SRegToPMap(s_reg);
  int v_reg = mir_graph_->SRegToVReg(s_reg);
  int reg_num = reg.GetRegNum();
  GetRegInfo(reg)->MarkInUse();
  core_spill_mask_ |= (1 << reg_num);
  // Include reg for later sort.
  core_vmap_table_.push_back(reg_num << VREG_NUM_WIDTH | (v_reg & ((1 << VREG_NUM_WIDTH) - 1)));
  num_core_spills_++;
  promotion_map_[p_map_idx].core_location = kLocPhysReg;
  promotion_map_[p_map_idx].core_reg = reg_num;
}

/* Reserve a callee-save register. Return InvalidReg if none available. */
RegStorage Mir2Lir::AllocPreservedCoreReg(int s_reg) {
  RegStorage res;
  /*
   * Note: it really doesn't matter much whether we allocate from the core or core64
   * pool for 64-bit targets - but for some targets it does matter whether allocation
   * happens from the single or double pool. This entire section of code could stand
   * a good refactoring.
   */
  for (RegisterInfo* info : reg_pool_->core_regs_) {
    if (!info->IsTemp() && !info->InUse()) {
      res = info->GetReg();
      RecordCorePromotion(res, s_reg);
      break;
    }
  }
  return res;
}

void Mir2Lir::RecordFpPromotion(RegStorage reg, int s_reg) {
  DCHECK_NE(cu_->instruction_set, kThumb2);
  int p_map_idx = SRegToPMap(s_reg);
  int v_reg = mir_graph_->SRegToVReg(s_reg);
  int reg_num = reg.GetRegNum();
  GetRegInfo(reg)->MarkInUse();
  fp_spill_mask_ |= (1 << reg_num);
  // Include reg for later sort.
  fp_vmap_table_.push_back(reg_num << VREG_NUM_WIDTH | (v_reg & ((1 << VREG_NUM_WIDTH) - 1)));
  num_fp_spills_++;
  promotion_map_[p_map_idx].fp_location = kLocPhysReg;
  promotion_map_[p_map_idx].fp_reg = reg.GetReg();
}

// Reserve a callee-save floating point register.
RegStorage Mir2Lir::AllocPreservedFpReg(int s_reg) {
  /*
   * For targets other than Thumb2, it doesn't matter whether we allocate from
   * the sp_regs_ or dp_regs_ pool. Some refactoring is in order here.
   */
  DCHECK_NE(cu_->instruction_set, kThumb2);
  RegStorage res;
  for (RegisterInfo* info : reg_pool_->sp_regs_) {
    if (!info->IsTemp() && !info->InUse()) {
      res = info->GetReg();
      RecordFpPromotion(res, s_reg);
      break;
    }
  }
  return res;
}

// TODO: this is Thumb2 only. Remove when DoPromotion refactored.
RegStorage Mir2Lir::AllocPreservedDouble(int s_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "Unexpected use of AllocPreservedDouble";
  UNREACHABLE();
}

// TODO: this is Thumb2 only. Remove when DoPromotion refactored.
RegStorage Mir2Lir::AllocPreservedSingle(int s_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "Unexpected use of AllocPreservedSingle";
  UNREACHABLE();
}

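/*
 * Round-robin temp allocation over a single register pool. The first pass looks for a temp
 * that is neither in use nor holding a live value; the second pass will kill (clobber) any
 * unused temp, splitting up stale wide pairs along the way. If nothing is found and the
 * caller marked the request as required, the pools are dumped and compilation aborts.
 *
 * Typical (hypothetical) use from a codegen routine, via the wrappers below:
 *   RegStorage t = AllocTemp();   // scratch core register
 *   ... emit code using t ...
 *   FreeTemp(t);                  // release it for reuse
 */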
RegStorage Mir2Lir::AllocTempBody(ArenaVector<RegisterInfo*>& regs, int* next_temp, bool required) {
  int num_regs = regs.size();
  int next = *next_temp;
  for (int i = 0; i < num_regs; i++) {
    if (next >= num_regs) {
      next = 0;
    }
    RegisterInfo* info = regs[next];
    // Try to allocate a register that doesn't hold a live value.
    if (info->IsTemp() && !info->InUse() && info->IsDead()) {
      // If it's wide, split it up.
      if (info->IsWide()) {
        // If the pair was associated with a wide value, unmark the partner as well.
        if (info->SReg() != INVALID_SREG) {
          RegisterInfo* partner = GetRegInfo(info->Partner());
          DCHECK_EQ(info->GetReg().GetRegNum(), partner->Partner().GetRegNum());
          DCHECK(partner->IsWide());
          partner->SetIsWide(false);
        }
        info->SetIsWide(false);
      }
      Clobber(info->GetReg());
      info->MarkInUse();
      *next_temp = next + 1;
      return info->GetReg();
    }
    next++;
  }
  next = *next_temp;
  // No free non-live regs. Anything we can kill?
  for (int i = 0; i < num_regs; i++) {
    if (next >= num_regs) {
      next = 0;
    }
    RegisterInfo* info = regs[next];
    if (info->IsTemp() && !info->InUse()) {
      // Got one. Kill it.
      ClobberSReg(info->SReg());
      Clobber(info->GetReg());
      info->MarkInUse();
      if (info->IsWide()) {
        RegisterInfo* partner = GetRegInfo(info->Partner());
        DCHECK_EQ(info->GetReg().GetRegNum(), partner->Partner().GetRegNum());
        DCHECK(partner->IsWide());
        info->SetIsWide(false);
        partner->SetIsWide(false);
      }
      *next_temp = next + 1;
      return info->GetReg();
    }
    next++;
  }
  if (required) {
    CodegenDump();
    DumpRegPools();
    LOG(FATAL) << "No free temp registers";
  }
  return RegStorage::InvalidReg();  // No register available.
}

RegStorage Mir2Lir::AllocTemp(bool required) {
  return AllocTempBody(reg_pool_->core_regs_, &reg_pool_->next_core_reg_, required);
}

RegStorage Mir2Lir::AllocTempWide(bool required) {
  RegStorage res;
  if (reg_pool_->core64_regs_.size() != 0) {
    res = AllocTempBody(reg_pool_->core64_regs_, &reg_pool_->next_core64_reg_, required);
  } else {
    RegStorage low_reg = AllocTemp();
    RegStorage high_reg = AllocTemp();
    res = RegStorage::MakeRegPair(low_reg, high_reg);
  }
  if (required) {
    CheckRegStorage(res, WidenessCheck::kCheckWide, RefCheck::kIgnoreRef, FPCheck::kCheckNotFP);
  }
  return res;
}

RegStorage Mir2Lir::AllocTempRef(bool required) {
  RegStorage res = AllocTempBody(*reg_pool_->ref_regs_, reg_pool_->next_ref_reg_, required);
  if (required) {
    DCHECK(!res.IsPair());
    CheckRegStorage(res, WidenessCheck::kCheckNotWide, RefCheck::kCheckRef, FPCheck::kCheckNotFP);
  }
  return res;
}

RegStorage Mir2Lir::AllocTempSingle(bool required) {
  RegStorage res = AllocTempBody(reg_pool_->sp_regs_, &reg_pool_->next_sp_reg_, required);
  if (required) {
    DCHECK(res.IsSingle()) << "Reg: 0x" << std::hex << res.GetRawBits();
    CheckRegStorage(res, WidenessCheck::kCheckNotWide, RefCheck::kCheckNotRef, FPCheck::kIgnoreFP);
  }
  return res;
}

RegStorage Mir2Lir::AllocTempDouble(bool required) {
  RegStorage res = AllocTempBody(reg_pool_->dp_regs_, &reg_pool_->next_dp_reg_, required);
  if (required) {
    DCHECK(res.IsDouble()) << "Reg: 0x" << std::hex << res.GetRawBits();
    CheckRegStorage(res, WidenessCheck::kCheckWide, RefCheck::kCheckNotRef, FPCheck::kIgnoreFP);
  }
  return res;
}

RegStorage Mir2Lir::AllocTypedTempWide(bool fp_hint, int reg_class, bool required) {
  DCHECK_NE(reg_class, kRefReg);  // NOTE: the Dalvik width of a reference is always 32 bits.
  if (((reg_class == kAnyReg) && fp_hint) || (reg_class == kFPReg)) {
    return AllocTempDouble(required);
  }
  return AllocTempWide(required);
}

RegStorage Mir2Lir::AllocTypedTemp(bool fp_hint, int reg_class, bool required) {
  if (((reg_class == kAnyReg) && fp_hint) || (reg_class == kFPReg)) {
    return AllocTempSingle(required);
  } else if (reg_class == kRefReg) {
    return AllocTempRef(required);
  }
  return AllocTemp(required);
}

RegStorage Mir2Lir::FindLiveReg(ArenaVector<RegisterInfo*>& regs, int s_reg) {
  RegStorage res;
  for (RegisterInfo* info : regs) {
    if ((info->SReg() == s_reg) && info->IsLive()) {
      res = info->GetReg();
      break;
    }
  }
  return res;
}

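/*
 * Try to reuse a register that already holds the live value named by s_reg in the
 * requested register class. On 32-bit targets a wide core value is only reused if both
 * halves are live and can be re-paired. On failure, the s_reg (and its high half for
 * wide values) is clobbered so no stale copies survive, and an invalid RegStorage is
 * returned.
 */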
RegStorage Mir2Lir::AllocLiveReg(int s_reg, int reg_class, bool wide) {
  RegStorage reg;
  if (reg_class == kRefReg) {
    reg = FindLiveReg(*reg_pool_->ref_regs_, s_reg);
    CheckRegStorage(reg, WidenessCheck::kCheckNotWide, RefCheck::kCheckRef, FPCheck::kCheckNotFP);
  }
  if (!reg.Valid() && ((reg_class == kAnyReg) || (reg_class == kFPReg))) {
    reg = FindLiveReg(wide ? reg_pool_->dp_regs_ : reg_pool_->sp_regs_, s_reg);
  }
  if (!reg.Valid() && (reg_class != kFPReg)) {
    if (cu_->target64) {
      reg = FindLiveReg(wide || reg_class == kRefReg ? reg_pool_->core64_regs_ :
                                                       reg_pool_->core_regs_, s_reg);
    } else {
      reg = FindLiveReg(reg_pool_->core_regs_, s_reg);
    }
  }
  if (reg.Valid()) {
    if (wide && !reg.IsFloat() && !cu_->target64) {
      // Only allow reg pairs for core regs on 32-bit targets.
      RegStorage high_reg = FindLiveReg(reg_pool_->core_regs_, s_reg + 1);
      if (high_reg.Valid()) {
        reg = RegStorage::MakeRegPair(reg, high_reg);
        MarkWide(reg);
      } else {
        // Only half available.
        reg = RegStorage::InvalidReg();
      }
    }
    if (reg.Valid() && (wide != GetRegInfo(reg)->IsWide())) {
      // Width mismatch - don't try to reuse.
      reg = RegStorage::InvalidReg();
    }
  }
  if (reg.Valid()) {
    if (reg.IsPair()) {
      RegisterInfo* info_low = GetRegInfo(reg.GetLow());
      RegisterInfo* info_high = GetRegInfo(reg.GetHigh());
      if (info_low->IsTemp()) {
        info_low->MarkInUse();
      }
      if (info_high->IsTemp()) {
        info_high->MarkInUse();
      }
    } else {
      RegisterInfo* info = GetRegInfo(reg);
      if (info->IsTemp()) {
        info->MarkInUse();
      }
    }
  } else {
    // Either not found, or something didn't match up. Clobber to prevent any stale instances.
    ClobberSReg(s_reg);
    if (wide) {
      ClobberSReg(s_reg + 1);
    }
  }
  CheckRegStorage(reg, WidenessCheck::kIgnoreWide,
                  reg_class == kRefReg ? RefCheck::kCheckRef : RefCheck::kIgnoreRef,
                  FPCheck::kIgnoreFP);
  return reg;
}

void Mir2Lir::FreeTemp(RegStorage reg) {
  if (reg.IsPair()) {
    FreeTemp(reg.GetLow());
    FreeTemp(reg.GetHigh());
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    if (p->IsTemp()) {
      p->MarkFree();
      p->SetIsWide(false);
      p->SetPartner(reg);
    }
  }
}

void Mir2Lir::FreeRegLocTemps(RegLocation rl_keep, RegLocation rl_free) {
  DCHECK(rl_keep.wide);
  DCHECK(rl_free.wide);
  int free_low = rl_free.reg.GetLowReg();
  int free_high = rl_free.reg.GetHighReg();
  int keep_low = rl_keep.reg.GetLowReg();
  int keep_high = rl_keep.reg.GetHighReg();
  if ((free_low != keep_low) && (free_low != keep_high) &&
      (free_high != keep_low) && (free_high != keep_high)) {
    // No overlap, free both.
    FreeTemp(rl_free.reg);
  }
}

bool Mir2Lir::IsLive(RegStorage reg) {
  bool res;
  if (reg.IsPair()) {
    RegisterInfo* p_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* p_hi = GetRegInfo(reg.GetHigh());
    DCHECK_EQ(p_lo->IsLive(), p_hi->IsLive());
    res = p_lo->IsLive() || p_hi->IsLive();
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    res = p->IsLive();
  }
  return res;
}

bool Mir2Lir::IsTemp(RegStorage reg) {
  bool res;
  if (reg.IsPair()) {
    RegisterInfo* p_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* p_hi = GetRegInfo(reg.GetHigh());
    res = p_lo->IsTemp() || p_hi->IsTemp();
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    res = p->IsTemp();
  }
  return res;
}

bool Mir2Lir::IsPromoted(RegStorage reg) {
  bool res;
  if (reg.IsPair()) {
    RegisterInfo* p_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* p_hi = GetRegInfo(reg.GetHigh());
    res = !p_lo->IsTemp() || !p_hi->IsTemp();
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    res = !p->IsTemp();
  }
  return res;
}

bool Mir2Lir::IsDirty(RegStorage reg) {
  bool res;
  if (reg.IsPair()) {
    RegisterInfo* p_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* p_hi = GetRegInfo(reg.GetHigh());
    res = p_lo->IsDirty() || p_hi->IsDirty();
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    res = p->IsDirty();
  }
  return res;
}

/*
 * Similar to AllocTemp(), but forces the allocation of a specific
 * register. No check is made to see if the register was previously
 * allocated. Use with caution.
 */
void Mir2Lir::LockTemp(RegStorage reg) {
  DCHECK(IsTemp(reg));
  if (reg.IsPair()) {
    RegisterInfo* p_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* p_hi = GetRegInfo(reg.GetHigh());
    p_lo->MarkInUse();
    p_lo->MarkDead();
    p_hi->MarkInUse();
    p_hi->MarkDead();
  } else {
    RegisterInfo* p = GetRegInfo(reg);
    p->MarkInUse();
    p->MarkDead();
  }
}

void Mir2Lir::ResetDef(RegStorage reg) {
  if (reg.IsPair()) {
    GetRegInfo(reg.GetLow())->ResetDefBody();
    GetRegInfo(reg.GetHigh())->ResetDefBody();
  } else {
    GetRegInfo(reg)->ResetDefBody();
  }
}

void Mir2Lir::NullifyRange(RegStorage reg, int s_reg) {
  RegisterInfo* info = nullptr;
  RegStorage rs = reg.IsPair() ? reg.GetLow() : reg;
  if (IsTemp(rs)) {
    info = GetRegInfo(reg);
  }
  if ((info != nullptr) && (info->DefStart() != nullptr) && (info->DefEnd() != nullptr)) {
    DCHECK_EQ(info->SReg(), s_reg);  // Make sure we're on the same page.
    for (LIR* p = info->DefStart();; p = p->next) {
      NopLIR(p);
      if (p == info->DefEnd()) {
        break;
      }
    }
  }
}

/*
 * Mark the beginning and end LIR of a def sequence. Note that
 * on entry start points to the LIR prior to the beginning of the
 * sequence.
 */
void Mir2Lir::MarkDef(RegLocation rl, LIR *start, LIR *finish) {
  DCHECK(!rl.wide);
  DCHECK(start && start->next);
  DCHECK(finish);
  RegisterInfo* p = GetRegInfo(rl.reg);
  p->SetDefStart(start->next);
  p->SetDefEnd(finish);
}

/*
 * Mark the beginning and end LIR of a def sequence. Note that
 * on entry start points to the LIR prior to the beginning of the
 * sequence.
 */
void Mir2Lir::MarkDefWide(RegLocation rl, LIR *start, LIR *finish) {
  DCHECK(rl.wide);
  DCHECK(start && start->next);
  DCHECK(finish);
  RegisterInfo* p;
  if (rl.reg.IsPair()) {
    p = GetRegInfo(rl.reg.GetLow());
    ResetDef(rl.reg.GetHigh());  // Only track low of pair.
  } else {
    p = GetRegInfo(rl.reg);
  }
  p->SetDefStart(start->next);
  p->SetDefEnd(finish);
}

void Mir2Lir::ResetDefLoc(RegLocation rl) {
  DCHECK(!rl.wide);
  if (IsTemp(rl.reg) && !(cu_->disable_opt & (1 << kSuppressLoads))) {
    NullifyRange(rl.reg, rl.s_reg_low);
  }
  ResetDef(rl.reg);
}

void Mir2Lir::ResetDefLocWide(RegLocation rl) {
  DCHECK(rl.wide);
  // If pair, only track low reg of pair.
  RegStorage rs = rl.reg.IsPair() ? rl.reg.GetLow() : rl.reg;
  if (IsTemp(rs) && !(cu_->disable_opt & (1 << kSuppressLoads))) {
    NullifyRange(rs, rl.s_reg_low);
  }
  ResetDef(rs);
}

void Mir2Lir::ResetDefTracking() {
  for (RegisterInfo* info : tempreg_info_) {
    info->ResetDefBody();
  }
}

void Mir2Lir::ClobberAllTemps() {
  for (RegisterInfo* info : tempreg_info_) {
    ClobberBody(info);
  }
}

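/*
 * Flush routines write a live, dirty temp back to its Dalvik vreg home on the frame and
 * clear the dirty bit. FlushRegWide handles 64-bit values held either in a single
 * register or in a low/high pair; FlushReg handles the narrow case.
 */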
void Mir2Lir::FlushRegWide(RegStorage reg) {
  if (reg.IsPair()) {
    RegisterInfo* info1 = GetRegInfo(reg.GetLow());
    RegisterInfo* info2 = GetRegInfo(reg.GetHigh());
    DCHECK(info1 && info2 && info1->IsWide() && info2->IsWide() &&
           (info1->Partner().ExactlyEquals(info2->GetReg())) &&
           (info2->Partner().ExactlyEquals(info1->GetReg())));
    if ((info1->IsLive() && info1->IsDirty()) || (info2->IsLive() && info2->IsDirty())) {
      if (!(info1->IsTemp() && info2->IsTemp())) {
        /* Should not happen. If it does, there's a problem in eval_loc. */
        LOG(FATAL) << "Long half-temp, half-promoted";
      }

      info1->SetIsDirty(false);
      info2->SetIsDirty(false);
      if (mir_graph_->SRegToVReg(info2->SReg()) < mir_graph_->SRegToVReg(info1->SReg())) {
        info1 = info2;
      }
      int v_reg = mir_graph_->SRegToVReg(info1->SReg());
      ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
      StoreBaseDisp(TargetPtrReg(kSp), VRegOffset(v_reg), reg, k64, kNotVolatile);
    }
  } else {
    RegisterInfo* info = GetRegInfo(reg);
    if (info->IsLive() && info->IsDirty()) {
      info->SetIsDirty(false);
      int v_reg = mir_graph_->SRegToVReg(info->SReg());
      ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
      StoreBaseDisp(TargetPtrReg(kSp), VRegOffset(v_reg), reg, k64, kNotVolatile);
    }
  }
}

void Mir2Lir::FlushReg(RegStorage reg) {
  DCHECK(!reg.IsPair());
  RegisterInfo* info = GetRegInfo(reg);
  if (info->IsLive() && info->IsDirty()) {
    info->SetIsDirty(false);
    int v_reg = mir_graph_->SRegToVReg(info->SReg());
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    StoreBaseDisp(TargetPtrReg(kSp), VRegOffset(v_reg), reg, kWord, kNotVolatile);
  }
}

void Mir2Lir::FlushSpecificReg(RegisterInfo* info) {
  if (info->IsWide()) {
    FlushRegWide(info->GetReg());
  } else {
    FlushReg(info->GetReg());
  }
}

void Mir2Lir::FlushAllRegs() {
  for (RegisterInfo* info : tempreg_info_) {
    if (info->IsDirty() && info->IsLive()) {
      FlushSpecificReg(info);
    }
    info->MarkDead();
    info->SetIsWide(false);
  }
}

bool Mir2Lir::RegClassMatches(int reg_class, RegStorage reg) {
  if (reg_class == kAnyReg) {
    return true;
  } else if ((reg_class == kCoreReg) || (reg_class == kRefReg)) {
    /*
     * For this purpose, consider Core and Ref to be the same class. We aren't dealing
     * with width here - that should be checked at a higher level (if needed).
     */
    return !reg.IsFloat();
  } else {
    return reg.IsFloat();
  }
}

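/*
 * Associate the physical register(s) in loc with its SSA name and mark them live. Any
 * other register currently holding the same s_reg is clobbered first, so at most one
 * copy of a Dalvik value is ever considered live. A loc with no s_reg is marked dead.
 */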
void Mir2Lir::MarkLive(RegLocation loc) {
  RegStorage reg = loc.reg;
  if (!IsTemp(reg)) {
    return;
  }
  int s_reg = loc.s_reg_low;
  if (s_reg == INVALID_SREG) {
    // Can't be live if no associated sreg.
    if (reg.IsPair()) {
      GetRegInfo(reg.GetLow())->MarkDead();
      GetRegInfo(reg.GetHigh())->MarkDead();
    } else {
      GetRegInfo(reg)->MarkDead();
    }
  } else {
    if (reg.IsPair()) {
      RegisterInfo* info_lo = GetRegInfo(reg.GetLow());
      RegisterInfo* info_hi = GetRegInfo(reg.GetHigh());
      if (info_lo->IsLive() && (info_lo->SReg() == s_reg) && info_hi->IsLive() &&
          (info_hi->SReg() == s_reg)) {
        return;  // Already live.
      }
      ClobberSReg(s_reg);
      ClobberSReg(s_reg + 1);
      info_lo->MarkLive(s_reg);
      info_hi->MarkLive(s_reg + 1);
    } else {
      RegisterInfo* info = GetRegInfo(reg);
      if (info->IsLive() && (info->SReg() == s_reg)) {
        return;  // Already live.
      }
      ClobberSReg(s_reg);
      if (loc.wide) {
        ClobberSReg(s_reg + 1);
      }
      info->MarkLive(s_reg);
    }
    if (loc.wide) {
      MarkWide(reg);
    } else {
      MarkNarrow(reg);
    }
  }
}

void Mir2Lir::MarkTemp(RegStorage reg) {
  DCHECK(!reg.IsPair());
  RegisterInfo* info = GetRegInfo(reg);
  tempreg_info_.push_back(info);
  info->SetIsTemp(true);
}

void Mir2Lir::UnmarkTemp(RegStorage reg) {
  DCHECK(!reg.IsPair());
  RegisterInfo* info = GetRegInfo(reg);
  auto pos = std::find(tempreg_info_.begin(), tempreg_info_.end(), info);
  DCHECK(pos != tempreg_info_.end());
  tempreg_info_.erase(pos);
  info->SetIsTemp(false);
}

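/*
 * Mark a register (or low/high pair) as holding a wide value and link the partner
 * registers to each other, first breaking any stale pairing left over from a previous
 * use.
 */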
void Mir2Lir::MarkWide(RegStorage reg) {
  if (reg.IsPair()) {
    RegisterInfo* info_lo = GetRegInfo(reg.GetLow());
    RegisterInfo* info_hi = GetRegInfo(reg.GetHigh());
    // Unpair any old partners.
    if (info_lo->IsWide() && info_lo->Partner().NotExactlyEquals(info_hi->GetReg())) {
      GetRegInfo(info_lo->Partner())->SetIsWide(false);
    }
    if (info_hi->IsWide() && info_hi->Partner().NotExactlyEquals(info_lo->GetReg())) {
      GetRegInfo(info_hi->Partner())->SetIsWide(false);
    }
    info_lo->SetIsWide(true);
    info_hi->SetIsWide(true);
    info_lo->SetPartner(reg.GetHigh());
    info_hi->SetPartner(reg.GetLow());
  } else {
    RegisterInfo* info = GetRegInfo(reg);
    info->SetIsWide(true);
    info->SetPartner(reg);
  }
}

void Mir2Lir::MarkNarrow(RegStorage reg) {
  DCHECK(!reg.IsPair());
  RegisterInfo* info = GetRegInfo(reg);
  info->SetIsWide(false);
  info->SetPartner(reg);
}

void Mir2Lir::MarkClean(RegLocation loc) {
  if (loc.reg.IsPair()) {
    RegisterInfo* info = GetRegInfo(loc.reg.GetLow());
    info->SetIsDirty(false);
    info = GetRegInfo(loc.reg.GetHigh());
    info->SetIsDirty(false);
  } else {
    RegisterInfo* info = GetRegInfo(loc.reg);
    info->SetIsDirty(false);
  }
}

// FIXME: need to verify rules/assumptions about how wide values are treated in 64BitSolos.
void Mir2Lir::MarkDirty(RegLocation loc) {
  if (loc.home) {
    // If already home, can't be dirty.
    return;
  }
  if (loc.reg.IsPair()) {
    RegisterInfo* info = GetRegInfo(loc.reg.GetLow());
    info->SetIsDirty(true);
    info = GetRegInfo(loc.reg.GetHigh());
    info->SetIsDirty(true);
  } else {
    RegisterInfo* info = GetRegInfo(loc.reg);
    info->SetIsDirty(true);
  }
}

void Mir2Lir::MarkInUse(RegStorage reg) {
  if (reg.IsPair()) {
    GetRegInfo(reg.GetLow())->MarkInUse();
    GetRegInfo(reg.GetHigh())->MarkInUse();
  } else {
    GetRegInfo(reg)->MarkInUse();
  }
}

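/*
 * Debug-only consistency check over the temp register pool: live wide values must have
 * a live, mutually-linked partner with an adjacent s_reg, alias masters and their
 * children must agree about liveness, and anything live without an s_reg must not be
 * carrying def bookkeeping. Always returns true so it can sit inside a DCHECK().
 */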
bool Mir2Lir::CheckCorePoolSanity() {
  for (RegisterInfo* info : tempreg_info_) {
    int my_sreg = info->SReg();
    if (info->IsTemp() && info->IsLive() && info->IsWide() && my_sreg != INVALID_SREG) {
      RegStorage my_reg = info->GetReg();
      RegStorage partner_reg = info->Partner();
      RegisterInfo* partner = GetRegInfo(partner_reg);
      DCHECK(partner != nullptr);
      DCHECK(partner->IsWide());
      DCHECK_EQ(my_reg.GetReg(), partner->Partner().GetReg());
      DCHECK(partner->IsLive());
      int partner_sreg = partner->SReg();
      int diff = my_sreg - partner_sreg;
      DCHECK((diff == 0) || (diff == -1) || (diff == 1));
    }
    if (info->Master() != info) {
      // Aliased.
      if (info->IsLive() && (info->SReg() != INVALID_SREG)) {
        // If I'm live, master should not be live, but should show liveness in alias set.
        DCHECK_EQ(info->Master()->SReg(), INVALID_SREG);
        DCHECK(!info->Master()->IsDead());
      }
      // TODO: Add checks in !info->IsDead() case to ensure every live bit is owned by
      // exactly 1 reg.
    }
    if (info->IsAliased()) {
      // Has child aliases.
      DCHECK_EQ(info->Master(), info);
      if (info->IsLive() && (info->SReg() != INVALID_SREG)) {
        // Master live, no child should be dead - all should show liveness in set.
        for (RegisterInfo* p = info->GetAliasChain(); p != nullptr; p = p->GetAliasChain()) {
          DCHECK(!p->IsDead());
          DCHECK_EQ(p->SReg(), INVALID_SREG);
        }
      } else if (!info->IsDead()) {
        // Master not live, one or more aliases must be.
        bool live_alias = false;
        for (RegisterInfo* p = info->GetAliasChain(); p != nullptr; p = p->GetAliasChain()) {
          live_alias |= p->IsLive();
        }
        DCHECK(live_alias);
      }
    }
    if (info->IsLive() && (info->SReg() == INVALID_SREG)) {
      // If not fully live, should have INVALID_SREG and def's should be null.
      DCHECK(info->DefStart() == nullptr);
      DCHECK(info->DefEnd() == nullptr);
    }
  }
  return true;
}

982 * Return an updated location record with current in-register status.
983 * If the value lives in live temps, reflect that fact. No code
984 * is generated. If the live value is part of an older pair,
985 * clobber both low and high.
986 * TUNING: clobbering both is a bit heavy-handed, but the alternative
987 * is a bit complex when dealing with FP regs. Examine code to see
988 * if it's worthwhile trying to be more clever here.
989 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700990RegLocation Mir2Lir::UpdateLoc(RegLocation loc) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700991 DCHECK(!loc.wide);
992 DCHECK(CheckCorePoolSanity());
993 if (loc.location != kLocPhysReg) {
994 DCHECK((loc.location == kLocDalvikFrame) ||
995 (loc.location == kLocCompilerTemp));
Andreas Gampe4b537a82014-06-30 22:24:53 -0700996 RegStorage reg = AllocLiveReg(loc.s_reg_low, loc.ref ? kRefReg : kAnyReg, false);
buzbee091cc402014-03-31 10:14:40 -0700997 if (reg.Valid()) {
998 bool match = true;
999 RegisterInfo* info = GetRegInfo(reg);
1000 match &= !reg.IsPair();
1001 match &= !info->IsWide();
1002 if (match) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001003 loc.location = kLocPhysReg;
buzbee091cc402014-03-31 10:14:40 -07001004 loc.reg = reg;
1005 } else {
1006 Clobber(reg);
1007 FreeTemp(reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001008 }
1009 }
Andreas Gampe4b537a82014-06-30 22:24:53 -07001010 CheckRegLocation(loc);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001011 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001012 return loc;
1013}
1014
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001015RegLocation Mir2Lir::UpdateLocWide(RegLocation loc) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001016 DCHECK(loc.wide);
1017 DCHECK(CheckCorePoolSanity());
1018 if (loc.location != kLocPhysReg) {
1019 DCHECK((loc.location == kLocDalvikFrame) ||
1020 (loc.location == kLocCompilerTemp));
buzbee091cc402014-03-31 10:14:40 -07001021 RegStorage reg = AllocLiveReg(loc.s_reg_low, kAnyReg, true);
1022 if (reg.Valid()) {
1023 bool match = true;
1024 if (reg.IsPair()) {
1025 // If we've got a register pair, make sure that it was last used as the same pair.
1026 RegisterInfo* info_lo = GetRegInfo(reg.GetLow());
1027 RegisterInfo* info_hi = GetRegInfo(reg.GetHigh());
1028 match &= info_lo->IsWide();
1029 match &= info_hi->IsWide();
buzbeeb5860fb2014-06-21 15:31:01 -07001030 match &= (info_lo->Partner().ExactlyEquals(info_hi->GetReg()));
1031 match &= (info_hi->Partner().ExactlyEquals(info_lo->GetReg()));
buzbee091cc402014-03-31 10:14:40 -07001032 } else {
1033 RegisterInfo* info = GetRegInfo(reg);
1034 match &= info->IsWide();
buzbeeb5860fb2014-06-21 15:31:01 -07001035 match &= (info->GetReg().ExactlyEquals(info->Partner()));
buzbee091cc402014-03-31 10:14:40 -07001036 }
1037 if (match) {
1038 loc.location = kLocPhysReg;
1039 loc.reg = reg;
1040 } else {
1041 Clobber(reg);
1042 FreeTemp(reg);
1043 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001044 }
Andreas Gampe4b537a82014-06-30 22:24:53 -07001045 CheckRegLocation(loc);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001046 }
1047 return loc;
1048}
1049
Brian Carlstrom7940e442013-07-12 13:46:57 -07001050/* For use in cases we don't know (or care) width */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001051RegLocation Mir2Lir::UpdateRawLoc(RegLocation loc) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001052 if (loc.wide)
1053 return UpdateLocWide(loc);
1054 else
1055 return UpdateLoc(loc);
1056}
1057
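/*
 * Ensure the wide value described by loc is held in registers of the requested class:
 * reuse the current registers when the class matches, otherwise allocate fresh temps
 * and clobber the old ones. If update is set, the returned location is marked as in a
 * physical register and live.
 */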
RegLocation Mir2Lir::EvalLocWide(RegLocation loc, int reg_class, bool update) {
  DCHECK(loc.wide);

  loc = UpdateLocWide(loc);

  /* If already in registers, we can assume proper form. Right reg class? */
  if (loc.location == kLocPhysReg) {
    if (!RegClassMatches(reg_class, loc.reg)) {
      // Wrong register class. Reallocate and transfer ownership.
      RegStorage new_regs = AllocTypedTempWide(loc.fp, reg_class);
      // Clobber the old regs.
      Clobber(loc.reg);
      // ...and mark the new ones live.
      loc.reg = new_regs;
      MarkWide(loc.reg);
      MarkLive(loc);
    }
    CheckRegLocation(loc);
    return loc;
  }

  DCHECK_NE(loc.s_reg_low, INVALID_SREG);
  DCHECK_NE(GetSRegHi(loc.s_reg_low), INVALID_SREG);

  loc.reg = AllocTypedTempWide(loc.fp, reg_class);
  MarkWide(loc.reg);

  if (update) {
    loc.location = kLocPhysReg;
    MarkLive(loc);
  }
  CheckRegLocation(loc);
  return loc;
}

RegLocation Mir2Lir::EvalLoc(RegLocation loc, int reg_class, bool update) {
  // Narrow reg_class if the loc is a ref.
  if (loc.ref && reg_class == kAnyReg) {
    reg_class = kRefReg;
  }

  if (loc.wide) {
    return EvalLocWide(loc, reg_class, update);
  }

  loc = UpdateLoc(loc);

  if (loc.location == kLocPhysReg) {
    if (!RegClassMatches(reg_class, loc.reg)) {
      // Wrong register class.  Reallocate and transfer ownership.
      RegStorage new_reg = AllocTypedTemp(loc.fp, reg_class);
      // Clobber the old reg.
      Clobber(loc.reg);
      // ...and mark the new one live.
      loc.reg = new_reg;
      MarkLive(loc);
    }
    CheckRegLocation(loc);
    return loc;
  }

  DCHECK_NE(loc.s_reg_low, INVALID_SREG);

  loc.reg = AllocTypedTemp(loc.fp, reg_class);
  CheckRegLocation(loc);

  if (update) {
    loc.location = kLocPhysReg;
    MarkLive(loc);
  }
  CheckRegLocation(loc);
  return loc;
}

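// Estimate, for a single MIR, whether its generated code would use the ArtMethod* and/or a
// PC-relative dex-cache-arrays load, and bump the corresponding core_counts entries by `weight`
// so that DoPromotion() can weigh these hidden uses against ordinary Dalvik vreg uses.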
void Mir2Lir::AnalyzeMIR(RefCounts* core_counts, MIR* mir, uint32_t weight) {
  // NOTE: This should be in sync with functions that actually generate code for
  // the opcodes below. However, if we get this wrong, the generated code will
  // still be correct even if it may be sub-optimal.
  int opcode = mir->dalvikInsn.opcode;
  bool uses_method = false;
  bool uses_pc_rel_load = false;
  uint32_t dex_cache_array_offset = std::numeric_limits<uint32_t>::max();
  switch (opcode) {
    case Instruction::CHECK_CAST:
    case Instruction::INSTANCE_OF: {
      if ((opcode == Instruction::CHECK_CAST) &&
          (mir->optimization_flags & MIR_IGNORE_CHECK_CAST) != 0) {
        break;  // No code generated.
      }
      uint32_t type_idx =
          (opcode == Instruction::CHECK_CAST) ? mir->dalvikInsn.vB : mir->dalvikInsn.vC;
      bool type_known_final, type_known_abstract, use_declaring_class;
      bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(
          cu_->method_idx, *cu_->dex_file, type_idx,
          &type_known_final, &type_known_abstract, &use_declaring_class);
      if (opcode == Instruction::CHECK_CAST && !needs_access_check &&
          cu_->compiler_driver->IsSafeCast(
              mir_graph_->GetCurrentDexCompilationUnit(), mir->offset)) {
        break;  // No code generated.
      }
      if (!needs_access_check && !use_declaring_class && CanUseOpPcRelDexCacheArrayLoad()) {
        uses_pc_rel_load = true;  // And ignore method use in slow path.
        dex_cache_array_offset = dex_cache_arrays_layout_.TypeOffset(type_idx);
      } else {
        uses_method = true;
      }
      break;
    }

    case Instruction::CONST_CLASS:
      if (CanUseOpPcRelDexCacheArrayLoad() &&
          cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                           mir->dalvikInsn.vB)) {
        uses_pc_rel_load = true;  // And ignore method use in slow path.
        dex_cache_array_offset = dex_cache_arrays_layout_.TypeOffset(mir->dalvikInsn.vB);
      } else {
        uses_method = true;
      }
      break;

    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      if (CanUseOpPcRelDexCacheArrayLoad()) {
        uses_pc_rel_load = true;  // And ignore method use in slow path.
        dex_cache_array_offset = dex_cache_arrays_layout_.StringOffset(mir->dalvikInsn.vB);
      } else {
        uses_method = true;
      }
      break;

    case Instruction::INVOKE_VIRTUAL:
    case Instruction::INVOKE_SUPER:
    case Instruction::INVOKE_DIRECT:
    case Instruction::INVOKE_STATIC:
    case Instruction::INVOKE_INTERFACE:
    case Instruction::INVOKE_VIRTUAL_RANGE:
    case Instruction::INVOKE_SUPER_RANGE:
    case Instruction::INVOKE_DIRECT_RANGE:
    case Instruction::INVOKE_STATIC_RANGE:
    case Instruction::INVOKE_INTERFACE_RANGE:
    case Instruction::INVOKE_VIRTUAL_QUICK:
    case Instruction::INVOKE_VIRTUAL_RANGE_QUICK: {
      const MirMethodLoweringInfo& info = mir_graph_->GetMethodLoweringInfo(mir);
      InvokeType sharp_type = info.GetSharpType();
      if (info.IsIntrinsic()) {
        // Nothing to do, if an intrinsic uses ArtMethod* it's in the slow-path - don't count it.
      } else if (!info.FastPath() || (sharp_type != kStatic && sharp_type != kDirect)) {
        // Nothing to do, the generated code or entrypoint uses method from the stack.
      } else if (info.DirectCode() != 0 && info.DirectMethod() != 0) {
        // Nothing to do, the generated code uses method from the stack.
      } else if (CanUseOpPcRelDexCacheArrayLoad()) {
        uses_pc_rel_load = true;
        dex_cache_array_offset = dex_cache_arrays_layout_.MethodOffset(mir->dalvikInsn.vB);
      } else {
        uses_method = true;
      }
      break;
    }

    case Instruction::NEW_INSTANCE:
    case Instruction::NEW_ARRAY:
    case Instruction::FILLED_NEW_ARRAY:
    case Instruction::FILLED_NEW_ARRAY_RANGE:
      uses_method = true;
      break;
    case Instruction::FILL_ARRAY_DATA:
      // Nothing to do, the entrypoint uses method from the stack.
      break;
    case Instruction::THROW:
      // Nothing to do, the entrypoint uses method from the stack.
      break;

    case Instruction::SGET:
    case Instruction::SGET_WIDE:
    case Instruction::SGET_OBJECT:
    case Instruction::SGET_BOOLEAN:
    case Instruction::SGET_BYTE:
    case Instruction::SGET_CHAR:
    case Instruction::SGET_SHORT:
    case Instruction::SPUT:
    case Instruction::SPUT_WIDE:
    case Instruction::SPUT_OBJECT:
    case Instruction::SPUT_BOOLEAN:
    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_CHAR:
    case Instruction::SPUT_SHORT: {
      const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
      bool fast = IsInstructionSGet(static_cast<Instruction::Code>(opcode))
          ? field_info.FastGet()
          : field_info.FastPut();
      if (fast && (cu_->enable_debug & (1 << kDebugSlowFieldPath)) == 0) {
        if (!field_info.IsReferrersClass() && CanUseOpPcRelDexCacheArrayLoad()) {
          uses_pc_rel_load = true;  // And ignore method use in slow path.
          dex_cache_array_offset = dex_cache_arrays_layout_.TypeOffset(field_info.StorageIndex());
        } else {
          uses_method = true;
        }
      } else {
        // Nothing to do, the entrypoint uses method from the stack.
      }
      break;
    }

    default:
      break;
  }
  if (uses_method) {
    core_counts[SRegToPMap(mir_graph_->GetMethodLoc().s_reg_low)].count += weight;
  }
  if (uses_pc_rel_load) {
    if (pc_rel_temp_ != nullptr) {
      core_counts[SRegToPMap(pc_rel_temp_->s_reg_low)].count += weight;
      DCHECK_NE(dex_cache_array_offset, std::numeric_limits<uint32_t>::max());
      dex_cache_arrays_min_offset_ = std::min(dex_cache_arrays_min_offset_, dex_cache_array_offset);
    } else {
      // Nothing to do, using PC-relative addressing without promoting base PC to register.
    }
  }
}

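// Counts are accumulated per promotion-map slot. On targets where a wide value does not simply
// alias two narrow registers, wide starting sregs are tracked separately in the upper half of
// the count arrays and tagged with STARTING_WIDE_SREG (see DoPromotion()).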
/* USE SSA names to count references of base Dalvik v_regs. */
void Mir2Lir::CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs) {
  for (int i = 0; i < mir_graph_->GetNumSSARegs(); i++) {
    RegLocation loc = mir_graph_->reg_location_[i];
    RefCounts* counts = loc.fp ? fp_counts : core_counts;
    int p_map_idx = SRegToPMap(loc.s_reg_low);
    int use_count = mir_graph_->GetUseCount(i);
    if (loc.fp) {
      if (loc.wide) {
        if (WideFPRsAreAliases()) {
          // Floats and doubles can be counted together.
          counts[p_map_idx].count += use_count;
        } else {
          // Treat doubles as a unit, using upper half of fp_counts array.
          counts[p_map_idx + num_regs].count += use_count;
        }
        i++;
      } else {
        counts[p_map_idx].count += use_count;
      }
    } else {
      if (loc.wide && WideGPRsAreAliases()) {
        i++;
      }
      if (!IsInexpensiveConstant(loc)) {
        counts[p_map_idx].count += use_count;
      }
    }
  }

  // Now analyze the ArtMethod* and pc_rel_temp_ uses.
  DCHECK_EQ(core_counts[SRegToPMap(mir_graph_->GetMethodLoc().s_reg_low)].count, 0);
  if (pc_rel_temp_ != nullptr) {
    DCHECK_EQ(core_counts[SRegToPMap(pc_rel_temp_->s_reg_low)].count, 0);
  }
  PreOrderDfsIterator iter(mir_graph_);
  for (BasicBlock* bb = iter.Next(); bb != nullptr; bb = iter.Next()) {
    if (bb->block_type == kDead) {
      continue;
    }
    uint32_t weight = mir_graph_->GetUseCountWeight(bb);
    for (MIR* mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
      AnalyzeMIR(core_counts, mir, weight);
    }
  }
}

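// Comparator for the ref-count arrays filled in by CountRefs(). Illustrative example: counts
// {v0: 10, v3: 10, v1: 7} sort to v0, v3, v1 - equal counts fall back to ascending s_reg so the
// ordering is deterministic across qsort implementations.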
/* qsort callback function, sort descending */
static int SortCounts(const void *val1, const void *val2) {
  const Mir2Lir::RefCounts* op1 = reinterpret_cast<const Mir2Lir::RefCounts*>(val1);
  const Mir2Lir::RefCounts* op2 = reinterpret_cast<const Mir2Lir::RefCounts*>(val2);
  // Note that we fall back to sorting on reg so we get stable output on differing qsort
  // implementations (such as on host and target or between local host and build servers).
  // Note also that if a wide val1 and a non-wide val2 have the same count, then val1 always
  // ``loses'' (as STARTING_WIDE_SREG is or-ed in val1->s_reg).
  return (op1->count == op2->count)
          ? (op1->s_reg - op2->s_reg)
          : (op1->count < op2->count ? 1 : -1);
}

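// Log one line per count-array entry. Illustrative output: "s_reg[32_5]: 12" for a narrow vreg
// and "s_reg[64_6]: 4" for a wide starting vreg (STARTING_WIDE_SREG masked off for printing).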
void Mir2Lir::DumpCounts(const RefCounts* arr, int size, const char* msg) {
  LOG(INFO) << msg;
  for (int i = 0; i < size; i++) {
    if ((arr[i].s_reg & STARTING_WIDE_SREG) != 0) {
      LOG(INFO) << "s_reg[64_" << (arr[i].s_reg & ~STARTING_WIDE_SREG) << "]: " << arr[i].count;
    } else {
      LOG(INFO) << "s_reg[32_" << arr[i].s_reg << "]: " << arr[i].count;
    }
  }
}

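// Promotion driver: count vreg uses (plus ArtMethod* and PC-relative-base uses), sort the
// counts, try to assign preserved (callee-save) registers to the hottest fp and core vregs, and
// finally rewrite the SSA RegLocations to point at their new home registers.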
/*
 * Note: some portions of this code are required even if the kPromoteRegs
 * optimization is disabled.
 */
void Mir2Lir::DoPromotion() {
  int num_regs = mir_graph_->GetNumOfCodeAndTempVRs();
  const int promotion_threshold = 1;
  // Allocate the promotion map - one entry for each Dalvik vReg or compiler temp
  promotion_map_ = arena_->AllocArray<PromotionMap>(num_regs, kArenaAllocRegAlloc);

  // Allow target code to add any special registers
  AdjustSpillMask();

  /*
   * Simple register promotion. Just do a static count of the uses
   * of Dalvik registers.  Note that we examine the SSA names, but
   * count based on original Dalvik register name.  Count refs
   * separately based on type in order to give allocation
   * preference to fp doubles - which must be allocated sequential
   * physical single fp registers starting with an even-numbered
   * reg.
   * TUNING: replace with linear scan once we have the ability
   * to describe register live ranges for GC.
   */
  size_t core_reg_count_size = WideGPRsAreAliases() ? num_regs : num_regs * 2;
  size_t fp_reg_count_size = WideFPRsAreAliases() ? num_regs : num_regs * 2;
  RefCounts *core_regs = arena_->AllocArray<RefCounts>(core_reg_count_size, kArenaAllocRegAlloc);
  RefCounts *fp_regs = arena_->AllocArray<RefCounts>(fp_reg_count_size, kArenaAllocRegAlloc);
  // Set ssa names for original Dalvik registers
  for (int i = 0; i < num_regs; i++) {
    core_regs[i].s_reg = fp_regs[i].s_reg = i;
  }

  // Duplicate in upper half to represent possible wide starting sregs.
  for (size_t i = num_regs; i < fp_reg_count_size; i++) {
    fp_regs[i].s_reg = fp_regs[i - num_regs].s_reg | STARTING_WIDE_SREG;
  }
  for (size_t i = num_regs; i < core_reg_count_size; i++) {
    core_regs[i].s_reg = core_regs[i - num_regs].s_reg | STARTING_WIDE_SREG;
  }

  // Sum use counts of SSA regs by original Dalvik vreg.
  CountRefs(core_regs, fp_regs, num_regs);

  // Sort the count arrays
  qsort(core_regs, core_reg_count_size, sizeof(RefCounts), SortCounts);
  qsort(fp_regs, fp_reg_count_size, sizeof(RefCounts), SortCounts);

  if (cu_->verbose) {
    DumpCounts(core_regs, core_reg_count_size, "Core regs after sort");
    DumpCounts(fp_regs, fp_reg_count_size, "Fp regs after sort");
  }

  if (!(cu_->disable_opt & (1 << kPromoteRegs))) {
    // Promote fp regs
    for (size_t i = 0; (i < fp_reg_count_size) && (fp_regs[i].count >= promotion_threshold); i++) {
      int low_sreg = fp_regs[i].s_reg & ~STARTING_WIDE_SREG;
      size_t p_map_idx = SRegToPMap(low_sreg);
      RegStorage reg = RegStorage::InvalidReg();
      if (promotion_map_[p_map_idx].fp_location != kLocPhysReg) {
        // TODO: break out the Thumb2-specific code.
        if (cu_->instruction_set == kThumb2) {
          bool wide = fp_regs[i].s_reg & STARTING_WIDE_SREG;
          if (wide) {
            if (promotion_map_[p_map_idx + 1].fp_location != kLocPhysReg) {
              // Ignore result - if can't alloc double may still be able to alloc singles.
              AllocPreservedDouble(low_sreg);
            }
            // Continue regardless of success - might still be able to grab a single.
            continue;
          } else {
            reg = AllocPreservedSingle(low_sreg);
          }
        } else {
          reg = AllocPreservedFpReg(low_sreg);
        }
        if (!reg.Valid()) {
          break;  // No more left
        }
      }
    }

    // Promote core regs
    for (size_t i = 0; (i < core_reg_count_size) &&
         (core_regs[i].count >= promotion_threshold); i++) {
      int low_sreg = core_regs[i].s_reg & ~STARTING_WIDE_SREG;
      size_t p_map_idx = SRegToPMap(low_sreg);
      if (promotion_map_[p_map_idx].core_location != kLocPhysReg) {
        RegStorage reg = AllocPreservedCoreReg(low_sreg);
        if (!reg.Valid()) {
          break;  // No more left
        }
      }
    }
  }

  // Now, update SSA names to new home locations
  for (int i = 0; i < mir_graph_->GetNumSSARegs(); i++) {
    RegLocation *curr = &mir_graph_->reg_location_[i];
    int p_map_idx = SRegToPMap(curr->s_reg_low);
    int reg_num = curr->fp ? promotion_map_[p_map_idx].fp_reg : promotion_map_[p_map_idx].core_reg;
    bool wide = curr->wide || (cu_->target64 && curr->ref);
    RegStorage reg = RegStorage::InvalidReg();
    if (curr->fp && promotion_map_[p_map_idx].fp_location == kLocPhysReg) {
      if (wide && cu_->instruction_set == kThumb2) {
        if (promotion_map_[p_map_idx + 1].fp_location == kLocPhysReg) {
          int high_reg = promotion_map_[p_map_idx + 1].fp_reg;
          // TODO: move target-specific restrictions out of here.
          if (((reg_num & 0x1) == 0) && ((reg_num + 1) == high_reg)) {
            reg = RegStorage::FloatSolo64(RegStorage::RegNum(reg_num) >> 1);
          }
        }
      } else {
        reg = wide ? RegStorage::FloatSolo64(reg_num) : RegStorage::FloatSolo32(reg_num);
      }
    } else if (!curr->fp && promotion_map_[p_map_idx].core_location == kLocPhysReg) {
      if (wide && !cu_->target64) {
        if (promotion_map_[p_map_idx + 1].core_location == kLocPhysReg) {
          int high_reg = promotion_map_[p_map_idx + 1].core_reg;
          reg = RegStorage(RegStorage::k64BitPair, reg_num, high_reg);
        }
      } else {
        reg = wide ? RegStorage::Solo64(reg_num) : RegStorage::Solo32(reg_num);
      }
    }
    if (reg.Valid()) {
      curr->reg = reg;
      curr->location = kLocPhysReg;
      curr->home = true;
    }
  }
  if (cu_->verbose) {
    DumpPromotionMap();
  }
}

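// Note: the offset is computed via StackVisitor so that, as far as we can tell, generated code
// and the runtime's stack walker agree on where each vreg lives in the quick frame.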
/* Returns sp-relative offset in bytes for a VReg */
int Mir2Lir::VRegOffset(int v_reg) {
  const DexFile::CodeItem* code_item = mir_graph_->GetCurrentDexCompilationUnit()->GetCodeItem();
  return StackVisitor::GetVRegOffsetFromQuickCode(code_item, core_spill_mask_,
                                                  fp_spill_mask_, frame_size_, v_reg,
                                                  cu_->instruction_set);
}

/* Returns sp-relative offset in bytes for a SReg */
int Mir2Lir::SRegOffset(int s_reg) {
  return VRegOffset(mir_graph_->SRegToVReg(s_reg));
}

/* Mark register usage state and return long retloc */
RegLocation Mir2Lir::GetReturnWide(RegisterClass reg_class) {
  RegLocation res;
  switch (reg_class) {
    case kRefReg: LOG(FATAL); break;
    case kFPReg: res = LocCReturnDouble(); break;
    default: res = LocCReturnWide(); break;
  }
  Clobber(res.reg);
  LockTemp(res.reg);
  MarkWide(res.reg);
  CheckRegLocation(res);
  return res;
}

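/* Mark register usage state and return the non-wide (or reference) retloc */
// Illustrative use after emitting a call (a hedged sketch; the real invoke code is more
// involved and varies per backend):
//
//   RegLocation rl_result = GetReturn(ShortyToRegClass(shorty[0]));
//   StoreValue(rl_dest, rl_result);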
RegLocation Mir2Lir::GetReturn(RegisterClass reg_class) {
  RegLocation res;
  switch (reg_class) {
    case kRefReg: res = LocCReturnRef(); break;
    case kFPReg: res = LocCReturnFloat(); break;
    default: res = LocCReturn(); break;
  }
  Clobber(res.reg);
  if (cu_->instruction_set == kMips || cu_->instruction_set == kMips64) {
    MarkInUse(res.reg);
  } else {
    LockTemp(res.reg);
  }
  CheckRegLocation(res);
  return res;
}

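/* Top-level "simple" register allocation: promote, optionally dump, then set the frame size. */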
void Mir2Lir::SimpleRegAlloc() {
  DoPromotion();

  if (cu_->verbose && !(cu_->disable_opt & (1 << kPromoteRegs))) {
    LOG(INFO) << "After Promotion";
    mir_graph_->DumpRegLocTable(mir_graph_->reg_location_, mir_graph_->GetNumSSARegs());
  }

  /* Set the frame size */
  frame_size_ = ComputeFrameSize();
}

/*
 * Get the "real" sreg number associated with an s_reg slot.  In general,
 * s_reg values passed through codegen are the SSA names created by
 * dataflow analysis and refer to slot numbers in the mir_graph_->reg_location
 * array.  However, renaming is accomplished by simply replacing RegLocation
 * entries in the reglocation[] array.  Therefore, when location
 * records for operands are first created, we need to ask the locRecord
 * identified by the dataflow pass what its new name is.
 */
int Mir2Lir::GetSRegHi(int lowSreg) {
  return (lowSreg == INVALID_SREG) ? INVALID_SREG : lowSreg + 1;
}

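// Conservative liveness query: for now every s_reg is reported as live-out, which is always
// safe but may inhibit some optimizations.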
bool Mir2Lir::LiveOut(int s_reg ATTRIBUTE_UNUSED) {
  // For now.
  return true;
}

}  // namespace art