/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex/dataflow_iterator-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "mir_to_lir-inl.h"
#include "object_utils.h"
#include "thread-inl.h"

namespace art {

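// Map a shorty type character to the register class that holds values of that type:
// 'L' (reference) -> kRefReg, 'F' and 'D' -> kFPReg, everything else -> kCoreReg.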
RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
  RegisterClass res;
  switch (shorty_type) {
    case 'L':
      res = kRefReg;
      break;
    case 'F':
      // Expected fallthrough.
    case 'D':
      res = kFPReg;
      break;
    default:
      res = kCoreReg;
  }
  return res;
}

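// Pick the register class for a RegLocation based on its ref/fp flags; at most one of the
// two may be set.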
RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
  RegisterClass res;
  if (loc.fp) {
    DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
    res = kFPReg;
  } else if (loc.ref) {
    res = kRefReg;
  } else {
    res = kCoreReg;
  }
  return res;
}

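// Reserve the physical register(s) that the argument at in_position is mapped to (both
// halves for a wide argument) so the temp allocator will not hand them out.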
void Mir2Lir::LockArg(int in_position, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  if (reg_arg_low.Valid()) {
    LockTemp(reg_arg_low);
  }
  if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) {
    LockTemp(reg_arg_high);
  }
}

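// Return the argument at in_position in a register of the requested register class,
// copying between registers or loading from the caller's out area on the stack as needed.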
// TODO: simplify when 32-bit targets go hard-float.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * On x86, the call pushes the return address, moving the stack pointer.
     * Thus, we add another 4 bytes to get from the caller's outs to the callee's ins.
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * On x86-64, the call pushes the return address, moving the stack pointer.
     * Thus, we add another 8 bytes to get from the caller's outs to the callee's ins.
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}

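// Load the argument at in_position directly into the register(s) described by rl_dest,
// using the argument's mapped registers where available and the stack otherwise.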
// TODO: simplify when 32-bit targets go hard-float.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * On x86, the call pushes the return address, moving the stack pointer.
     * Thus, we add another 4 bytes to get from the caller's outs to the callee's ins.
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * On x86-64, the call pushes the return address, moving the stack pointer.
     * Thus, we add another 8 bytes to get from the caller's outs to the callee's ins.
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
      }
      return;
    }

    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
    }
  }
}

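// Special-case codegen for a simple getter that returns this.field. Returns false if the
// pattern cannot be handled here (the object is not "this", or the required volatile
// load is not supported).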
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (ref) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}

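// Special-case codegen for a simple setter that stores an argument into this.field.
// Returns false if the pattern cannot be handled here (the object is not "this", the
// setter returns an argument, or the required volatile store is not supported).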
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  LockArg(data.src_arg, wide);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide);
  if (ref) {
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (ref) {
    MarkGCCard(reg_src, reg_obj);
  }
  return true;
}

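// Special-case codegen for a method that simply returns one of its arguments.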
bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
  const InlineReturnArgData& data = special.d.return_data;
  bool wide = (data.is_wide != 0u);

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.arg, wide);
  RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
  LoadArgDirect(data.arg, rl_dest);
  return true;
}

/*
 * Special-case code generation for simple non-throwing leaf methods.
 */
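// Returns true if special-case code was generated; in that case the spill masks, vmap
// tables and frame size are cleared, since the simplified method needs no frame.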
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}

/*
 * Target-independent code generation. Use only high-level
 * load/store utilities here, or target-dependent genXX() handlers
 * when necessary.
 */
void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
  RegLocation rl_src[3];
  RegLocation rl_dest = mir_graph_->GetBadLoc();
  RegLocation rl_result = mir_graph_->GetBadLoc();
  Instruction::Code opcode = mir->dalvikInsn.opcode;
  int opt_flags = mir->optimization_flags;
  uint32_t vB = mir->dalvikInsn.vB;
  uint32_t vC = mir->dalvikInsn.vC;
  DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
      << std::hex << current_dalvik_offset_;

  // Prep Src and Dest locations.
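  // Based on the opcode's dataflow attributes, gather up to three source RegLocations
  // (wide uses consume two SSA names) and the destination, if any.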
  int next_sreg = 0;
  int next_loc = 0;
  uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
  rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
  if (attrs & DF_UA) {
    if (attrs & DF_A_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UB) {
    if (attrs & DF_B_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UC) {
    if (attrs & DF_C_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
    }
  }
  if (attrs & DF_DA) {
    if (attrs & DF_A_WIDE) {
      rl_dest = mir_graph_->GetDestWide(mir);
    } else {
      rl_dest = mir_graph_->GetDest(mir);
    }
  }
  switch (opcode) {
    case Instruction::NOP:
      break;

    case Instruction::MOVE_EXCEPTION:
      GenMoveException(rl_dest);
      break;

    case Instruction::RETURN_VOID:
      if (((cu_->access_flags & kAccConstructor) != 0) &&
          cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
                                                           cu_->class_def_idx)) {
        GenMemBarrier(kStoreStore);
      }
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      break;

    case Instruction::RETURN_OBJECT:
      DCHECK(rl_src[0].ref);
      // Intentional fallthrough.
    case Instruction::RETURN:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::RETURN_WIDE:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::MOVE_RESULT_WIDE:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_OBJECT:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      StoreValueWide(rl_dest, rl_src[0]);
      break;

    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      GenConst(rl_dest, vB);
      break;

    case Instruction::CONST_HIGH16:
      GenConst(rl_dest, vB << 16);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
      break;

    case Instruction::CONST_WIDE:
      GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      rl_result = EvalLoc(rl_dest, kAnyReg, true);
      LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
      StoreValueWide(rl_dest, rl_result);
      break;

    case Instruction::MONITOR_ENTER:
      GenMonitorEnter(opt_flags, rl_src[0]);
      break;

    case Instruction::MONITOR_EXIT:
      GenMonitorExit(opt_flags, rl_src[0]);
      break;

    case Instruction::CHECK_CAST: {
      GenCheckCast(mir->offset, vB, rl_src[0]);
      break;
    }
    case Instruction::INSTANCE_OF:
      GenInstanceof(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::NEW_INSTANCE:
      GenNewInstance(vB, rl_dest);
      break;

    case Instruction::THROW:
      GenThrow(rl_src[0]);
      break;

    case Instruction::ARRAY_LENGTH:
      int len_offset;
      len_offset = mirror::Array::LengthOffset().Int32Value();
      rl_src[0] = LoadValue(rl_src[0], kRefReg);
      GenNullCheck(rl_src[0].reg, opt_flags);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
      MarkPossibleNullPointerException(opt_flags);
      StoreValue(rl_dest, rl_result);
      break;

    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      GenConstString(vB, rl_dest);
      break;

    case Instruction::CONST_CLASS:
      GenConstClass(vB, rl_dest);
      break;

    case Instruction::FILL_ARRAY_DATA:
      GenFillArrayData(vB, rl_src[0]);
      break;

    case Instruction::FILLED_NEW_ARRAY:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   false /* not range */));
      break;

    case Instruction::FILLED_NEW_ARRAY_RANGE:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   true /* range */));
      break;

    case Instruction::NEW_ARRAY:
      GenNewArray(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      if (mir_graph_->IsBackedge(bb, bb->taken) &&
          (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
        GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
      } else {
        OpUnconditionalBranch(&label_list[bb->taken]);
      }
      break;

    case Instruction::PACKED_SWITCH:
      GenPackedSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::SPARSE_SWITCH:
      GenSparseSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::CMPL_FLOAT:
    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
      GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::CMP_LONG:
      GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const && rl_src[1].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
                                       mir_graph_->ConstantValue(rl_src[1].orig_sreg));
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
      }
      break;
    }

    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
      }
      break;
    }

    case Instruction::AGET_WIDE:
      GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
      break;
    case Instruction::AGET_OBJECT:
      GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET:
      GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET_BOOLEAN:
      GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_BYTE:
      GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_CHAR:
      GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::AGET_SHORT:
      GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::APUT_WIDE:
      GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
      break;
    case Instruction::APUT:
      GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
      break;
    case Instruction::APUT_OBJECT: {
      bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
      bool is_safe = is_null;  // Always safe to store null.
      if (!is_safe) {
        // Check safety from verifier type information.
        const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
        is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
      }
      if (is_null || is_safe) {
        // Store of constant null doesn't require an assignability test and can be generated inline
        // without fixed register usage or a card mark.
        GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
      } else {
        GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
      }
      break;
    }
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
      break;
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
      GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
      break;

    case Instruction::IGET_OBJECT:
      GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true);
      break;

    case Instruction::IGET_WIDE:
      GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);
      break;

    case Instruction::IGET:
      GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_CHAR:
      GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_SHORT:
      GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_BOOLEAN:
    case Instruction::IGET_BYTE:
      GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IPUT_WIDE:
      GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);
      break;

    case Instruction::IPUT_OBJECT:
      GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true);
      break;

    case Instruction::IPUT:
      GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_BOOLEAN:
    case Instruction::IPUT_BYTE:
      GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_CHAR:
      GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_SHORT:
      GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::SGET_OBJECT:
      GenSget(mir, rl_dest, false, true);
      break;
    case Instruction::SGET:
    case Instruction::SGET_BOOLEAN:
    case Instruction::SGET_BYTE:
    case Instruction::SGET_CHAR:
    case Instruction::SGET_SHORT:
      GenSget(mir, rl_dest, false, false);
      break;

    case Instruction::SGET_WIDE:
      GenSget(mir, rl_dest, true, false);
      break;

    case Instruction::SPUT_OBJECT:
      GenSput(mir, rl_src[0], false, true);
      break;

    case Instruction::SPUT:
    case Instruction::SPUT_BOOLEAN:
    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_CHAR:
    case Instruction::SPUT_SHORT:
      GenSput(mir, rl_src[0], false, false);
      break;

    case Instruction::SPUT_WIDE:
      GenSput(mir, rl_src[0], true, false);
      break;

    case Instruction::INVOKE_STATIC_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        // If the invocation is not inlined, we can assume there is already a
        // suspend check at the return site
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_STATIC:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_DIRECT:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_DIRECT_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_VIRTUAL:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_VIRTUAL_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_SUPER:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_SUPER_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_INTERFACE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_INTERFACE_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::NEG_INT:
    case Instruction::NOT_INT:
      GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_LONG:
    case Instruction::NOT_LONG:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_FLOAT:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_DOUBLE:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::INT_TO_LONG:
      GenIntToLong(rl_dest, rl_src[0]);
      break;

    case Instruction::LONG_TO_INT:
      rl_src[0] = UpdateLocWide(rl_src[0]);
      rl_src[0] = NarrowRegLoc(rl_src[0]);
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_BYTE:
    case Instruction::INT_TO_SHORT:
    case Instruction::INT_TO_CHAR:
      GenIntNarrowing(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_FLOAT:
    case Instruction::INT_TO_DOUBLE:
    case Instruction::LONG_TO_FLOAT:
    case Instruction::LONG_TO_DOUBLE:
    case Instruction::FLOAT_TO_INT:
    case Instruction::FLOAT_TO_LONG:
    case Instruction::FLOAT_TO_DOUBLE:
    case Instruction::DOUBLE_TO_INT:
    case Instruction::DOUBLE_TO_LONG:
    case Instruction::DOUBLE_TO_FLOAT:
      GenConversion(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      if (rl_src[0].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[1],
                         mir_graph_->ConstantValue(rl_src[0].orig_sreg));
      } else if (rl_src[1].is_const &&
                 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0],
                         mir_graph_->ConstantValue(rl_src[1].orig_sreg));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::ADD_LONG:
    case Instruction::SUB_LONG:
    case Instruction::AND_LONG:
    case Instruction::OR_LONG:
    case Instruction::XOR_LONG:
    case Instruction::ADD_LONG_2ADDR:
    case Instruction::SUB_LONG_2ADDR:
    case Instruction::AND_LONG_2ADDR:
    case Instruction::OR_LONG_2ADDR:
    case Instruction::XOR_LONG_2ADDR:
      if (rl_src[0].is_const || rl_src[1].is_const) {
        GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
        break;
      }
      // Note: intentional fallthrough.

    case Instruction::MUL_LONG:
    case Instruction::DIV_LONG:
    case Instruction::REM_LONG:
    case Instruction::MUL_LONG_2ADDR:
    case Instruction::DIV_LONG_2ADDR:
    case Instruction::REM_LONG_2ADDR:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::SHL_LONG:
    case Instruction::SHR_LONG:
    case Instruction::USHR_LONG:
    case Instruction::SHL_LONG_2ADDR:
    case Instruction::SHR_LONG_2ADDR:
    case Instruction::USHR_LONG_2ADDR:
      if (rl_src[1].is_const) {
        GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      } else {
        GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::ADD_FLOAT:
    case Instruction::SUB_FLOAT:
    case Instruction::MUL_FLOAT:
    case Instruction::DIV_FLOAT:
    case Instruction::REM_FLOAT:
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::DIV_FLOAT_2ADDR:
    case Instruction::REM_FLOAT_2ADDR:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::ADD_DOUBLE:
    case Instruction::SUB_DOUBLE:
    case Instruction::MUL_DOUBLE:
    case Instruction::DIV_DOUBLE:
    case Instruction::REM_DOUBLE:
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::DIV_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE_2ADDR:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::RSUB_INT:
    case Instruction::ADD_INT_LIT16:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT_LIT16:
    case Instruction::AND_INT_LIT16:
    case Instruction::OR_INT_LIT16:
    case Instruction::XOR_INT_LIT16:
    case Instruction::ADD_INT_LIT8:
    case Instruction::RSUB_INT_LIT8:
    case Instruction::MUL_INT_LIT8:
    case Instruction::DIV_INT_LIT8:
    case Instruction::REM_INT_LIT8:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT_LIT8:
    case Instruction::XOR_INT_LIT8:
    case Instruction::SHL_INT_LIT8:
    case Instruction::SHR_INT_LIT8:
    case Instruction::USHR_INT_LIT8:
      GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
      break;

    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  DCHECK(CheckCorePoolSanity());
}  // NOLINT(readability/fn_size)

// Process extended MIR instructions
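// These pseudo-opcodes are introduced by earlier MIR-level passes; opcodes not handled
// here are passed on to the target backend via GenMachineSpecificExtendedMethodMIR().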
void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
    case kMirOpCopy: {
      RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
      RegLocation rl_dest = mir_graph_->GetDest(mir);
      StoreValue(rl_dest, rl_src);
      break;
    }
    case kMirOpFusedCmplFloat:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmpgFloat:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmplDouble:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpgDouble:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpLong:
      GenFusedLongCmpBranch(bb, mir);
      break;
    case kMirOpSelect:
      GenSelect(bb, mir);
      break;
    case kMirOpPhi:
    case kMirOpNop:
    case kMirOpNullCheck:
    case kMirOpRangeCheck:
    case kMirOpDivZeroCheck:
    case kMirOpCheck:
    case kMirOpCheckPart2:
      // Ignore these known opcodes
      break;
    default:
      // Give the backends a chance to handle unknown extended MIR opcodes.
      GenMachineSpecificExtendedMethodMIR(bb, mir);
      break;
  }
}

void Mir2Lir::GenPrintLabel(MIR* mir) {
  // Mark the beginning of a Dalvik instruction for line tracking.
  if (cu_->verbose) {
    char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
    MarkBoundary(mir->offset, inst_str);
  }
}

// Handle the content in each basic block.
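// Emits the block label, the entry/exit sequences for entry and exit blocks, then lowers
// each MIR in turn, and finally runs local LIR optimizations over the block's code.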
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIRGraph::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}

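// Attempt to compile the method as one of the special (simple leaf) cases. Returns true if
// GenSpecialCase() generated code for the first bytecode block, false otherwise.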
bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
  cu_->NewTimingSplit("SpecialMIR2LIR");
  // Find the first DalvikByteCode block.
  int num_reachable_blocks = mir_graph_->GetNumReachableBlocks();
  BasicBlock* bb = NULL;
  for (int idx = 0; idx < num_reachable_blocks; idx++) {
    // TODO: no direct access of growable lists.
    int dfs_index = mir_graph_->GetDfsOrder()->Get(idx);
    bb = mir_graph_->GetBasicBlock(dfs_index);
    if (bb->block_type == kDalvikByteCode) {
      break;
    }
  }
  if (bb == NULL) {
    return false;
  }
  DCHECK_EQ(bb->start_offset, 0);
  DCHECK(bb->first_mir_insn != NULL);

  // Get the first instruction.
  MIR* mir = bb->first_mir_insn;

  // Free temp registers and reset redundant store tracking.
  ResetRegPool();
  ResetDefTracking();
  ClobberAllTemps();

  return GenSpecialCase(bb, mir, special);
}

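// Main entry for MIR-to-LIR conversion: walks the basic blocks in pre-order DFS order,
// generating code for each, inserting a branch whenever the fall-through block is not laid
// out consecutively, and finally emitting any pending slow paths.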
void Mir2Lir::MethodMIR2LIR() {
  cu_->NewTimingSplit("MIR2LIR");

  // Hold the labels of each block.
  block_label_list_ =
      static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
                                      kArenaAllocLIR));

  PreOrderDfsIterator iter(mir_graph_);
  BasicBlock* curr_bb = iter.Next();
  BasicBlock* next_bb = iter.Next();
  while (curr_bb != NULL) {
    MethodBlockCodeGen(curr_bb);
    // If the fall_through block is no longer laid out consecutively, drop in a branch.
    BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
    if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
      OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
    }
    curr_bb = next_bb;
    do {
      next_bb = iter.Next();
    } while ((next_bb != NULL) && (next_bb->block_type == kDead));
  }
  HandleSlowPaths();
}

//
// LIR Slow Path
//

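// Emit the entry label for a slow path: restore the dex pc of the originating instruction,
// create a label LIR with the given pseudo opcode, and point the fast-path branch
// (fromfast_) at it.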
LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
  m2l_->SetCurrentDexPc(current_dex_pc_);
  LIR* target = m2l_->NewLIR0(opcode);
  fromfast_->target = target;
  return target;
}

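// Debug sanity checks on a RegStorage: verify 64-bit storage for references on 64-bit
// targets and for wide values, and check FP/non-FP expectations. Depending on the flags,
// a violation either aborts (fail) or logs a warning (report).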
void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
                                  bool fail, bool report) const {
  if (rs.Valid()) {
    if (ref == RefCheck::kCheckRef) {
      if (cu_->target64 && !rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for ref.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for ref.";
        }
      }
    }
    if (wide == WidenessCheck::kCheckWide) {
      if (!rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for wide.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for wide.";
        }
      }
    }
    // A tighter check would be nice, but for now soft-float will not check float at all.
    if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
      if (!rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage not float for fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not float for fp.";
        }
      }
    } else if (fp == FPCheck::kCheckNotFP) {
      if (rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage float for not-fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage float for not-fp.";
        }
      }
    }
  }
}

void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
  // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
  // will be stored.
  CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
      rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
}

}  // namespace art