/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex/dataflow_iterator-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "mir_to_lir-inl.h"
#include "primitive.h"
#include "thread-inl.h"

namespace art {

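// Maps one character of a method shorty to a register class. For example, with shorty "DLI"
// the return value ('D') is handled in kFPReg while the two arguments ('L' and 'I') are
// handled in kRefReg and kCoreReg respectively.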
RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
  RegisterClass res;
  switch (shorty_type) {
    case 'L':
      res = kRefReg;
      break;
    case 'F':
      // Expected fallthrough.
    case 'D':
      res = kFPReg;
      break;
    default:
      res = kCoreReg;
  }
  return res;
}

RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
  RegisterClass res;
  if (loc.fp) {
    DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
    res = kFPReg;
  } else if (loc.ref) {
    res = kRefReg;
  } else {
    res = kCoreReg;
  }
  return res;
}

void Mir2Lir::LockArg(int in_position, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  if (reg_arg_low.Valid()) {
    LockTemp(reg_arg_low);
  }
  if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) {
    LockTemp(reg_arg_high);
  }
}
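// Note: LockArg() only pins the physical registers (if any) that already hold the incoming
// argument; the value itself is materialized by LoadArg()/LoadArgDirect() below, which fall
// back to loading from the stack when the argument was not mapped to a register.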

// TODO: simplify when 32-bit targets go hard-float.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * A call on x86 moves the stack pointer to push the return address. Thus, we add
     * another 4 bytes to locate the caller's out VR (the callee's in VR).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * A call on x86-64 moves the stack pointer to push the return address. Thus, we add
     * another 8 bytes to locate the caller's out VR (the callee's in VR).
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetPtrReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}

// TODO: simplify when 32-bit targets go hard-float.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * A call on x86 moves the stack pointer to push the return address. Thus, we add
     * another 4 bytes to locate the caller's out VR (the callee's in VR).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * A call on x86-64 moves the stack pointer to push the return address. Thus, we add
     * another 8 bytes to locate the caller's out VR (the callee's in VR).
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
      }
      return;
    }

    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
    }
  }
}

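// Fast path for a simple instance field getter on "this" (e.g. "int getX() { return x_; }"):
// loads the receiver argument, loads the field and moves the value into the return register
// without building a full frame. Returns false if the pattern cannot be handled here.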
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
                            : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (IsRef(size)) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (IsWide(size)) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}

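// Fast path for a simple instance field setter on "this" (e.g. "void setX(int x) { x_ = x; }"):
// loads the receiver and value arguments, stores to the field and marks the GC card when a
// reference is written. Returns false for patterns this fast path does not cover.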
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  LockArg(data.src_arg, IsWide(size));
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
  if (IsRef(size)) {
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (IsRef(size)) {
    MarkGCCard(reg_src, reg_obj);
  }
  return true;
}

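// Fast path for an identity method that simply returns one of its arguments
// (e.g. "long same(long x) { return x; }"): the argument is copied or loaded directly into
// the return location.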
bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
  const InlineReturnArgData& data = special.d.return_data;
  bool wide = (data.is_wide != 0u);

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.arg, wide);
  RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
  LoadArgDirect(data.arg, rl_dest);
  return true;
}

/*
 * Special-case code generation for simple non-throwing leaf methods.
 */
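// The supported patterns are: empty (void) methods, methods returning a non-wide constant,
// methods returning one of their arguments, and simple instance getters/setters on "this".
// On success the method is emitted with no spills and a zero-sized frame.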
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}

/*
 * Target-independent code generation. Use only high-level
 * load/store utilities here, or target-dependent genXX() handlers
 * when necessary.
 */
void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
  RegLocation rl_src[3];
  RegLocation rl_dest = mir_graph_->GetBadLoc();
  RegLocation rl_result = mir_graph_->GetBadLoc();
  const Instruction::Code opcode = mir->dalvikInsn.opcode;
  const int opt_flags = mir->optimization_flags;
  const uint32_t vB = mir->dalvikInsn.vB;
  const uint32_t vC = mir->dalvikInsn.vC;
  DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
                                << std::hex << current_dalvik_offset_;

  // Prep Src and Dest locations.
  int next_sreg = 0;
  int next_loc = 0;
  uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
  rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
  if (attrs & DF_UA) {
    if (attrs & DF_A_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UB) {
    if (attrs & DF_B_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UC) {
    if (attrs & DF_C_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
    }
  }
  if (attrs & DF_DA) {
    if (attrs & DF_A_WIDE) {
      rl_dest = mir_graph_->GetDestWide(mir);
    } else {
      rl_dest = mir_graph_->GetDest(mir);
    }
  }
  switch (opcode) {
    case Instruction::NOP:
      break;

    case Instruction::MOVE_EXCEPTION:
      GenMoveException(rl_dest);
      break;

    case Instruction::RETURN_VOID:
      if (((cu_->access_flags & kAccConstructor) != 0) &&
          cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
                                                           cu_->class_def_idx)) {
        GenMemBarrier(kStoreStore);
      }
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      break;

    case Instruction::RETURN_OBJECT:
      DCHECK(rl_src[0].ref);
      FALLTHROUGH_INTENDED;
    case Instruction::RETURN:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::RETURN_WIDE:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::MOVE_RESULT_WIDE:
      StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_OBJECT:
      StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      StoreValueWide(rl_dest, rl_src[0]);
      break;

    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      GenConst(rl_dest, vB);
      break;

    case Instruction::CONST_HIGH16:
      GenConst(rl_dest, vB << 16);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
      break;

    case Instruction::CONST_WIDE:
      GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      rl_result = EvalLoc(rl_dest, kAnyReg, true);
      LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
      StoreValueWide(rl_dest, rl_result);
      break;

    case Instruction::MONITOR_ENTER:
      GenMonitorEnter(opt_flags, rl_src[0]);
      break;

    case Instruction::MONITOR_EXIT:
      GenMonitorExit(opt_flags, rl_src[0]);
      break;

    case Instruction::CHECK_CAST: {
      GenCheckCast(mir->offset, vB, rl_src[0]);
      break;
    }
    case Instruction::INSTANCE_OF:
      GenInstanceof(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::NEW_INSTANCE:
      GenNewInstance(vB, rl_dest);
      break;

    case Instruction::THROW:
      GenThrow(rl_src[0]);
      break;

    case Instruction::ARRAY_LENGTH: {
      int len_offset;
      len_offset = mirror::Array::LengthOffset().Int32Value();
      rl_src[0] = LoadValue(rl_src[0], kRefReg);
      GenNullCheck(rl_src[0].reg, opt_flags);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
      MarkPossibleNullPointerException(opt_flags);
      StoreValue(rl_dest, rl_result);
      break;
    }
    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      GenConstString(vB, rl_dest);
      break;

    case Instruction::CONST_CLASS:
      GenConstClass(vB, rl_dest);
      break;

    case Instruction::FILL_ARRAY_DATA:
      GenFillArrayData(mir, vB, rl_src[0]);
      break;

    case Instruction::FILLED_NEW_ARRAY:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   false /* not range */));
      break;

    case Instruction::FILLED_NEW_ARRAY_RANGE:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   true /* range */));
      break;

    case Instruction::NEW_ARRAY:
      GenNewArray(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      if (mir_graph_->IsBackedge(bb, bb->taken) &&
          (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
        GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
      } else {
        OpUnconditionalBranch(&label_list[bb->taken]);
      }
      break;

    case Instruction::PACKED_SWITCH:
      GenPackedSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::SPARSE_SWITCH:
      GenSparseSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::CMPL_FLOAT:
    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
      GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::CMP_LONG:
      GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE: {
      LIR* taken = &label_list[bb->taken];
      // Result known at compile time?
      if (rl_src[0].is_const && rl_src[1].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
                                       mir_graph_->ConstantValue(rl_src[1].orig_sreg));
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken);
      }
      break;
    }
    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ: {
      LIR* taken = &label_list[bb->taken];
      // Result known at compile time?
      if (rl_src[0].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareZeroAndBranch(opcode, rl_src[0], taken);
      }
      break;
    }

    case Instruction::AGET_WIDE:
      GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
      break;
    case Instruction::AGET_OBJECT:
      GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET:
      GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET_BOOLEAN:
      GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_BYTE:
      GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_CHAR:
      GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::AGET_SHORT:
      GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::APUT_WIDE:
      GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
      break;
    case Instruction::APUT:
      GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
      break;
    case Instruction::APUT_OBJECT: {
      bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
      bool is_safe = is_null;  // Always safe to store null.
      if (!is_safe) {
        // Check safety from verifier type information.
        const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
        is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
      }
      if (is_null || is_safe) {
        // Store of constant null doesn't require an assignability test and can be generated inline
        // without fixed register usage or a card mark.
        GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
      } else {
        GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
      }
      break;
    }
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
      break;
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
      GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
      break;

    case Instruction::IGET_OBJECT:
      GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET:
      GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_CHAR:
      GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_SHORT:
      GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BOOLEAN:
      GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BYTE:
      GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
      break;

    case Instruction::IPUT_WIDE:
      GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_OBJECT:
      GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT:
      GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_BYTE:
    case Instruction::IPUT_BOOLEAN:
      GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_CHAR:
      GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_SHORT:
      GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::SGET_OBJECT:
      GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
      break;

    case Instruction::SGET:
      GenSget(mir, rl_dest, k32, Primitive::kPrimInt);
      break;

    case Instruction::SGET_CHAR:
      GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
      break;

    case Instruction::SGET_SHORT:
      GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
      break;

    case Instruction::SGET_BOOLEAN:
      GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
      break;

    case Instruction::SGET_BYTE:
      GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
      break;

    case Instruction::SGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      GenSget(mir, rl_dest, k64, Primitive::kPrimLong);
      break;

    case Instruction::SPUT_OBJECT:
      GenSput(mir, rl_src[0], kReference);
      break;

    case Instruction::SPUT:
      GenSput(mir, rl_src[0], k32);
      break;

    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_BOOLEAN:
      GenSput(mir, rl_src[0], kUnsignedByte);
      break;

    case Instruction::SPUT_CHAR:
      GenSput(mir, rl_src[0], kUnsignedHalf);
      break;

    case Instruction::SPUT_SHORT:
      GenSput(mir, rl_src[0], kSignedHalf);
      break;

    case Instruction::SPUT_WIDE:
      GenSput(mir, rl_src[0], k64);
      break;

    case Instruction::INVOKE_STATIC_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
      if (!kLeafOptimization) {
        // If the invocation is not inlined, we can assume there is already a
        // suspend check at the return site.
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_STATIC:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_DIRECT:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_DIRECT_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_VIRTUAL:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_VIRTUAL_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_SUPER:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_SUPER_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_INTERFACE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_INTERFACE_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
      if (!kLeafOptimization) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::NEG_INT:
    case Instruction::NOT_INT:
      GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
      break;

    case Instruction::NEG_LONG:
    case Instruction::NOT_LONG:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
      break;

    case Instruction::NEG_FLOAT:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_DOUBLE:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::INT_TO_LONG:
      GenIntToLong(rl_dest, rl_src[0]);
      break;

    case Instruction::LONG_TO_INT:
      rl_src[0] = UpdateLocWide(rl_src[0]);
      rl_src[0] = NarrowRegLoc(rl_src[0]);
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_BYTE:
    case Instruction::INT_TO_SHORT:
    case Instruction::INT_TO_CHAR:
      GenIntNarrowing(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_FLOAT:
    case Instruction::INT_TO_DOUBLE:
    case Instruction::LONG_TO_FLOAT:
    case Instruction::LONG_TO_DOUBLE:
    case Instruction::FLOAT_TO_INT:
    case Instruction::FLOAT_TO_LONG:
    case Instruction::FLOAT_TO_DOUBLE:
    case Instruction::DOUBLE_TO_INT:
    case Instruction::DOUBLE_TO_LONG:
    case Instruction::DOUBLE_TO_FLOAT:
      GenConversion(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      if (rl_src[0].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[1],
                         mir_graph_->ConstantValue(rl_src[0].orig_sreg));
      } else if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0],
                         mir_graph_->ConstantValue(rl_src[1].orig_sreg));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      }
      break;

    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      }
      break;

    case Instruction::ADD_LONG:
    case Instruction::SUB_LONG:
    case Instruction::AND_LONG:
    case Instruction::OR_LONG:
    case Instruction::XOR_LONG:
    case Instruction::ADD_LONG_2ADDR:
    case Instruction::SUB_LONG_2ADDR:
    case Instruction::AND_LONG_2ADDR:
    case Instruction::OR_LONG_2ADDR:
    case Instruction::XOR_LONG_2ADDR:
      if (rl_src[0].is_const || rl_src[1].is_const) {
        GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::MUL_LONG:
    case Instruction::DIV_LONG:
    case Instruction::REM_LONG:
    case Instruction::MUL_LONG_2ADDR:
    case Instruction::DIV_LONG_2ADDR:
    case Instruction::REM_LONG_2ADDR:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      break;

    case Instruction::SHL_LONG:
    case Instruction::SHR_LONG:
    case Instruction::USHR_LONG:
    case Instruction::SHL_LONG_2ADDR:
    case Instruction::SHR_LONG_2ADDR:
    case Instruction::USHR_LONG_2ADDR:
      if (rl_src[1].is_const) {
        GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      } else {
        GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::DIV_FLOAT:
    case Instruction::DIV_FLOAT_2ADDR:
      if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::ADD_FLOAT:
    case Instruction::SUB_FLOAT:
    case Instruction::MUL_FLOAT:
    case Instruction::REM_FLOAT:
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::REM_FLOAT_2ADDR:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::DIV_DOUBLE:
    case Instruction::DIV_DOUBLE_2ADDR:
      if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::ADD_DOUBLE:
    case Instruction::SUB_DOUBLE:
    case Instruction::MUL_DOUBLE:
    case Instruction::REM_DOUBLE:
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE_2ADDR:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::RSUB_INT:
    case Instruction::ADD_INT_LIT16:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT_LIT16:
    case Instruction::AND_INT_LIT16:
    case Instruction::OR_INT_LIT16:
    case Instruction::XOR_INT_LIT16:
    case Instruction::ADD_INT_LIT8:
    case Instruction::RSUB_INT_LIT8:
    case Instruction::MUL_INT_LIT8:
    case Instruction::DIV_INT_LIT8:
    case Instruction::REM_INT_LIT8:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT_LIT8:
    case Instruction::XOR_INT_LIT8:
    case Instruction::SHL_INT_LIT8:
    case Instruction::SHR_INT_LIT8:
    case Instruction::USHR_INT_LIT8:
      GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
      break;

    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  DCHECK(CheckCorePoolSanity());
}  // NOLINT(readability/fn_size)

// Process extended MIR instructions
void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
    case kMirOpCopy: {
      RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
      RegLocation rl_dest = mir_graph_->GetDest(mir);
      StoreValue(rl_dest, rl_src);
      break;
    }
    case kMirOpFusedCmplFloat:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmpgFloat:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmplDouble:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpgDouble:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpLong:
      GenFusedLongCmpBranch(bb, mir);
      break;
    case kMirOpSelect:
      GenSelect(bb, mir);
      break;
    case kMirOpNullCheck: {
      RegLocation rl_obj = mir_graph_->GetSrc(mir, 0);
      rl_obj = LoadValue(rl_obj, kRefReg);
      // An explicit check is generated here because this opcode is not expected to trip the
      // implicit null checks (those only fire on an actual invalid access through the null
      // object).
      GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags);
      break;
    }
    case kMirOpPhi:
    case kMirOpNop:
    case kMirOpRangeCheck:
    case kMirOpDivZeroCheck:
    case kMirOpCheck:
    case kMirOpCheckPart2:
      // Ignore these known opcodes
      break;
    default:
      // Give the backends a chance to handle unknown extended MIR opcodes.
      GenMachineSpecificExtendedMethodMIR(bb, mir);
      break;
  }
}

void Mir2Lir::GenPrintLabel(MIR* mir) {
  // Mark the beginning of a Dalvik instruction for line tracking.
  if (cu_->verbose) {
    char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
    MarkBoundary(mir->offset, inst_str);
  }
}

// Handle the content in each basic block.
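// Emits the block label, the entry or exit sequence for entry/exit blocks, and then lowers
// each MIR in program order, resetting the temp register pool before every instruction.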
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001176bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001177 if (bb->block_type == kDead) return false;
1178 current_dalvik_offset_ = bb->start_offset;
1179 MIR* mir;
1180 int block_id = bb->id;
1181
1182 block_label_list_[block_id].operands[0] = bb->start_offset;
1183
1184 // Insert the block label.
1185 block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
buzbeeb48819d2013-09-14 16:15:25 -07001186 block_label_list_[block_id].flags.fixup = kFixupLabel;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001187 AppendLIR(&block_label_list_[block_id]);
1188
1189 LIR* head_lir = NULL;
1190
1191 // If this is a catch block, export the start address.
1192 if (bb->catch_entry) {
1193 head_lir = NewLIR0(kPseudoExportedPC);
1194 }
1195
1196 // Free temp registers and reset redundant store tracking.
buzbeeba574512014-05-12 15:13:16 -07001197 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001198
  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    int start_vreg = mir_graph_->GetFirstInVR();
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg], mir_graph_->GetMethodLoc());
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

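    // kMirOpCheck marks the "check" half of a throwing instruction whose "work" half lives in
    // mir->meta.throw_insn. Fold the work half's opcode, flags, meta and SSA rep back into the
    // check half so the real instruction is generated (and can throw) here, and leave a
    // kMirOpCheckPart2 placeholder behind that points back at this MIR.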
    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->optimization_flags = work_half->optimization_flags;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

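    // Extended (pseudo) MIR opcodes have no Dalvik counterpart; dispatch them to the extended handler.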
    if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}

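// Generate a "special" (fast-path) implementation of the method described by |special|,
// working from its first DalvikByteCode block. Returns whether code was generated.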
bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
  cu_->NewTimingSplit("SpecialMIR2LIR");
  // Find the first DalvikByteCode block.
  DCHECK_EQ(mir_graph_->GetNumReachableBlocks(), mir_graph_->GetDfsOrder().size());
  BasicBlock* bb = NULL;
  for (BasicBlockId dfs_id : mir_graph_->GetDfsOrder()) {
    BasicBlock* candidate = mir_graph_->GetBasicBlock(dfs_id);
    if (candidate->block_type == kDalvikByteCode) {
      bb = candidate;
      break;
    }
  }
  if (bb == NULL) {
    return false;
  }
  DCHECK_EQ(bb->start_offset, 0);
  DCHECK(bb->first_mir_insn != NULL);

  // Get the first instruction.
  MIR* mir = bb->first_mir_insn;

  // Free temp registers and reset redundant store tracking.
  ResetRegPool();
  ResetDefTracking();
  ClobberAllTemps();

  return GenSpecialCase(bb, mir, special);
}

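// Convert the method's MIR to LIR, one basic block at a time, in DFS pre-order.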
void Mir2Lir::MethodMIR2LIR() {
  cu_->NewTimingSplit("MIR2LIR");

  // Hold the labels of each block.
  block_label_list_ =
      static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
                                      kArenaAllocLIR));

  PreOrderDfsIterator iter(mir_graph_);
  BasicBlock* curr_bb = iter.Next();
  BasicBlock* next_bb = iter.Next();
  while (curr_bb != NULL) {
    MethodBlockCodeGen(curr_bb);
    // If the fall_through block is no longer laid out consecutively, drop in a branch.
    BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
    if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
      OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
    }
    curr_bb = next_bb;
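    // Skip dead blocks when picking the next laid-out block; they emit no code.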
    do {
      next_bb = iter.Next();
    } while ((next_bb != NULL) && (next_bb->block_type == kDead));
  }
  HandleSlowPaths();
}

//
// LIR Slow Path
//

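// Bind a slow path's out-of-line code: restore the Dalvik PC captured when the slow path was
// created, emit the target label (pseudo-opcode |opcode|), and point the fast-path branch at it.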
LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
  m2l_->SetCurrentDexPc(current_dex_pc_);
  LIR* target = m2l_->NewLIR0(opcode);
  fromfast_->target = target;
  return target;
}

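// Verify that |rs| satisfies the requested wideness/reference/FP constraints. Depending on
// |fail| and |report|, a violation either CHECK-fails or is merely logged.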
void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
                                  bool fail, bool report)
    const {
  if (rs.Valid()) {
    if (ref == RefCheck::kCheckRef) {
      if (cu_->target64 && !rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for ref.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for ref.";
        }
      }
    }
    if (wide == WidenessCheck::kCheckWide) {
      if (!rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for wide.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for wide.";
        }
      }
    }
    // A tighter check would be nice, but for now soft-float will not check float at all.
    if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
      if (!rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage not float for fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not float for fp.";
        }
      }
    } else if (fp == FPCheck::kCheckNotFP) {
      if (rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage float for not-fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage float for not-fp.";
        }
      }
    }
  }
}

void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
  // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
  // will be stored.
  CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
      rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
}

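// Not supported by the generic backend; targets that need it provide their own implementation.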
size_t Mir2Lir::GetInstructionOffset(LIR* lir) {
  UNUSED(lir);
  UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()";
  UNREACHABLE();
}

}  // namespace art