/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mir_to_lir-inl.h"

#include "dex/dataflow_iterator-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "driver/compiler_driver.h"
#include "primitive.h"
#include "thread-inl.h"

namespace art {

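// Slow path for the explicit suspend check emitted in "special" (simplified) methods when
// kLeafOptimization is disabled: it builds a minimal frame, spills the incoming arguments,
// calls the kQuickTestSuspend runtime helper, restores only the arguments recorded via
// PreserveArg(), pops the frame and branches back to the fast-path continuation label.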
class Mir2Lir::SpecialSuspendCheckSlowPath : public Mir2Lir::LIRSlowPath {
 public:
  SpecialSuspendCheckSlowPath(Mir2Lir* m2l, LIR* branch, LIR* cont)
      : LIRSlowPath(m2l, branch, cont),
        num_used_args_(0u) {
  }

  void PreserveArg(int in_position) {
    // Avoid duplicates.
    for (size_t i = 0; i != num_used_args_; ++i) {
      if (used_args_[i] == in_position) {
        return;
      }
    }
    DCHECK_LT(num_used_args_, kMaxArgsToPreserve);
    used_args_[num_used_args_] = in_position;
    ++num_used_args_;
  }

  void Compile() OVERRIDE {
    m2l_->ResetRegPool();
    m2l_->ResetDefTracking();
    GenerateTargetLabel(kPseudoSuspendTarget);

    m2l_->LockCallTemps();

    // Generate frame.
    m2l_->GenSpecialEntryForSuspend();

    // Spill all args.
    for (size_t i = 0, end = m2l_->in_to_reg_storage_mapping_.GetEndMappedIn(); i < end;
         i += m2l_->in_to_reg_storage_mapping_.GetShorty(i).IsWide() ? 2u : 1u) {
      m2l_->SpillArg(i);
    }

    m2l_->FreeCallTemps();

    // Do the actual suspend call to runtime.
    m2l_->CallRuntimeHelper(kQuickTestSuspend, true);

    m2l_->LockCallTemps();

    // Unspill used regs. (Don't unspill unused args.)
    for (size_t i = 0; i != num_used_args_; ++i) {
      m2l_->UnspillArg(used_args_[i]);
    }

    // Pop the frame.
    m2l_->GenSpecialExitForSuspend();

    // Branch to the continue label.
    DCHECK(cont_ != nullptr);
    m2l_->OpUnconditionalBranch(cont_);

    m2l_->FreeCallTemps();
  }

 private:
  static constexpr size_t kMaxArgsToPreserve = 2u;
  size_t num_used_args_;
  int used_args_[kMaxArgsToPreserve];
};

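// Map a shorty type character to the register class used to hold it: 'L' (reference) maps
// to kRefReg, 'F' and 'D' map to kFPReg, and all remaining (integral) types to kCoreReg.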
RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
  RegisterClass res;
  switch (shorty_type) {
    case 'L':
      res = kRefReg;
      break;
    case 'F':
      // Expected fallthrough.
    case 'D':
      res = kFPReg;
      break;
    default:
      res = kCoreReg;
  }
  return res;
}

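// Lock the physical register (if any) that holds the argument at |in_position| so it is not
// handed out as a temp while the argument is still needed.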
void Mir2Lir::LockArg(size_t in_position) {
  RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);

  if (reg_arg.Valid()) {
    LockTemp(reg_arg);
  }
}

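// Materialize the argument at |in_position| in a register of |reg_class|. Arguments that are
// not mapped to a register (or whose wide value is only half-mapped) are loaded from their
// stack slot; otherwise the mapped register is copied to the requested class when needed.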
RegStorage Mir2Lir::LoadArg(size_t in_position, RegisterClass reg_class, bool wide) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);

  // TODO: REVISIT: This adds a spill of low part while we could just copy it.
  if (reg_arg.Valid() && wide && (reg_arg.GetWideKind() == kNotWide)) {
    // For wide register we've got only half of it.
    // Flush it to memory then.
    StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
    reg_arg = RegStorage::InvalidReg();
  }

  if (!reg_arg.Valid()) {
    reg_arg = wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
    LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, wide ? k64 : k32, kNotVolatile);
  } else {
    // Check if we need to copy the arg to a different reg_class.
    if (!RegClassMatches(reg_class, reg_arg)) {
      if (wide) {
        RegStorage new_reg = AllocTypedTempWide(false, reg_class);
        OpRegCopyWide(new_reg, reg_arg);
        reg_arg = new_reg;
      } else {
        RegStorage new_reg = AllocTypedTemp(false, reg_class);
        OpRegCopy(new_reg, reg_arg);
        reg_arg = new_reg;
      }
    }
  }
  return reg_arg;
}

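// Like LoadArg(), but loads the argument directly into the already-evaluated destination
// location |rl_dest| instead of allocating a temp.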
void Mir2Lir::LoadArgDirect(size_t in_position, RegLocation rl_dest) {
  DCHECK_EQ(rl_dest.location, kLocPhysReg);
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);

  // TODO: REVISIT: This adds a spill of low part while we could just copy it.
  if (reg_arg.Valid() && rl_dest.wide && (reg_arg.GetWideKind() == kNotWide)) {
    // For wide register we've got only half of it.
    // Flush it to memory then.
    StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
    reg_arg = RegStorage::InvalidReg();
  }

  if (!reg_arg.Valid()) {
    LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, rl_dest.wide ? k64 : k32, kNotVolatile);
  } else {
    if (rl_dest.wide) {
      OpRegCopyWide(rl_dest.reg, reg_arg);
    } else {
      OpRegCopy(rl_dest.reg, reg_arg);
    }
  }
}

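// Store an argument that is mapped to a register back into its canonical stack slot; used by
// the suspend-check slow path before calling into the runtime.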
void Mir2Lir::SpillArg(size_t in_position) {
  RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);

  if (reg_arg.Valid()) {
    int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
    ShortyArg arg = in_to_reg_storage_mapping_.GetShorty(in_position);
    OpSize size = arg.IsRef() ? kReference :
        (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;
    StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, size, kNotVolatile);
  }
}

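// Reload a previously spilled argument from its stack slot back into its mapped register.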
void Mir2Lir::UnspillArg(size_t in_position) {
  RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);

  if (reg_arg.Valid()) {
    int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
    ShortyArg arg = in_to_reg_storage_mapping_.GetShorty(in_position);
    OpSize size = arg.IsRef() ? kReference :
        (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;
    LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, size, kNotVolatile);
  }
}

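// Emit the inline suspend test for a special method and register the matching slow path; the
// caller uses the returned slow path to record which arguments must survive the runtime call.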
Mir2Lir::SpecialSuspendCheckSlowPath* Mir2Lir::GenSpecialSuspendTest() {
  LockCallTemps();
  LIR* branch = OpTestSuspend(nullptr);
  FreeCallTemps();
  LIR* cont = NewLIR0(kPseudoTargetLabel);
  SpecialSuspendCheckSlowPath* slow_path =
      new (arena_) SpecialSuspendCheckSlowPath(this, branch, cont);
  AddSlowPath(slow_path);
  return slow_path;
}

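// Generate the simplified body of a getter recognized by the inliner: the receiver is "this"
// (so no null check is emitted) and the field is loaded straight into the return register(s).
// Returns false if the pattern cannot be handled here.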
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  OpSize size;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET):
      size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
      size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kDouble : k64;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
      size = kUnsignedByte;
      break;
    default:
      LOG(FATAL) << "Unknown variant: " << data.op_variant;
      UNREACHABLE();
  }

  // Point of no return - no aborts after this
  if (!kLeafOptimization) {
    auto* slow_path = GenSpecialSuspendTest();
    slow_path->PreserveArg(data.object_arg);
  }
  LockArg(data.object_arg);
  GenPrintLabel(mir);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
                            : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (IsRef(size)) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (IsWide(size)) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}

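// Generate the simplified body of a setter recognized by the inliner: the value argument is
// stored into a field of "this", with a GC card mark for reference stores. Returns false for
// patterns that still need a null check or that return one of their arguments.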
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  OpSize size;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT):
      size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
      size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kDouble : k64;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
      size = kUnsignedByte;
      break;
    default:
      LOG(FATAL) << "Unknown variant: " << data.op_variant;
      UNREACHABLE();
  }

  // Point of no return - no aborts after this
  if (!kLeafOptimization) {
    auto* slow_path = GenSpecialSuspendTest();
    slow_path->PreserveArg(data.object_arg);
    slow_path->PreserveArg(data.src_arg);
  }
  LockArg(data.object_arg);
  LockArg(data.src_arg);
  GenPrintLabel(mir);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
  if (IsRef(size)) {
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (IsRef(size)) {
    MarkGCCard(0, reg_src, reg_obj);
  }
  return true;
}

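// Generate the simplified body of a method that just returns one of its arguments: the
// argument is loaded directly into the return location.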
bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
  const InlineReturnArgData& data = special.d.return_data;
  bool wide = (data.is_wide != 0u);

  // Point of no return - no aborts after this
  if (!kLeafOptimization) {
    auto* slow_path = GenSpecialSuspendTest();
    slow_path->PreserveArg(data.arg);
  }
  LockArg(data.arg);
  GenPrintLabel(mir);
  RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
  LoadArgDirect(data.arg, rl_dest);
  return true;
}

/*
 * Special-case code generation for simple non-throwing leaf methods.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  DCHECK(current_mir_ == nullptr);  // Safepoints attributed to prologue.
  MIR* return_mir = nullptr;
  bool successful = false;
  EnsureInitializedArgMappingToPhysicalReg();

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      if (!kLeafOptimization) {
        GenSpecialSuspendTest();
      }
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      successful = true;
      if (!kLeafOptimization) {
        GenSpecialSuspendTest();
      }
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    if (!kLeafOptimization) {
      HandleSlowPaths();
    } else {
      core_spill_mask_ = 0;
      num_core_spills_ = 0;
      fp_spill_mask_ = 0;
      num_fp_spills_ = 0;
      frame_size_ = 0;
      core_vmap_table_.clear();
      fp_vmap_table_.clear();
    }
  }

  return successful;
}

/*
 * Target-independent code generation. Use only high-level
 * load/store utilities here, or target-dependent genXX() handlers
 * when necessary.
 */
void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
  RegLocation rl_src[3];
  RegLocation rl_dest = mir_graph_->GetBadLoc();
  RegLocation rl_result = mir_graph_->GetBadLoc();
  const Instruction::Code opcode = mir->dalvikInsn.opcode;
  const int opt_flags = mir->optimization_flags;
  const uint32_t vB = mir->dalvikInsn.vB;
  const uint32_t vC = mir->dalvikInsn.vC;
  DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
                                << std::hex << current_dalvik_offset_;

  // Prep Src and Dest locations.
  int next_sreg = 0;
  int next_loc = 0;
  uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
  rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
  if (attrs & DF_UA) {
    if (attrs & DF_A_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UB) {
    if (attrs & DF_B_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UC) {
    if (attrs & DF_C_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
    }
  }
  if (attrs & DF_DA) {
    if (attrs & DF_A_WIDE) {
      rl_dest = mir_graph_->GetDestWide(mir);
    } else {
      rl_dest = mir_graph_->GetDest(mir);
    }
  }
  switch (opcode) {
    case Instruction::NOP:
      break;

    case Instruction::MOVE_EXCEPTION:
      GenMoveException(rl_dest);
      break;

    case Instruction::RETURN_VOID_NO_BARRIER:
    case Instruction::RETURN_VOID:
      if (((cu_->access_flags & kAccConstructor) != 0) &&
          cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
                                                           cu_->class_def_idx)) {
        GenMemBarrier(kStoreStore);
      }
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      break;

    case Instruction::RETURN_OBJECT:
      DCHECK(rl_src[0].ref);
      FALLTHROUGH_INTENDED;
    case Instruction::RETURN:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      StoreValue(GetReturn(ShortyToRegClass(cu_->shorty[0])), rl_src[0]);
      break;

    case Instruction::RETURN_WIDE:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      StoreValueWide(GetReturnWide(ShortyToRegClass(cu_->shorty[0])), rl_src[0]);
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_WIDE:
    case Instruction::MOVE_RESULT_OBJECT:
      // Already processed with invoke or filled-new-array.
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      StoreValueWide(rl_dest, rl_src[0]);
      break;

    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      GenConst(rl_dest, vB);
      break;

    case Instruction::CONST_HIGH16:
      GenConst(rl_dest, vB << 16);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
      break;

    case Instruction::CONST_WIDE:
      GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      rl_result = EvalLoc(rl_dest, kAnyReg, true);
      LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
      StoreValueWide(rl_dest, rl_result);
      break;

    case Instruction::MONITOR_ENTER:
      GenMonitorEnter(opt_flags, rl_src[0]);
      break;

    case Instruction::MONITOR_EXIT:
      GenMonitorExit(opt_flags, rl_src[0]);
      break;

    case Instruction::CHECK_CAST: {
      GenCheckCast(opt_flags, mir->offset, vB, rl_src[0]);
      break;
    }
    case Instruction::INSTANCE_OF:
      GenInstanceof(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::NEW_INSTANCE:
      GenNewInstance(vB, rl_dest);
      break;

    case Instruction::THROW:
      GenThrow(rl_src[0]);
      break;

    case Instruction::ARRAY_LENGTH: {
      int len_offset;
      len_offset = mirror::Array::LengthOffset().Int32Value();
      rl_src[0] = LoadValue(rl_src[0], kRefReg);
      GenNullCheck(rl_src[0].reg, opt_flags);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
      MarkPossibleNullPointerException(opt_flags);
      StoreValue(rl_dest, rl_result);
      break;
    }
    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      GenConstString(vB, rl_dest);
      break;

    case Instruction::CONST_CLASS:
      GenConstClass(vB, rl_dest);
      break;

    case Instruction::FILL_ARRAY_DATA:
      GenFillArrayData(mir, vB, rl_src[0]);
      break;

    case Instruction::FILLED_NEW_ARRAY:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   false /* not range */));
      break;

    case Instruction::FILLED_NEW_ARRAY_RANGE:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   true /* range */));
      break;

    case Instruction::NEW_ARRAY:
      GenNewArray(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      if (mir_graph_->IsBackEdge(bb, bb->taken)) {
        GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
      } else {
        OpUnconditionalBranch(&label_list[bb->taken]);
      }
      break;

    case Instruction::PACKED_SWITCH:
      GenPackedSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::SPARSE_SWITCH:
      GenSparseSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::CMPL_FLOAT:
    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
      GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::CMP_LONG:
      GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE: {
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(opt_flags);
      }
      LIR* taken = &label_list[bb->taken];
      GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken);
      break;
    }
    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ: {
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(opt_flags);
      }
      LIR* taken = &label_list[bb->taken];
      GenCompareZeroAndBranch(opcode, rl_src[0], taken);
      break;
    }

    case Instruction::AGET_WIDE:
      GenArrayGet(opt_flags, rl_dest.fp ? kDouble : k64, rl_src[0], rl_src[1], rl_dest, 3);
      break;
    case Instruction::AGET_OBJECT:
      GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET:
      GenArrayGet(opt_flags, rl_dest.fp ? kSingle : k32, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET_BOOLEAN:
      GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_BYTE:
      GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_CHAR:
      GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::AGET_SHORT:
      GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::APUT_WIDE:
      GenArrayPut(opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
      break;
    case Instruction::APUT:
      GenArrayPut(opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
      break;
    case Instruction::APUT_OBJECT: {
      bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
      bool is_safe = is_null;  // Always safe to store null.
      if (!is_safe) {
        // Check safety from verifier type information.
        const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
        is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
      }
      if (is_null || is_safe) {
        // Store of constant null doesn't require an assignability test and can be generated inline
        // without fixed register usage or a card mark.
        GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
      } else {
        GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
      }
      break;
    }
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
      break;
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
      GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
      break;

    case Instruction::IGET_OBJECT_QUICK:
    case Instruction::IGET_OBJECT:
      GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_WIDE_QUICK:
    case Instruction::IGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      if (rl_dest.fp) {
        GenIGet(mir, opt_flags, kDouble, Primitive::kPrimDouble, rl_dest, rl_src[0]);
      } else {
        GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
      }
      break;

    case Instruction::IGET_QUICK:
    case Instruction::IGET:
      if (rl_dest.fp) {
        GenIGet(mir, opt_flags, kSingle, Primitive::kPrimFloat, rl_dest, rl_src[0]);
      } else {
        GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
      }
      break;

    case Instruction::IGET_CHAR_QUICK:
    case Instruction::IGET_CHAR:
      GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_SHORT_QUICK:
    case Instruction::IGET_SHORT:
      GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BOOLEAN_QUICK:
    case Instruction::IGET_BOOLEAN:
      GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BYTE_QUICK:
    case Instruction::IGET_BYTE:
      GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
      break;

    case Instruction::IPUT_WIDE_QUICK:
    case Instruction::IPUT_WIDE:
      GenIPut(mir, opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_OBJECT_QUICK:
    case Instruction::IPUT_OBJECT:
      GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_QUICK:
    case Instruction::IPUT:
      GenIPut(mir, opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_BYTE_QUICK:
    case Instruction::IPUT_BOOLEAN_QUICK:
    case Instruction::IPUT_BYTE:
    case Instruction::IPUT_BOOLEAN:
      GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_CHAR_QUICK:
    case Instruction::IPUT_CHAR:
      GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_SHORT_QUICK:
    case Instruction::IPUT_SHORT:
      GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::SGET_OBJECT:
      GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
      break;

    case Instruction::SGET:
      GenSget(mir, rl_dest, rl_dest.fp ? kSingle : k32, Primitive::kPrimInt);
      break;

    case Instruction::SGET_CHAR:
      GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
      break;

    case Instruction::SGET_SHORT:
      GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
      break;

    case Instruction::SGET_BOOLEAN:
      GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
      break;

    case Instruction::SGET_BYTE:
      GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
      break;

    case Instruction::SGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      GenSget(mir, rl_dest, rl_dest.fp ? kDouble : k64, Primitive::kPrimDouble);
      break;

    case Instruction::SPUT_OBJECT:
      GenSput(mir, rl_src[0], kReference);
      break;

    case Instruction::SPUT:
      GenSput(mir, rl_src[0], rl_src[0].fp ? kSingle : k32);
      break;

    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_BOOLEAN:
      GenSput(mir, rl_src[0], kUnsignedByte);
      break;

    case Instruction::SPUT_CHAR:
      GenSput(mir, rl_src[0], kUnsignedHalf);
      break;

    case Instruction::SPUT_SHORT:
      GenSput(mir, rl_src[0], kSignedHalf);
      break;

    case Instruction::SPUT_WIDE:
      GenSput(mir, rl_src[0], rl_src[0].fp ? kDouble : k64);
      break;

    case Instruction::INVOKE_STATIC_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
      break;
    case Instruction::INVOKE_STATIC:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
      break;

    case Instruction::INVOKE_DIRECT:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
      break;
    case Instruction::INVOKE_DIRECT_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
      break;

    case Instruction::INVOKE_VIRTUAL_QUICK:
    case Instruction::INVOKE_VIRTUAL:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
      break;

    case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
    case Instruction::INVOKE_VIRTUAL_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
      break;

    case Instruction::INVOKE_SUPER:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
      break;
    case Instruction::INVOKE_SUPER_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
      break;

    case Instruction::INVOKE_INTERFACE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
      break;
    case Instruction::INVOKE_INTERFACE_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
      break;

    case Instruction::NEG_INT:
    case Instruction::NOT_INT:
      GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
      break;

    case Instruction::NEG_LONG:
    case Instruction::NOT_LONG:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
      break;

    case Instruction::NEG_FLOAT:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_DOUBLE:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::INT_TO_LONG:
      GenIntToLong(rl_dest, rl_src[0]);
      break;

    case Instruction::LONG_TO_INT:
      GenLongToInt(rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_BYTE:
    case Instruction::INT_TO_SHORT:
    case Instruction::INT_TO_CHAR:
      GenIntNarrowing(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_FLOAT:
    case Instruction::INT_TO_DOUBLE:
    case Instruction::LONG_TO_FLOAT:
    case Instruction::LONG_TO_DOUBLE:
    case Instruction::FLOAT_TO_INT:
    case Instruction::FLOAT_TO_LONG:
    case Instruction::FLOAT_TO_DOUBLE:
    case Instruction::DOUBLE_TO_INT:
    case Instruction::DOUBLE_TO_LONG:
    case Instruction::DOUBLE_TO_FLOAT:
      GenConversion(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      if (rl_src[0].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[1],
                         mir_graph_->ConstantValue(rl_src[0].orig_sreg));
      } else if (rl_src[1].is_const &&
                 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0],
                         mir_graph_->ConstantValue(rl_src[1].orig_sreg));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      }
      break;

    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      }
      break;

    case Instruction::ADD_LONG:
    case Instruction::SUB_LONG:
    case Instruction::AND_LONG:
    case Instruction::OR_LONG:
    case Instruction::XOR_LONG:
    case Instruction::ADD_LONG_2ADDR:
    case Instruction::SUB_LONG_2ADDR:
    case Instruction::AND_LONG_2ADDR:
    case Instruction::OR_LONG_2ADDR:
    case Instruction::XOR_LONG_2ADDR:
      if (rl_src[0].is_const || rl_src[1].is_const) {
        GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::MUL_LONG:
    case Instruction::DIV_LONG:
    case Instruction::REM_LONG:
    case Instruction::MUL_LONG_2ADDR:
    case Instruction::DIV_LONG_2ADDR:
    case Instruction::REM_LONG_2ADDR:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      break;

    case Instruction::SHL_LONG:
    case Instruction::SHR_LONG:
    case Instruction::USHR_LONG:
    case Instruction::SHL_LONG_2ADDR:
    case Instruction::SHR_LONG_2ADDR:
    case Instruction::USHR_LONG_2ADDR:
      if (rl_src[1].is_const) {
        GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
      } else {
        GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::DIV_FLOAT:
    case Instruction::DIV_FLOAT_2ADDR:
      if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::ADD_FLOAT:
    case Instruction::SUB_FLOAT:
    case Instruction::MUL_FLOAT:
    case Instruction::REM_FLOAT:
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::REM_FLOAT_2ADDR:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::DIV_DOUBLE:
    case Instruction::DIV_DOUBLE_2ADDR:
      if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::ADD_DOUBLE:
    case Instruction::SUB_DOUBLE:
    case Instruction::MUL_DOUBLE:
    case Instruction::REM_DOUBLE:
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE_2ADDR:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::RSUB_INT:
    case Instruction::ADD_INT_LIT16:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT_LIT16:
    case Instruction::AND_INT_LIT16:
    case Instruction::OR_INT_LIT16:
    case Instruction::XOR_INT_LIT16:
    case Instruction::ADD_INT_LIT8:
    case Instruction::RSUB_INT_LIT8:
    case Instruction::MUL_INT_LIT8:
    case Instruction::DIV_INT_LIT8:
    case Instruction::REM_INT_LIT8:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT_LIT8:
    case Instruction::XOR_INT_LIT8:
    case Instruction::SHL_INT_LIT8:
    case Instruction::SHR_INT_LIT8:
    case Instruction::USHR_INT_LIT8:
      GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
      break;

    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  DCHECK(CheckCorePoolSanity());
}  // NOLINT(readability/fn_size)

// Process extended MIR instructions
void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
    case kMirOpCopy: {
      RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
      RegLocation rl_dest = mir_graph_->GetDest(mir);
      StoreValue(rl_dest, rl_src);
      break;
    }
    case kMirOpFusedCmplFloat:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmpgFloat:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmplDouble:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpgDouble:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpLong:
      if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
        GenSuspendTest(mir->optimization_flags);
      }
      GenFusedLongCmpBranch(bb, mir);
      break;
    case kMirOpSelect:
      GenSelect(bb, mir);
      break;
    case kMirOpNullCheck: {
      RegLocation rl_obj = mir_graph_->GetSrc(mir, 0);
      rl_obj = LoadValue(rl_obj, kRefReg);
      // An explicit check is done because it is not expected that when this is used,
      // that it will actually trip up the implicit checks (since an invalid access
      // is needed on the null object).
      GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags);
      break;
    }
    case kMirOpPhi:
    case kMirOpNop:
    case kMirOpRangeCheck:
    case kMirOpDivZeroCheck:
    case kMirOpCheck:
      // Ignore these known opcodes
      break;
    default:
      // Give the backends a chance to handle unknown extended MIR opcodes.
      GenMachineSpecificExtendedMethodMIR(bb, mir);
      break;
  }
}

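// In verbose mode, emit a boundary marker carrying the disassembled Dalvik instruction so the
// generated LIR can be correlated with the original bytecode.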
void Mir2Lir::GenPrintLabel(MIR* mir) {
  // Mark the beginning of a Dalvik instruction for line tracking.
  if (cu_->verbose) {
    char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
    MarkBoundary(mir->offset, inst_str);
  }
}

Brian Carlstrom7940e442013-07-12 13:46:57 -07001207// Handle the content in each basic block.
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001208bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001209 if (bb->block_type == kDead) return false;
1210 current_dalvik_offset_ = bb->start_offset;
1211 MIR* mir;
1212 int block_id = bb->id;
1213
1214 block_label_list_[block_id].operands[0] = bb->start_offset;
1215
1216 // Insert the block label.
1217 block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
buzbeeb48819d2013-09-14 16:15:25 -07001218 block_label_list_[block_id].flags.fixup = kFixupLabel;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001219 AppendLIR(&block_label_list_[block_id]);
1220
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001221 LIR* head_lir = nullptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001222
1223 // If this is a catch block, export the start address.
1224 if (bb->catch_entry) {
1225 head_lir = NewLIR0(kPseudoExportedPC);
1226 }
1227
1228 // Free temp registers and reset redundant store tracking.
buzbeeba574512014-05-12 15:13:16 -07001229 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001230
1231 if (bb->block_type == kEntryBlock) {
buzbee56c71782013-09-05 17:13:19 -07001232 ResetRegPool();
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07001233 int start_vreg = mir_graph_->GetFirstInVR();
David Srbecky6f715892015-03-30 14:21:42 +01001234 AppendLIR(NewLIR0(kPseudoPrologueBegin));
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07001235 GenEntrySequence(&mir_graph_->reg_location_[start_vreg], mir_graph_->GetMethodLoc());
David Srbecky6f715892015-03-30 14:21:42 +01001236 AppendLIR(NewLIR0(kPseudoPrologueEnd));
David Srbecky1109fb32015-04-07 20:21:06 +01001237 DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001238 } else if (bb->block_type == kExitBlock) {
buzbee56c71782013-09-05 17:13:19 -07001239 ResetRegPool();
David Srbecky1109fb32015-04-07 20:21:06 +01001240 DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
David Srbecky6f715892015-03-30 14:21:42 +01001241 AppendLIR(NewLIR0(kPseudoEpilogueBegin));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001242 GenExitSequence();
David Srbecky6f715892015-03-30 14:21:42 +01001243 AppendLIR(NewLIR0(kPseudoEpilogueEnd));
David Srbecky1109fb32015-04-07 20:21:06 +01001244 DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001245 }
1246
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001247 for (mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001248 ResetRegPool();
1249 if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
buzbeeba574512014-05-12 15:13:16 -07001250 ClobberAllTemps();
buzbee7a11ab02014-04-28 20:02:38 -07001251 // Reset temp allocation to minimize differences when A/B testing.
buzbee091cc402014-03-31 10:14:40 -07001252 reg_pool_->ResetNextTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001253 }
1254
1255 if (cu_->disable_opt & (1 << kSuppressLoads)) {
1256 ResetDefTracking();
1257 }
1258
1259 // Reset temp tracking sanity check.
1260 if (kIsDebugBuild) {
1261 live_sreg_ = INVALID_SREG;
1262 }
1263
1264 current_dalvik_offset_ = mir->offset;
Vladimir Marko767c7522015-03-20 12:47:30 +00001265 current_mir_ = mir;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001266 int opcode = mir->dalvikInsn.opcode;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001267
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001268 GenPrintLabel(mir);
1269
Brian Carlstrom7940e442013-07-12 13:46:57 -07001270 // Remember the first LIR for this block.
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001271 if (head_lir == nullptr) {
buzbee252254b2013-09-08 16:20:53 -07001272 head_lir = &block_label_list_[bb->id];
1273 // Set the first label as a scheduling barrier.
buzbeeb48819d2013-09-14 16:15:25 -07001274 DCHECK(!head_lir->flags.use_def_invalid);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001275 head_lir->u.m.def_mask = &kEncodeAll;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001276 }
1277
Jean Christophe Beyler2ab40eb2014-06-02 09:03:14 -07001278 if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001279 HandleExtendedMethodMIR(bb, mir);
1280 continue;
1281 }
1282
1283 CompileDalvikInstruction(mir, bb, block_label_list_);
1284 }
1285
1286 if (head_lir != nullptr) {
1287 // Eliminate redundant loads/stores and delay stores into later slots.
1288 ApplyLocalOptimizations(head_lir, last_lir_insn_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001289 }
1290 return false;
1291}
1292
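// Code generation for "special" methods recognized by the inliner: typically
// empty methods, constant or argument returns, and simple field getters and
// setters. The code is emitted straight from the first DalvikByteCode block,
// bypassing the general per-block machinery; returning false here (no suitable
// block, or GenSpecialCase() rejects the pattern) makes the caller fall back to
// the normal compilation path.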
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001293bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
Vladimir Marko5816ed42013-11-27 17:04:20 +00001294 cu_->NewTimingSplit("SpecialMIR2LIR");
Brian Carlstrom7940e442013-07-12 13:46:57 -07001295 // Find the first DalvikByteCode block.
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001296 DCHECK_EQ(mir_graph_->GetNumReachableBlocks(), mir_graph_->GetDfsOrder().size());
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001297 BasicBlock* bb = nullptr;
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001298 for (BasicBlockId dfs_id : mir_graph_->GetDfsOrder()) {
1299 BasicBlock* candidate = mir_graph_->GetBasicBlock(dfs_id);
1300 if (candidate->block_type == kDalvikByteCode) {
1301 bb = candidate;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001302 break;
1303 }
1304 }
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001305 if (bb == nullptr) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001306 return false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001307 }
1308 DCHECK_EQ(bb->start_offset, 0);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001309 DCHECK(bb->first_mir_insn != nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001310
1311 // Get the first instruction.
1312 MIR* mir = bb->first_mir_insn;
1313
1314 // Free temp registers and reset redundant store tracking.
1315 ResetRegPool();
1316 ResetDefTracking();
buzbeeba574512014-05-12 15:13:16 -07001317 ClobberAllTemps();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001318
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001319 return GenSpecialCase(bb, mir, special);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001320}
1321
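// Main driver for the non-special path: visit blocks in pre-order DFS layout,
// generate code for each, and insert an unconditional branch whenever a block's
// fall-through successor is not the next block emitted. Dead blocks are skipped
// when advancing, and all queued slow paths are materialized at the end.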
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001322void Mir2Lir::MethodMIR2LIR() {
buzbeea61f4952013-08-23 14:27:06 -07001323 cu_->NewTimingSplit("MIR2LIR");
1324
Brian Carlstrom7940e442013-07-12 13:46:57 -07001325 // Hold the labels of each block.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +00001326 block_label_list_ = arena_->AllocArray<LIR>(mir_graph_->GetNumBlocks(), kArenaAllocLIR);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001327
buzbee56c71782013-09-05 17:13:19 -07001328 PreOrderDfsIterator iter(mir_graph_);
buzbee252254b2013-09-08 16:20:53 -07001329 BasicBlock* curr_bb = iter.Next();
1330 BasicBlock* next_bb = iter.Next();
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001331 while (curr_bb != nullptr) {
buzbee252254b2013-09-08 16:20:53 -07001332 MethodBlockCodeGen(curr_bb);
1333 // If the fall_through block is no longer laid out consecutively, drop in a branch.
buzbee0d829482013-10-11 15:24:55 -07001334 BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001335 if ((curr_bb_fall_through != nullptr) && (curr_bb_fall_through != next_bb)) {
buzbee0d829482013-10-11 15:24:55 -07001336 OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
buzbee252254b2013-09-08 16:20:53 -07001337 }
1338 curr_bb = next_bb;
1339 do {
1340 next_bb = iter.Next();
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001341 } while ((next_bb != nullptr) && (next_bb->block_type == kDead));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001342 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001343 HandleSlowPaths();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001344}
1345
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001346//
1347// LIR Slow Path
1348//
1349
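// Binds a slow path's entry label: restores the dex pc and MIR context captured
// when the slow path was queued, emits the given pseudo opcode as the target
// LIR, and retargets the fast-path branch at it.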
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001350LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001351 m2l_->SetCurrentDexPc(current_dex_pc_);
Vladimir Marko767c7522015-03-20 12:47:30 +00001352 m2l_->current_mir_ = current_mir_;
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07001353 LIR* target = m2l_->NewLIR0(opcode);
Vladimir Marko3bc86152014-03-13 14:11:28 +00001354 fromfast_->target = target;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001355 return target;
1356}
Vladimir Marko3bc86152014-03-13 14:11:28 +00001357
Andreas Gampe4b537a82014-06-30 22:24:53 -07001358
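// Debug-time sanity checks on register storage choices: references must use
// 64-bit storage on 64-bit targets, wide values must use 64-bit storage, and
// the FP/core distinction must match the requested check (float checks are
// skipped for ARM soft-float). 'fail' turns a violation into a CHECK failure;
// otherwise 'report' only logs a warning.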
1359void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
1360 bool fail, bool report)
1361 const {
1362 if (rs.Valid()) {
1363 if (ref == RefCheck::kCheckRef) {
1364 if (cu_->target64 && !rs.Is64Bit()) {
1365 if (fail) {
1366 CHECK(false) << "Reg storage not 64b for ref.";
1367 } else if (report) {
1368 LOG(WARNING) << "Reg storage not 64b for ref.";
1369 }
1370 }
1371 }
1372 if (wide == WidenessCheck::kCheckWide) {
1373 if (!rs.Is64Bit()) {
1374 if (fail) {
1375 CHECK(false) << "Reg storage not 64b for wide.";
1376 } else if (report) {
1377 LOG(WARNING) << "Reg storage not 64b for wide.";
1378 }
1379 }
1380 }
1381 // A tighter check would be nice, but for now soft-float will not check float at all.
1382 if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
1383 if (!rs.IsFloat()) {
1384 if (fail) {
1385 CHECK(false) << "Reg storage not float for fp.";
1386 } else if (report) {
1387 LOG(WARNING) << "Reg storage not float for fp.";
1388 }
1389 }
1390 } else if (fp == FPCheck::kCheckNotFP) {
1391 if (rs.IsFloat()) {
1392 if (fail) {
1393 CHECK(false) << "Reg storage float for not-fp.";
1394 } else if (report) {
1395 LOG(WARNING) << "Reg storage float for not-fp.";
1396 }
1397 }
1398 }
1399 }
1400}
1401
1402void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
1403 // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
1404 // will be stored.
1405 CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
1406 rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report);
1407}
1408
Serban Constantinescu63999682014-07-15 17:44:21 +01001409size_t Mir2Lir::GetInstructionOffset(LIR* lir) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001410 UNUSED(lir);
1411 UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()";
1412 UNREACHABLE();
Serban Constantinescu63999682014-07-15 17:44:21 +01001413}
1414
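// Builds the mapping from input VR positions to physical registers by walking
// the method's shorty and asking the target-specific mapper for a register for
// each argument. A wide argument takes two slots; the second slot holds a
// kInvalidShorty placeholder. end_mapped_in_ is the (exclusive) bound of input
// slots covered by registers, and has_arguments_on_stack_ is set once any
// argument fails to get a register.
//
// Hypothetical example: with arguments (J, I) on a target that still has a
// 64-bit register free for the long but none for the int, mapping_ ends up with
// three entries (J, placeholder, I), end_mapped_in_ == 2, and
// has_arguments_on_stack_ == true.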
Serguei Katkov717a3e42014-11-13 17:19:42 +06001415void Mir2Lir::InToRegStorageMapping::Initialize(ShortyIterator* shorty,
1416 InToRegStorageMapper* mapper) {
1417 DCHECK(mapper != nullptr);
1418 DCHECK(shorty != nullptr);
Vladimir Marko6ce3eba2015-02-16 13:05:59 +00001419 DCHECK(!IsInitialized());
1420 DCHECK_EQ(end_mapped_in_, 0u);
1421 DCHECK(!has_arguments_on_stack_);
Serguei Katkov717a3e42014-11-13 17:19:42 +06001422 while (shorty->Next()) {
1423 ShortyArg arg = shorty->GetArg();
1424 RegStorage reg = mapper->GetNextReg(arg);
Vladimir Marko6ce3eba2015-02-16 13:05:59 +00001425 mapping_.emplace_back(arg, reg);
1426 if (arg.IsWide()) {
1427 mapping_.emplace_back(ShortyArg(kInvalidShorty), RegStorage::InvalidReg());
1428 }
Serguei Katkov717a3e42014-11-13 17:19:42 +06001429 if (reg.Valid()) {
Vladimir Marko6ce3eba2015-02-16 13:05:59 +00001430 end_mapped_in_ = mapping_.size();
1431 // If the VR is wide but wasn't mapped as wide, only its low half is in a register; don't count the second slot.
1432 if (arg.IsWide() && !reg.Is64Bit()) {
1433 --end_mapped_in_;
Serguei Katkov717a3e42014-11-13 17:19:42 +06001434 }
1435 } else {
1436 has_arguments_on_stack_ = true;
1437 }
Serguei Katkov717a3e42014-11-13 17:19:42 +06001438 }
1439 initialized_ = true;
1440}
1441
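// Accessors for the per-position mapping; asking about the placeholder slot of
// a wide argument (kInvalidShorty) is a usage error caught by the DCHECKs.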
Vladimir Marko6ce3eba2015-02-16 13:05:59 +00001442RegStorage Mir2Lir::InToRegStorageMapping::GetReg(size_t in_position) {
Serguei Katkov717a3e42014-11-13 17:19:42 +06001443 DCHECK(IsInitialized());
Vladimir Marko6ce3eba2015-02-16 13:05:59 +00001444 DCHECK_LT(in_position, mapping_.size());
1445 DCHECK_NE(mapping_[in_position].first.GetType(), kInvalidShorty);
1446 return mapping_[in_position].second;
1447}
1448
1449Mir2Lir::ShortyArg Mir2Lir::InToRegStorageMapping::GetShorty(size_t in_position) {
1450 DCHECK(IsInitialized());
1451 DCHECK_LT(in_position, mapping_.size());
1452 DCHECK_NE(mapping_[in_position].first.GetType(), kInvalidShorty);
1453 return mapping_[in_position].first;
Serguei Katkov717a3e42014-11-13 17:19:42 +06001454}
1455
Brian Carlstrom7940e442013-07-12 13:46:57 -07001456} // namespace art