blob: 1813e0939ed0d435eb8e1ea08f4c51f2956654a3 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Andreas Gampe7e499922015-01-06 08:28:12 -080016
Andreas Gampe0b9203e2015-01-22 20:39:27 -080017#include "mir_to_lir-inl.h"
18
Andreas Gampe7e499922015-01-06 08:28:12 -080019#include <functional>
20
Ian Rogersd582fa42014-11-05 23:46:43 -080021#include "arch/arm/instruction_set_features_arm.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080022#include "base/macros.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023#include "dex/compiler_ir.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080024#include "dex/mir_graph.h"
Brian Carlstrom60d7a652014-03-13 18:10:08 -070025#include "dex/quick/arm/arm_lir.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080026#include "driver/compiler_driver.h"
Vladimir Marko20f85592015-03-19 10:07:02 +000027#include "driver/compiler_options.h"
Ian Rogers166db042013-07-26 12:05:57 -070028#include "entrypoints/quick/quick_entrypoints.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070029#include "mirror/array.h"
Andreas Gampe9c3b0892014-04-24 17:33:34 +000030#include "mirror/object_array-inl.h"
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -080031#include "mirror/object-inl.h"
Andreas Gampeaa910d52014-07-30 18:59:05 -070032#include "mirror/object_reference.h"
Andreas Gampe7e499922015-01-06 08:28:12 -080033#include "utils.h"
Vladimir Marko20f85592015-03-19 10:07:02 +000034#include "utils/dex_cache_arrays_layout-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070035#include "verifier/method_verifier.h"
36
37namespace art {
38
// Shortcuts to repeatedly used long types. Both alias dex-cache-style arrays in the
// managed heap ("mirror") namespace.
typedef mirror::ObjectArray<mirror::Object> ObjArray;
typedef mirror::ObjectArray<mirror::Class> ClassArray;
42
Brian Carlstrom7940e442013-07-12 13:46:57 -070043/*
44 * This source files contains "gen" codegen routines that should
45 * be applicable to most targets. Only mid-level support utilities
46 * and "op" calls may be used here.
47 */
48
Andreas Gampe0b9203e2015-01-22 20:39:27 -080049ALWAYS_INLINE static inline bool ForceSlowFieldPath(CompilationUnit* cu) {
50 return (cu->enable_debug & (1 << kDebugSlowFieldPath)) != 0;
51}
52
53ALWAYS_INLINE static inline bool ForceSlowStringPath(CompilationUnit* cu) {
54 return (cu->enable_debug & (1 << kDebugSlowStringPath)) != 0;
55}
56
57ALWAYS_INLINE static inline bool ForceSlowTypePath(CompilationUnit* cu) {
58 return (cu->enable_debug & (1 << kDebugSlowTypePath)) != 0;
59}
60
Vladimir Marko20f85592015-03-19 10:07:02 +000061void Mir2Lir::GenIfNullUseHelperImmMethod(
62 RegStorage r_result, QuickEntrypointEnum trampoline, int imm, RegStorage r_method) {
63 class CallHelperImmMethodSlowPath : public LIRSlowPath {
64 public:
65 CallHelperImmMethodSlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont,
66 QuickEntrypointEnum trampoline_in, int imm_in,
67 RegStorage r_method_in, RegStorage r_result_in)
68 : LIRSlowPath(m2l, fromfast, cont), trampoline_(trampoline_in),
69 imm_(imm_in), r_method_(r_method_in), r_result_(r_result_in) {
70 }
71
72 void Compile() {
73 GenerateTargetLabel();
74 if (r_method_.Valid()) {
75 m2l_->CallRuntimeHelperImmReg(trampoline_, imm_, r_method_, true);
76 } else {
77 m2l_->CallRuntimeHelperImmMethod(trampoline_, imm_, true);
78 }
79 m2l_->OpRegCopy(r_result_, m2l_->TargetReg(kRet0, kRef));
80 m2l_->OpUnconditionalBranch(cont_);
81 }
82
83 private:
84 QuickEntrypointEnum trampoline_;
85 const int imm_;
86 const RegStorage r_method_;
87 const RegStorage r_result_;
88 };
89
90 LIR* branch = OpCmpImmBranch(kCondEq, r_result, 0, NULL);
91 LIR* cont = NewLIR0(kPseudoTargetLabel);
92
93 AddSlowPath(new (arena_) CallHelperImmMethodSlowPath(this, branch, cont, trampoline, imm,
94 r_method, r_result));
95}
96
/*
 * Generate a kPseudoBarrier marker to indicate the boundary of special
 * blocks.
 */
void Mir2Lir::GenBarrier() {
  LIR* barrier = NewLIR0(kPseudoBarrier);
  /* Mark all resources as being clobbered */
  DCHECK(!barrier->flags.use_def_invalid);
  // Defining "everything" prevents local optimizations from moving code across the barrier.
  barrier->u.m.def_mask = &kEncodeAll;
}
107
// Unconditionally raise the divide-by-zero error: branch straight to a slow path
// that calls the kQuickThrowDivZero entrypoint.
void Mir2Lir::GenDivZeroException() {
  LIR* branch = OpUnconditionalBranch(nullptr);  // Target patched by the slow path.
  AddDivZeroCheckSlowPath(branch);
}
112
// Branch to the divide-by-zero slow path when condition c_code holds
// (assumes the condition flags have already been set by the caller).
void Mir2Lir::GenDivZeroCheck(ConditionCode c_code) {
  LIR* branch = OpCondBranch(c_code, nullptr);
  AddDivZeroCheckSlowPath(branch);
}
117
// Compare the divisor register against zero and branch to the divide-by-zero
// slow path when it is zero.
void Mir2Lir::GenDivZeroCheck(RegStorage reg) {
  LIR* branch = OpCmpImmBranch(kCondEq, reg, 0, nullptr);
  AddDivZeroCheckSlowPath(branch);
}
122
// Attach a slow path to 'branch' that, at a kPseudoThrowTarget label, calls the
// kQuickThrowDivZero runtime entrypoint. Shared by all GenDivZero* variants above.
void Mir2Lir::AddDivZeroCheckSlowPath(LIR* branch) {
  class DivZeroCheckSlowPath : public Mir2Lir::LIRSlowPath {
   public:
    DivZeroCheckSlowPath(Mir2Lir* m2l, LIR* branch_in)
        : LIRSlowPath(m2l, branch_in) {
    }

    void Compile() OVERRIDE {
      // Slow paths are emitted out of line; drop any register/def tracking state
      // carried over from the main code stream.
      m2l_->ResetRegPool();
      m2l_->ResetDefTracking();
      GenerateTargetLabel(kPseudoThrowTarget);
      m2l_->CallRuntimeHelper(kQuickThrowDivZero, true);
    }
  };

  AddSlowPath(new (arena_) DivZeroCheckSlowPath(this, branch));
}
Dave Allisonb373e092014-02-20 16:06:36 -0800140
// Array bounds check with both index and length in registers: branch to a throwing
// slow path when index >= length. The unsigned compare (kCondUge) also catches
// negative indices.
void Mir2Lir::GenArrayBoundsCheck(RegStorage index, RegStorage length) {
  class ArrayBoundsCheckSlowPath : public Mir2Lir::LIRSlowPath {
   public:
    ArrayBoundsCheckSlowPath(Mir2Lir* m2l, LIR* branch_in, RegStorage index_in,
                             RegStorage length_in)
        : LIRSlowPath(m2l, branch_in),
          index_(index_in), length_(length_in) {
    }

    void Compile() OVERRIDE {
      // Out-of-line throw path; reset tracking state inherited from the fast path.
      m2l_->ResetRegPool();
      m2l_->ResetDefTracking();
      GenerateTargetLabel(kPseudoThrowTarget);
      m2l_->CallRuntimeHelperRegReg(kQuickThrowArrayBounds, index_, length_, true);
    }

   private:
    const RegStorage index_;
    const RegStorage length_;
  };

  LIR* branch = OpCmpBranch(kCondUge, index, length, nullptr);
  AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length));
}
165
// Array bounds check with a constant index: branch to a throwing slow path when
// length <= index (unsigned, kCondLs). The slow path materializes the constant
// index and the length into the throw-helper argument registers.
void Mir2Lir::GenArrayBoundsCheck(int index, RegStorage length) {
  class ArrayBoundsCheckSlowPath : public Mir2Lir::LIRSlowPath {
   public:
    ArrayBoundsCheckSlowPath(Mir2Lir* m2l, LIR* branch_in, int index_in, RegStorage length_in)
        : LIRSlowPath(m2l, branch_in),
          index_(index_in), length_(length_in) {
    }

    void Compile() OVERRIDE {
      // Out-of-line throw path; reset tracking state inherited from the fast path.
      m2l_->ResetRegPool();
      m2l_->ResetDefTracking();
      GenerateTargetLabel(kPseudoThrowTarget);

      // Copy length into kArg1 first, then load the constant into kArg0: length_
      // might itself live in kArg0, so this order avoids clobbering it.
      RegStorage arg1_32 = m2l_->TargetReg(kArg1, kNotWide);
      RegStorage arg0_32 = m2l_->TargetReg(kArg0, kNotWide);

      m2l_->OpRegCopy(arg1_32, length_);
      m2l_->LoadConstant(arg0_32, index_);
      m2l_->CallRuntimeHelperRegReg(kQuickThrowArrayBounds, arg0_32, arg1_32, true);
    }

   private:
    const int32_t index_;
    const RegStorage length_;
  };

  LIR* branch = OpCmpImmBranch(kCondLs, length, index, nullptr);
  AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length));
}
195
// Emit an explicit null check: compare reg against zero and branch to a slow path
// that calls kQuickThrowNullPointer. Returns the branch LIR so callers can track it.
LIR* Mir2Lir::GenNullCheck(RegStorage reg) {
  class NullCheckSlowPath : public Mir2Lir::LIRSlowPath {
   public:
    NullCheckSlowPath(Mir2Lir* m2l, LIR* branch)
        : LIRSlowPath(m2l, branch) {
    }

    void Compile() OVERRIDE {
      // Out-of-line throw path; reset tracking state inherited from the fast path.
      m2l_->ResetRegPool();
      m2l_->ResetDefTracking();
      GenerateTargetLabel(kPseudoThrowTarget);
      m2l_->CallRuntimeHelper(kQuickThrowNullPointer, true);
    }
  };

  LIR* branch = OpCmpImmBranch(kCondEq, reg, 0, nullptr);
  AddSlowPath(new (arena_) NullCheckSlowPath(this, branch));
  return branch;
}
215
/*
 * Perform null-check on a register. With implicit null checks disabled this emits
 * an explicit compare-and-throw and returns its branch LIR; with implicit checks
 * enabled it emits nothing (a later memory access will fault) and returns nullptr.
 */
LIR* Mir2Lir::GenNullCheck(RegStorage m_reg, int opt_flags) {
  if (!cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) {
    return GenExplicitNullCheck(m_reg, opt_flags);
  }
  // If null check has not been eliminated, reset redundant store tracking.
  if ((opt_flags & MIR_IGNORE_NULL_CHECK) == 0) {
    ResetDefTracking();
  }
  return nullptr;
}
227
Dave Allisonf9439142014-03-27 15:10:22 -0700228/* Perform an explicit null-check on a register. */
229LIR* Mir2Lir::GenExplicitNullCheck(RegStorage m_reg, int opt_flags) {
230 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
231 return NULL;
232 }
Mingyao Yange643a172014-04-08 11:02:52 -0700233 return GenNullCheck(m_reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700234}
235
// With implicit null checks enabled, record the last emitted instruction as a
// safepoint (it may fault on null). No-op when the null check was eliminated or
// implicit checks are disabled.
void Mir2Lir::MarkPossibleNullPointerException(int opt_flags) {
  if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) {
    if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
      return;
    }
    // Insert after last instruction.
    MarkSafepointPC(last_lir_insn_);
  }
}
245
// Same as MarkPossibleNullPointerException, but marks the safepoint after a
// specific LIR instruction instead of the most recently emitted one.
void Mir2Lir::MarkPossibleNullPointerExceptionAfter(int opt_flags, LIR* after) {
  if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) {
    if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
      return;
    }
    MarkSafepointPCAfter(after);
  }
}
254
// With implicit stack-overflow checks enabled, record the last emitted instruction
// as a safepoint (the stack probe may fault).
void Mir2Lir::MarkPossibleStackOverflowException() {
  if (cu_->compiler_driver->GetCompilerOptions().GetImplicitStackOverflowChecks()) {
    MarkSafepointPC(last_lir_insn_);
  }
}
260
// Emit a load through 'reg' solely to trigger the implicit (signal-based) null
// check now, rather than at some later access. No-op when implicit checks are
// disabled or the null check was eliminated.
void Mir2Lir::ForceImplicitNullCheck(RegStorage reg, int opt_flags) {
  if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) {
    if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
      return;
    }
    // Force an implicit null check by performing a memory operation (load) from the given
    // register with offset 0. This will cause a signal if the register contains 0 (null).
    RegStorage tmp = AllocTemp();
    // TODO: for Mips, would be best to use rZERO as the bogus register target.
    LIR* load = Load32Disp(reg, 0, tmp);
    FreeTemp(tmp);
    MarkSafepointPC(load);
  }
}
275
// Generate a two-operand compare-and-branch for a dex if-<cond> instruction.
// Constant operands are normalized onto src2 (flipping the condition), and cheap
// constants — or a 0 with eq/ne — become compare-immediate branches.
void Mir2Lir::GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1,
                                  RegLocation rl_src2, LIR* taken) {
  ConditionCode cond;
  // References must be compared in reference registers.
  RegisterClass reg_class = (rl_src1.ref || rl_src2.ref) ? kRefReg : kCoreReg;
  switch (opcode) {
    case Instruction::IF_EQ:
      cond = kCondEq;
      break;
    case Instruction::IF_NE:
      cond = kCondNe;
      break;
    case Instruction::IF_LT:
      cond = kCondLt;
      break;
    case Instruction::IF_GE:
      cond = kCondGe;
      break;
    case Instruction::IF_GT:
      cond = kCondGt;
      break;
    case Instruction::IF_LE:
      cond = kCondLe;
      break;
    default:
      cond = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected opcode " << opcode;
  }

  // Normalize such that if either operand is constant, src2 will be constant
  if (rl_src1.is_const) {
    RegLocation rl_temp = rl_src1;
    rl_src1 = rl_src2;
    rl_src2 = rl_temp;
    cond = FlipComparisonOrder(cond);
  }

  rl_src1 = LoadValue(rl_src1, reg_class);
  // Is this really an immediate comparison?
  if (rl_src2.is_const) {
    // If it's already live in a register or not easily materialized, just keep going
    RegLocation rl_temp = UpdateLoc(rl_src2);
    int32_t constant_value = mir_graph_->ConstantValue(rl_src2);
    if ((rl_temp.location == kLocDalvikFrame) &&
        InexpensiveConstantInt(constant_value, opcode)) {
      // OK - convert this to a compare immediate and branch
      OpCmpImmBranch(cond, rl_src1.reg, mir_graph_->ConstantValue(rl_src2), taken);
      return;
    }

    // It's also commonly more efficient to have a test against zero with Eq/Ne. This is not worse
    // for x86, and allows a cbz/cbnz for Arm and Mips. At the same time, it works around a register
    // mismatch for 64b systems, where a reference is compared against null, as dex bytecode uses
    // the 32b literal 0 for null.
    if (constant_value == 0 && (cond == kCondEq || cond == kCondNe)) {
      // Use the OpCmpImmBranch and ignore the value in the register.
      OpCmpImmBranch(cond, rl_src1.reg, 0, taken);
      return;
    }
  }

  // General case: both operands in registers.
  rl_src2 = LoadValue(rl_src2, reg_class);
  OpCmpBranch(cond, rl_src1.reg, rl_src2.reg, taken);
}
339
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700340void Mir2Lir::GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src, LIR* taken) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700341 ConditionCode cond;
buzbee7c02e912014-10-03 13:14:17 -0700342 RegisterClass reg_class = rl_src.ref ? kRefReg : kCoreReg;
343 rl_src = LoadValue(rl_src, reg_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700344 switch (opcode) {
345 case Instruction::IF_EQZ:
346 cond = kCondEq;
347 break;
348 case Instruction::IF_NEZ:
349 cond = kCondNe;
350 break;
351 case Instruction::IF_LTZ:
352 cond = kCondLt;
353 break;
354 case Instruction::IF_GEZ:
355 cond = kCondGe;
356 break;
357 case Instruction::IF_GTZ:
358 cond = kCondGt;
359 break;
360 case Instruction::IF_LEZ:
361 cond = kCondLe;
362 break;
363 default:
364 cond = static_cast<ConditionCode>(0);
365 LOG(FATAL) << "Unexpected opcode " << opcode;
366 }
buzbee2700f7e2014-03-07 09:46:20 -0800367 OpCmpImmBranch(cond, rl_src.reg, 0, taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700368}
369
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700370void Mir2Lir::GenIntToLong(RegLocation rl_dest, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700371 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
372 if (rl_src.location == kLocPhysReg) {
buzbee2700f7e2014-03-07 09:46:20 -0800373 OpRegCopy(rl_result.reg, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700374 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800375 LoadValueDirect(rl_src, rl_result.reg.GetLow());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700376 }
buzbee2700f7e2014-03-07 09:46:20 -0800377 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_result.reg.GetLow(), 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700378 StoreValueWide(rl_dest, rl_result);
379}
380
Yevgeny Rouban6af82062014-11-26 18:11:54 +0600381void Mir2Lir::GenLongToInt(RegLocation rl_dest, RegLocation rl_src) {
382 rl_src = UpdateLocWide(rl_src);
383 rl_src = NarrowRegLoc(rl_src);
384 StoreValue(rl_dest, rl_src);
385}
386
Brian Carlstrom7940e442013-07-12 13:46:57 -0700387void Mir2Lir::GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700388 RegLocation rl_src) {
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700389 rl_src = LoadValue(rl_src, kCoreReg);
390 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
391 OpKind op = kOpInvalid;
392 switch (opcode) {
393 case Instruction::INT_TO_BYTE:
394 op = kOp2Byte;
395 break;
396 case Instruction::INT_TO_SHORT:
397 op = kOp2Short;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700398 break;
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700399 case Instruction::INT_TO_CHAR:
400 op = kOp2Char;
401 break;
402 default:
403 LOG(ERROR) << "Bad int conversion type";
404 }
buzbee2700f7e2014-03-07 09:46:20 -0800405 OpRegReg(op, rl_result.reg, rl_src.reg);
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700406 StoreValue(rl_dest, rl_result);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700407}
408
/*
 * Let helper function take care of everything. Will call
 * Array::AllocFromCode(type_idx, method, count);
 * Note: AllocFromCode will handle checks for errNegativeArraySize.
 */
void Mir2Lir::GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                          RegLocation rl_src) {
  FlushAllRegs();  /* Everything to home location */
  const DexFile* dex_file = cu_->dex_file;
  CompilerDriver* driver = cu_->compiler_driver;
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *dex_file, type_idx)) {
    bool is_type_initialized;  // Ignored as an array does not have an initializer.
    bool use_direct_type_ptr;
    uintptr_t direct_type_ptr;
    bool is_finalizable;
    if (kEmbedClassInCode &&
        driver->CanEmbedTypeInCode(*dex_file, type_idx, &is_type_initialized, &use_direct_type_ptr,
                                   &direct_type_ptr, &is_finalizable)) {
      // The fast path: the class can be embedded in the compiled code.
      if (!use_direct_type_ptr) {
        // Load the class object into kArg0, then allocate with the resolved-type helper.
        LoadClassType(*dex_file, type_idx, kArg0);
        CallRuntimeHelperRegRegLocationMethod(kQuickAllocArrayResolved, TargetReg(kArg0, kNotWide),
                                              rl_src, true);
      } else {
        // Use the direct pointer.
        CallRuntimeHelperImmRegLocationMethod(kQuickAllocArrayResolved, direct_type_ptr, rl_src,
                                              true);
      }
    } else {
      // The slow path: resolve the type at runtime.
      CallRuntimeHelperImmRegLocationMethod(kQuickAllocArray, type_idx, rl_src, true);
    }
  } else {
    // Access to the type is not guaranteed; the helper performs the access check.
    CallRuntimeHelperImmRegLocationMethod(kQuickAllocArrayWithAccessCheck, type_idx, rl_src, true);
  }
  // The helper leaves the new array reference in the return register.
  StoreValue(rl_dest, GetReturn(kRefReg));
}
446
/*
 * Similar to GenNewArray, but with post-allocation initialization.
 * Verifier guarantees we're dealing with an array class. Current
 * code throws runtime exception "bad Filled array req" for 'D' and 'J'.
 * Current code also throws internal unimp if not 'L', '[' or 'I'.
 */
void Mir2Lir::GenFilledNewArray(CallInfo* info) {
  size_t elems = info->num_arg_words;
  int type_idx = info->index;
  FlushAllRegs();  /* Everything to home location */
  // Pick the allocation entrypoint; add the access check when the type is not
  // statically accessible from the current method.
  QuickEntrypointEnum target;
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                       type_idx)) {
    target = kQuickCheckAndAllocArray;
  } else {
    target = kQuickCheckAndAllocArrayWithAccessCheck;
  }
  CallRuntimeHelperImmImmMethod(target, type_idx, elems, true);
  FreeTemp(TargetReg(kArg2, kNotWide));
  FreeTemp(TargetReg(kArg1, kNotWide));
  /*
   * NOTE: the implicit target for Instruction::FILLED_NEW_ARRAY is the
   * return region. Because AllocFromCode placed the new array
   * in kRet0, we'll just lock it into place. When debugger support is
   * added, it may be necessary to additionally copy all return
   * values to a home location in thread-local storage
   */
  RegStorage ref_reg = TargetReg(kRet0, kRef);
  LockTemp(ref_reg);

  // TODO: use the correct component size, currently all supported types
  // share array alignment with ints (see comment at head of function)
  size_t component_size = sizeof(int32_t);

  if (elems > 5) {
    DCHECK(info->is_range);  // Non-range insn can't encode more than 5 elems.
    /*
     * Bit of ugliness here. We're going generate a mem copy loop
     * on the register range, but it is possible that some regs
     * in the range have been promoted. This is unlikely, but
     * before generating the copy, we'll just force a flush
     * of any regs in the source range that have been promoted to
     * home location.
     */
    for (size_t i = 0; i < elems; i++) {
      RegLocation loc = UpdateLoc(info->args[i]);
      if (loc.location == kLocPhysReg) {
        ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
        if (loc.ref) {
          StoreRefDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, kNotVolatile);
        } else {
          Store32Disp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg);
        }
      }
    }
    /*
     * TUNING note: generated code here could be much improved, but
     * this is an uncommon operation and isn't especially performance
     * critical.
     */
    // This is addressing the stack, which may be out of the 4G area.
    RegStorage r_src = AllocTempRef();
    RegStorage r_dst = AllocTempRef();
    RegStorage r_idx = AllocTempRef();  // Not really a reference, but match src/dst.
    RegStorage r_val;
    // Per-target choice of the scratch register holding the copied value.
    switch (cu_->instruction_set) {
      case kThumb2:
      case kArm64:
        r_val = TargetReg(kLr, kNotWide);
        break;
      case kX86:
      case kX86_64:
        // x86 is register-starved; temporarily give up the result register.
        FreeTemp(ref_reg);
        r_val = AllocTemp();
        break;
      case kMips:
      case kMips64:
        r_val = AllocTemp();
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    // Set up source pointer
    RegLocation rl_first = info->args[0];
    OpRegRegImm(kOpAdd, r_src, TargetPtrReg(kSp), SRegOffset(rl_first.s_reg_low));
    // Set up the target pointer
    OpRegRegImm(kOpAdd, r_dst, ref_reg,
                mirror::Array::DataOffset(component_size).Int32Value());
    // Set up the loop counter (known to be > 0)
    LoadConstant(r_idx, static_cast<int>(elems - 1));
    // Generate the copy loop. Going backwards for convenience
    LIR* loop_head_target = NewLIR0(kPseudoTargetLabel);
    // Copy next element
    {
      ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
      LoadBaseIndexed(r_src, r_idx, r_val, 2, k32);
      // NOTE: No dalvik register annotation, local optimizations will be stopped
      // by the loop boundaries.
    }
    StoreBaseIndexed(r_dst, r_idx, r_val, 2, k32);
    FreeTemp(r_val);
    OpDecAndBranch(kCondGe, r_idx, loop_head_target);
    if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
      // Restore the target pointer
      OpRegRegImm(kOpAdd, ref_reg, r_dst,
                  -mirror::Array::DataOffset(component_size).Int32Value());
    }
    FreeTemp(r_idx);
    FreeTemp(r_dst);
    FreeTemp(r_src);
  } else {
    DCHECK_LE(elems, 5u);  // Usually but not necessarily non-range.
    // TUNING: interleave
    for (size_t i = 0; i < elems; i++) {
      RegLocation rl_arg;
      if (info->args[i].ref) {
        rl_arg = LoadValue(info->args[i], kRefReg);
        StoreRefDisp(ref_reg,
                     mirror::Array::DataOffset(component_size).Int32Value() + i * 4, rl_arg.reg,
                     kNotVolatile);
      } else {
        rl_arg = LoadValue(info->args[i], kCoreReg);
        Store32Disp(ref_reg,
                    mirror::Array::DataOffset(component_size).Int32Value() + i * 4, rl_arg.reg);
      }
      // If the LoadValue caused a temp to be allocated, free it
      if (IsTemp(rl_arg.reg)) {
        FreeTemp(rl_arg.reg);
      }
    }
  }
  if (elems != 0 && info->args[0].ref) {
    // If there is at least one potentially non-null value, unconditionally mark the GC card.
    for (size_t i = 0; i < elems; i++) {
      if (!mir_graph_->IsConstantNullRef(info->args[i])) {
        UnconditionallyMarkGCCard(ref_reg);
        break;
      }
    }
  }
  if (info->result.location != kLocInvalid) {
    StoreValue(info->result, GetReturn(kRefReg));
  }
}
590
/*
 * Array data table format:
 *  ushort ident = 0x0300   magic value
 *  ushort width            width of each element in the table
 *  uint   size             number of elements in the table
 *  ubyte  data[size*width] table of data values (may contain a single-byte
 *                          padding at the end)
 *
 * Total size is 4+(width * size + 1)/2 16-bit code units.
 */
void Mir2Lir::GenFillArrayData(MIR* mir, DexOffset table_offset, RegLocation rl_src) {
  if (kIsDebugBuild) {
    // Sanity-check the payload's magic value before handing it to the runtime.
    const uint16_t* table = mir_graph_->GetTable(mir, table_offset);
    const Instruction::ArrayDataPayload* payload =
        reinterpret_cast<const Instruction::ArrayDataPayload*>(table);
    CHECK_EQ(payload->ident, static_cast<uint16_t>(Instruction::kArrayDataSignature));
  }
  // Pass the table's offset (relative to the method start) so the runtime helper
  // can locate the payload and fill the array referenced by rl_src.
  uint32_t table_offset_from_start = mir->offset + static_cast<int32_t>(table_offset);
  CallRuntimeHelperImmRegLocation(kQuickHandleFillArrayData, table_offset_from_start, rl_src, true);
}
611
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800612//
613// Slow path to ensure a class is initialized for sget/sput.
614//
615class StaticFieldSlowPath : public Mir2Lir::LIRSlowPath {
616 public:
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100617 // There are up to two branches to the static field slow path, the "unresolved" when the type
618 // entry in the dex cache is null, and the "uninit" when the class is not yet initialized.
619 // At least one will be non-null here, otherwise we wouldn't generate the slow path.
buzbee2700f7e2014-03-07 09:46:20 -0800620 StaticFieldSlowPath(Mir2Lir* m2l, LIR* unresolved, LIR* uninit, LIR* cont, int storage_index,
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100621 RegStorage r_base)
Vladimir Marko0b40ecf2015-03-20 12:08:03 +0000622 : LIRSlowPath(m2l, unresolved != nullptr ? unresolved : uninit, cont),
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100623 second_branch_(unresolved != nullptr ? uninit : nullptr),
624 storage_index_(storage_index), r_base_(r_base) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800625 }
626
627 void Compile() {
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100628 LIR* target = GenerateTargetLabel();
629 if (second_branch_ != nullptr) {
630 second_branch_->target = target;
631 }
Andreas Gampe98430592014-07-27 19:44:50 -0700632 m2l_->CallRuntimeHelperImm(kQuickInitializeStaticStorage, storage_index_, true);
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800633 // Copy helper's result into r_base, a no-op on all but MIPS.
Andreas Gampeccc60262014-07-04 18:02:38 -0700634 m2l_->OpRegCopy(r_base_, m2l_->TargetReg(kRet0, kRef));
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800635
636 m2l_->OpUnconditionalBranch(cont_);
637 }
638
639 private:
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100640 // Second branch to the slow path, or null if there's only one branch.
641 LIR* const second_branch_;
642
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800643 const int storage_index_;
buzbee2700f7e2014-03-07 09:46:20 -0800644 const RegStorage r_base_;
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800645};
646
Fred Shih37f05ef2014-07-16 18:38:08 -0700647void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, OpSize size) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000648 const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
Vladimir Markoaf6925b2014-10-31 16:37:32 +0000649 DCHECK_EQ(SPutMemAccessType(mir->dalvikInsn.opcode), field_info.MemAccessType());
Vladimir Markobe0e5462014-02-26 11:24:15 +0000650 cu_->compiler_driver->ProcessedStaticField(field_info.FastPut(), field_info.IsReferrersClass());
Andreas Gampe0b9203e2015-01-22 20:39:27 -0800651 if (!ForceSlowFieldPath(cu_) && field_info.FastPut()) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000652 DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
buzbee2700f7e2014-03-07 09:46:20 -0800653 RegStorage r_base;
Vladimir Markobe0e5462014-02-26 11:24:15 +0000654 if (field_info.IsReferrersClass()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700655 // Fast path, static storage base is this method's class
Matteo Franchin0955f7e2014-05-23 17:32:52 +0100656 RegLocation rl_method = LoadCurrMethod();
buzbeea0cd2d72014-06-01 09:33:49 -0700657 r_base = AllocTempRef();
Andreas Gampe3c12c512014-06-24 18:46:29 +0000658 LoadRefDisp(rl_method.reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base,
659 kNotVolatile);
buzbee2700f7e2014-03-07 09:46:20 -0800660 if (IsTemp(rl_method.reg)) {
661 FreeTemp(rl_method.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700662 }
663 } else {
664 // Medium path, static storage base in a different class which requires checks that the other
665 // class is initialized.
666 // TODO: remove initialized check now that we are initializing classes in the compiler driver.
Vladimir Markobe0e5462014-02-26 11:24:15 +0000667 DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700668 // May do runtime call so everything to home locations.
669 FlushAllRegs();
670 // Using fixed register to sync with possible call to runtime support.
Andreas Gampeccc60262014-07-04 18:02:38 -0700671 RegStorage r_method = TargetReg(kArg1, kRef);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700672 LockTemp(r_method);
673 LoadCurrMethodDirect(r_method);
Andreas Gampeccc60262014-07-04 18:02:38 -0700674 r_base = TargetReg(kArg0, kRef);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800675 LockTemp(r_base);
Andreas Gampe3c12c512014-06-24 18:46:29 +0000676 LoadRefDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), r_base,
677 kNotVolatile);
Andreas Gampe9c3b0892014-04-24 17:33:34 +0000678 int32_t offset_of_field = ObjArray::OffsetOfElement(field_info.StorageIndex()).Int32Value();
Andreas Gampe3c12c512014-06-24 18:46:29 +0000679 LoadRefDisp(r_base, offset_of_field, r_base, kNotVolatile);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800680 // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100681 LIR* unresolved_branch = nullptr;
682 if (!field_info.IsClassInDexCache() &&
683 (mir->optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) == 0) {
684 // Check if r_base is NULL.
685 unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
686 }
687 LIR* uninit_branch = nullptr;
688 if (!field_info.IsClassInitialized() &&
689 (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
690 // Check if r_base is not yet initialized class.
Andreas Gampeccc60262014-07-04 18:02:38 -0700691 RegStorage r_tmp = TargetReg(kArg2, kNotWide);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800692 LockTemp(r_tmp);
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100693 uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
Mark Mendell766e9292014-01-27 07:55:47 -0800694 mirror::Class::StatusOffset().Int32Value(),
Dave Allison69dfe512014-07-11 17:11:58 +0000695 mirror::Class::kStatusInitialized, nullptr, nullptr);
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100696 FreeTemp(r_tmp);
697 }
698 if (unresolved_branch != nullptr || uninit_branch != nullptr) {
699 // The slow path is invoked if the r_base is NULL or the class pointed
700 // to by it is not initialized.
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800701 LIR* cont = NewLIR0(kPseudoTargetLabel);
buzbee2700f7e2014-03-07 09:46:20 -0800702 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
Vladimir Markobe0e5462014-02-26 11:24:15 +0000703 field_info.StorageIndex(), r_base));
Ian Rogers5ddb4102014-01-07 08:58:46 -0800704
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100705 if (uninit_branch != nullptr) {
706 // Ensure load of status and store of value don't re-order.
707 // TODO: Presumably the actual value store is control-dependent on the status load,
708 // and will thus not be reordered in any case, since stores are never speculated.
709 // Does later code "know" that the class is now initialized? If so, we still
710 // need the barrier to guard later static loads.
711 GenMemBarrier(kLoadAny);
712 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700713 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700714 FreeTemp(r_method);
715 }
716 // rBase now holds static storage base
Fred Shih37f05ef2014-07-16 18:38:08 -0700717 RegisterClass reg_class = RegClassForFieldLoadStore(size, field_info.IsVolatile());
718 if (IsWide(size)) {
Vladimir Marko674744e2014-04-24 15:18:26 +0100719 rl_src = LoadValueWide(rl_src, reg_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700720 } else {
Vladimir Marko674744e2014-04-24 15:18:26 +0100721 rl_src = LoadValue(rl_src, reg_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700722 }
Fred Shih37f05ef2014-07-16 18:38:08 -0700723 if (IsRef(size)) {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000724 StoreRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg,
725 field_info.IsVolatile() ? kVolatile : kNotVolatile);
Vladimir Marko674744e2014-04-24 15:18:26 +0100726 } else {
Fred Shih37f05ef2014-07-16 18:38:08 -0700727 StoreBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg, size,
Andreas Gampe3c12c512014-06-24 18:46:29 +0000728 field_info.IsVolatile() ? kVolatile : kNotVolatile);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 }
Fred Shih37f05ef2014-07-16 18:38:08 -0700730 if (IsRef(size) && !mir_graph_->IsConstantNullRef(rl_src)) {
Vladimir Marko743b98c2014-11-24 19:45:41 +0000731 MarkGCCard(mir->optimization_flags, rl_src.reg, r_base);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700732 }
Ian Rogers5ddb4102014-01-07 08:58:46 -0800733 FreeTemp(r_base);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700734 } else {
735 FlushAllRegs(); // Everything to home locations
Fred Shih37f05ef2014-07-16 18:38:08 -0700736 QuickEntrypointEnum target;
737 switch (size) {
738 case kReference:
739 target = kQuickSetObjStatic;
740 break;
741 case k64:
742 case kDouble:
743 target = kQuickSet64Static;
744 break;
745 case k32:
746 case kSingle:
747 target = kQuickSet32Static;
748 break;
749 case kSignedHalf:
750 case kUnsignedHalf:
751 target = kQuickSet16Static;
752 break;
753 case kSignedByte:
754 case kUnsignedByte:
755 target = kQuickSet8Static;
756 break;
757 case kWord: // Intentional fallthrough.
758 default:
759 LOG(FATAL) << "Can't determine entrypoint for: " << size;
760 target = kQuickSet32Static;
761 }
Andreas Gampe98430592014-07-27 19:44:50 -0700762 CallRuntimeHelperImmRegLocation(target, field_info.FieldIndex(), rl_src, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700763 }
764}
765
Fred Shih37f05ef2014-07-16 18:38:08 -0700766void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest, OpSize size, Primitive::Type type) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000767 const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
Vladimir Markoaf6925b2014-10-31 16:37:32 +0000768 DCHECK_EQ(SGetMemAccessType(mir->dalvikInsn.opcode), field_info.MemAccessType());
Vladimir Markobe0e5462014-02-26 11:24:15 +0000769 cu_->compiler_driver->ProcessedStaticField(field_info.FastGet(), field_info.IsReferrersClass());
Fred Shih37f05ef2014-07-16 18:38:08 -0700770
Andreas Gampe0b9203e2015-01-22 20:39:27 -0800771 if (!ForceSlowFieldPath(cu_) && field_info.FastGet()) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000772 DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
buzbee2700f7e2014-03-07 09:46:20 -0800773 RegStorage r_base;
Vladimir Markobe0e5462014-02-26 11:24:15 +0000774 if (field_info.IsReferrersClass()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700775 // Fast path, static storage base is this method's class
776 RegLocation rl_method = LoadCurrMethod();
buzbeea0cd2d72014-06-01 09:33:49 -0700777 r_base = AllocTempRef();
Andreas Gampe3c12c512014-06-24 18:46:29 +0000778 LoadRefDisp(rl_method.reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base,
779 kNotVolatile);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700780 } else {
781 // Medium path, static storage base in a different class which requires checks that the other
782 // class is initialized
Vladimir Markobe0e5462014-02-26 11:24:15 +0000783 DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700784 // May do runtime call so everything to home locations.
785 FlushAllRegs();
786 // Using fixed register to sync with possible call to runtime support.
Andreas Gampeccc60262014-07-04 18:02:38 -0700787 RegStorage r_method = TargetReg(kArg1, kRef);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700788 LockTemp(r_method);
789 LoadCurrMethodDirect(r_method);
Andreas Gampeccc60262014-07-04 18:02:38 -0700790 r_base = TargetReg(kArg0, kRef);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800791 LockTemp(r_base);
Andreas Gampe3c12c512014-06-24 18:46:29 +0000792 LoadRefDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), r_base,
793 kNotVolatile);
Andreas Gampe9c3b0892014-04-24 17:33:34 +0000794 int32_t offset_of_field = ObjArray::OffsetOfElement(field_info.StorageIndex()).Int32Value();
Andreas Gampe3c12c512014-06-24 18:46:29 +0000795 LoadRefDisp(r_base, offset_of_field, r_base, kNotVolatile);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800796 // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100797 LIR* unresolved_branch = nullptr;
798 if (!field_info.IsClassInDexCache() &&
799 (mir->optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) == 0) {
800 // Check if r_base is NULL.
801 unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
802 }
803 LIR* uninit_branch = nullptr;
804 if (!field_info.IsClassInitialized() &&
805 (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
806 // Check if r_base is not yet initialized class.
Andreas Gampeccc60262014-07-04 18:02:38 -0700807 RegStorage r_tmp = TargetReg(kArg2, kNotWide);
Ian Rogers5ddb4102014-01-07 08:58:46 -0800808 LockTemp(r_tmp);
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100809 uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
Mark Mendell766e9292014-01-27 07:55:47 -0800810 mirror::Class::StatusOffset().Int32Value(),
Dave Allison69dfe512014-07-11 17:11:58 +0000811 mirror::Class::kStatusInitialized, nullptr, nullptr);
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100812 FreeTemp(r_tmp);
813 }
814 if (unresolved_branch != nullptr || uninit_branch != nullptr) {
815 // The slow path is invoked if the r_base is NULL or the class pointed
816 // to by it is not initialized.
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800817 LIR* cont = NewLIR0(kPseudoTargetLabel);
buzbee2700f7e2014-03-07 09:46:20 -0800818 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
Vladimir Markobe0e5462014-02-26 11:24:15 +0000819 field_info.StorageIndex(), r_base));
Ian Rogers5ddb4102014-01-07 08:58:46 -0800820
Vladimir Marko66c6d7b2014-10-16 15:41:48 +0100821 if (uninit_branch != nullptr) {
822 // Ensure load of status and load of value don't re-order.
823 GenMemBarrier(kLoadAny);
824 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700825 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700826 FreeTemp(r_method);
827 }
Ian Rogers5ddb4102014-01-07 08:58:46 -0800828 // r_base now holds static storage base
Fred Shih37f05ef2014-07-16 18:38:08 -0700829 RegisterClass reg_class = RegClassForFieldLoadStore(size, field_info.IsVolatile());
Vladimir Marko674744e2014-04-24 15:18:26 +0100830 RegLocation rl_result = EvalLoc(rl_dest, reg_class, true);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800831
Vladimir Marko674744e2014-04-24 15:18:26 +0100832 int field_offset = field_info.FieldOffset().Int32Value();
Fred Shih37f05ef2014-07-16 18:38:08 -0700833 if (IsRef(size)) {
834 // TODO: DCHECK?
Andreas Gampe3c12c512014-06-24 18:46:29 +0000835 LoadRefDisp(r_base, field_offset, rl_result.reg, field_info.IsVolatile() ? kVolatile :
836 kNotVolatile);
Vladimir Marko674744e2014-04-24 15:18:26 +0100837 } else {
Fred Shih37f05ef2014-07-16 18:38:08 -0700838 LoadBaseDisp(r_base, field_offset, rl_result.reg, size, field_info.IsVolatile() ?
Andreas Gampe3c12c512014-06-24 18:46:29 +0000839 kVolatile : kNotVolatile);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800840 }
Vladimir Marko674744e2014-04-24 15:18:26 +0100841 FreeTemp(r_base);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -0800842
Fred Shih37f05ef2014-07-16 18:38:08 -0700843 if (IsWide(size)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700844 StoreValueWide(rl_dest, rl_result);
845 } else {
846 StoreValue(rl_dest, rl_result);
847 }
848 } else {
Fred Shih37f05ef2014-07-16 18:38:08 -0700849 DCHECK(SizeMatchesTypeForEntrypoint(size, type));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700850 FlushAllRegs(); // Everything to home locations
Fred Shih37f05ef2014-07-16 18:38:08 -0700851 QuickEntrypointEnum target;
852 switch (type) {
853 case Primitive::kPrimNot:
854 target = kQuickGetObjStatic;
855 break;
856 case Primitive::kPrimLong:
857 case Primitive::kPrimDouble:
858 target = kQuickGet64Static;
859 break;
860 case Primitive::kPrimInt:
861 case Primitive::kPrimFloat:
862 target = kQuickGet32Static;
863 break;
864 case Primitive::kPrimShort:
865 target = kQuickGetShortStatic;
866 break;
867 case Primitive::kPrimChar:
868 target = kQuickGetCharStatic;
869 break;
870 case Primitive::kPrimByte:
871 target = kQuickGetByteStatic;
872 break;
873 case Primitive::kPrimBoolean:
874 target = kQuickGetBooleanStatic;
875 break;
876 case Primitive::kPrimVoid: // Intentional fallthrough.
877 default:
878 LOG(FATAL) << "Can't determine entrypoint for: " << type;
879 target = kQuickGet32Static;
880 }
Andreas Gampe98430592014-07-27 19:44:50 -0700881 CallRuntimeHelperImm(target, field_info.FieldIndex(), true);
882
Douglas Leung2db3e262014-06-25 16:02:55 -0700883 // FIXME: pGetXXStatic always return an int or int64 regardless of rl_dest.fp.
Fred Shih37f05ef2014-07-16 18:38:08 -0700884 if (IsWide(size)) {
Douglas Leung2db3e262014-06-25 16:02:55 -0700885 RegLocation rl_result = GetReturnWide(kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700886 StoreValueWide(rl_dest, rl_result);
887 } else {
Douglas Leung2db3e262014-06-25 16:02:55 -0700888 RegLocation rl_result = GetReturn(rl_dest.ref ? kRefReg : kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700889 StoreValue(rl_dest, rl_result);
890 }
891 }
892}
893
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800894// Generate code for all slow paths.
895void Mir2Lir::HandleSlowPaths() {
Chao-ying Fu8159af62014-07-07 17:13:52 -0700896 // We should check slow_paths_.Size() every time, because a new slow path
897 // may be created during slowpath->Compile().
Vladimir Markoe39c54e2014-09-22 14:50:02 +0100898 for (LIRSlowPath* slowpath : slow_paths_) {
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800899 slowpath->Compile();
900 }
Vladimir Markoe39c54e2014-09-22 14:50:02 +0100901 slow_paths_.clear();
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800902}
903
// Generate code for an IGET (instance field load) described by |mir|.
// |opt_flags| carries MIR optimization flags (e.g. null-check elimination),
// |size|/|type| describe the field, |rl_dest| receives the loaded value and
// |rl_obj| holds the object reference being read from.
void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size, Primitive::Type type,
                      RegLocation rl_dest, RegLocation rl_obj) {
  const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
  if (kIsDebugBuild) {
    // Quickened iget/iput opcodes encode the memory access type differently,
    // so pick the matching decoder before comparing with the lowering info.
    auto mem_access_type = IsInstructionIGetQuickOrIPutQuick(mir->dalvikInsn.opcode) ?
        IGetQuickOrIPutQuickMemAccessType(mir->dalvikInsn.opcode) :
        IGetMemAccessType(mir->dalvikInsn.opcode);
    DCHECK_EQ(mem_access_type, field_info.MemAccessType()) << mir->dalvikInsn.opcode;
  }
  cu_->compiler_driver->ProcessedInstanceField(field_info.FastGet());
  if (!ForceSlowFieldPath(cu_) && field_info.FastGet()) {
    // Fast path: load directly from the known field offset.
    RegisterClass reg_class = RegClassForFieldLoadStore(size, field_info.IsVolatile());
    // A load of the class will lead to an iget with offset 0.
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    rl_obj = LoadValue(rl_obj, kRefReg);
    GenNullCheck(rl_obj.reg, opt_flags);
    RegLocation rl_result = EvalLoc(rl_dest, reg_class, true);
    int field_offset = field_info.FieldOffset().Int32Value();
    LIR* load_lir;
    if (IsRef(size)) {
      load_lir = LoadRefDisp(rl_obj.reg, field_offset, rl_result.reg, field_info.IsVolatile() ?
          kVolatile : kNotVolatile);
    } else {
      load_lir = LoadBaseDisp(rl_obj.reg, field_offset, rl_result.reg, size,
                              field_info.IsVolatile() ? kVolatile : kNotVolatile);
    }
    // The load itself may fault on a null object; record it for implicit
    // null-check handling.
    MarkPossibleNullPointerExceptionAfter(opt_flags, load_lir);
    if (IsWide(size)) {
      StoreValueWide(rl_dest, rl_result);
    } else {
      StoreValue(rl_dest, rl_result);
    }
  } else {
    // Slow path: call the runtime entrypoint matching the field's type.
    DCHECK(SizeMatchesTypeForEntrypoint(size, type));
    QuickEntrypointEnum target;
    switch (type) {
      case Primitive::kPrimNot:
        target = kQuickGetObjInstance;
        break;
      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        target = kQuickGet64Instance;
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimInt:
        target = kQuickGet32Instance;
        break;
      case Primitive::kPrimShort:
        target = kQuickGetShortInstance;
        break;
      case Primitive::kPrimChar:
        target = kQuickGetCharInstance;
        break;
      case Primitive::kPrimByte:
        target = kQuickGetByteInstance;
        break;
      case Primitive::kPrimBoolean:
        target = kQuickGetBooleanInstance;
        break;
      case Primitive::kPrimVoid:  // Intentional fallthrough.
      default:
        LOG(FATAL) << "Can't determine entrypoint for: " << type;
        target = kQuickGet32Instance;
    }
    // Second argument of pGetXXInstance is always a reference.
    DCHECK_EQ(static_cast<unsigned int>(rl_obj.wide), 0U);
    CallRuntimeHelperImmRegLocation(target, field_info.FieldIndex(), rl_obj, true);

    // FIXME: pGetXXInstance always return an int or int64 regardless of rl_dest.fp.
    if (IsWide(size)) {
      RegLocation rl_result = GetReturnWide(kCoreReg);
      StoreValueWide(rl_dest, rl_result);
    } else {
      RegLocation rl_result = GetReturn(rl_dest.ref ? kRefReg : kCoreReg);
      StoreValue(rl_dest, rl_result);
    }
  }
}
982
// Generate code for an IPUT (instance field store) described by |mir|.
// |opt_flags| carries MIR optimization flags, |size| the field's memory access
// size, |rl_src| the value to store and |rl_obj| the target object reference.
void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size,
                      RegLocation rl_src, RegLocation rl_obj) {
  const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
  if (kIsDebugBuild) {
    // Quickened iget/iput opcodes encode the memory access type differently,
    // so pick the matching decoder before comparing with the lowering info.
    auto mem_access_type = IsInstructionIGetQuickOrIPutQuick(mir->dalvikInsn.opcode) ?
        IGetQuickOrIPutQuickMemAccessType(mir->dalvikInsn.opcode) :
        IPutMemAccessType(mir->dalvikInsn.opcode);
    DCHECK_EQ(mem_access_type, field_info.MemAccessType());
  }
  cu_->compiler_driver->ProcessedInstanceField(field_info.FastPut());
  if (!ForceSlowFieldPath(cu_) && field_info.FastPut()) {
    // Fast path: store directly at the known field offset.
    RegisterClass reg_class = RegClassForFieldLoadStore(size, field_info.IsVolatile());
    // Dex code never writes to the class field.
    DCHECK_GE(static_cast<uint32_t>(field_info.FieldOffset().Int32Value()),
              sizeof(mirror::HeapReference<mirror::Class>));
    rl_obj = LoadValue(rl_obj, kRefReg);
    if (IsWide(size)) {
      rl_src = LoadValueWide(rl_src, reg_class);
    } else {
      rl_src = LoadValue(rl_src, reg_class);
    }
    GenNullCheck(rl_obj.reg, opt_flags);
    int field_offset = field_info.FieldOffset().Int32Value();
    LIR* null_ck_insn;
    if (IsRef(size)) {
      null_ck_insn = StoreRefDisp(rl_obj.reg, field_offset, rl_src.reg, field_info.IsVolatile() ?
          kVolatile : kNotVolatile);
    } else {
      null_ck_insn = StoreBaseDisp(rl_obj.reg, field_offset, rl_src.reg, size,
                                   field_info.IsVolatile() ? kVolatile : kNotVolatile);
    }
    // The store itself may fault on a null object; record it for implicit
    // null-check handling.
    MarkPossibleNullPointerExceptionAfter(opt_flags, null_ck_insn);
    if (IsRef(size) && !mir_graph_->IsConstantNullRef(rl_src)) {
      // Reference store: dirty the GC card for the holding object. Skipped
      // when the stored value is provably null.
      MarkGCCard(opt_flags, rl_src.reg, rl_obj.reg);
    }
  } else {
    // Slow path: call the runtime entrypoint matching the field size.
    QuickEntrypointEnum target;
    switch (size) {
      case kReference:
        target = kQuickSetObjInstance;
        break;
      case k64:
      case kDouble:
        target = kQuickSet64Instance;
        break;
      case k32:
      case kSingle:
        target = kQuickSet32Instance;
        break;
      case kSignedHalf:
      case kUnsignedHalf:
        target = kQuickSet16Instance;
        break;
      case kSignedByte:
      case kUnsignedByte:
        target = kQuickSet8Instance;
        break;
      case kWord:  // Intentional fallthrough.
      default:
        LOG(FATAL) << "Can't determine entrypoint for: " << size;
        target = kQuickSet32Instance;
    }
    CallRuntimeHelperImmRegLocationRegLocation(target, field_info.FieldIndex(), rl_obj, rl_src,
                                               true);
  }
}
1049
Ian Rogersa9a82542013-10-04 11:17:26 -07001050void Mir2Lir::GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index,
1051 RegLocation rl_src) {
1052 bool needs_range_check = !(opt_flags & MIR_IGNORE_RANGE_CHECK);
1053 bool needs_null_check = !((cu_->disable_opt & (1 << kNullCheckElimination)) &&
1054 (opt_flags & MIR_IGNORE_NULL_CHECK));
Andreas Gampe98430592014-07-27 19:44:50 -07001055 QuickEntrypointEnum target = needs_range_check
1056 ? (needs_null_check ? kQuickAputObjectWithNullAndBoundCheck
1057 : kQuickAputObjectWithBoundCheck)
1058 : kQuickAputObject;
1059 CallRuntimeHelperRegLocationRegLocationRegLocation(target, rl_array, rl_index, rl_src, true);
Ian Rogersa9a82542013-10-04 11:17:26 -07001060}
1061
// Generate code for const-class: load the resolved Class for |type_idx| into
// |rl_dest|. If the referrer may not access the type, a runtime helper that
// also verifies access is used; otherwise the class is loaded from the dex
// cache (PC-relative when supported), with a slow path that resolves the type
// if the cache entry is still null.
void Mir2Lir::GenConstClass(uint32_t type_idx, RegLocation rl_dest) {
  RegLocation rl_result;
  if (!cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
                                                        *cu_->dex_file,
                                                        type_idx)) {
    // Call out to helper which resolves type and verifies access.
    // Resolved type returned in kRet0.
    CallRuntimeHelperImmMethod(kQuickInitializeTypeAndVerifyAccess, type_idx, true);
    rl_result = GetReturn(kRefReg);
  } else {
    rl_result = EvalLoc(rl_dest, kRefReg, true);
    // We don't need access checks, load type from dex cache.
    RegStorage r_method = RegStorage::InvalidReg();
    if (CanUseOpPcRelDexCacheArrayLoad()) {
      // PC-relative load straight from the dex cache arrays.
      size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx);
      OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, rl_result.reg);
    } else {
      // Go through the current method: method -> resolved types -> class.
      RegLocation rl_method = LoadCurrMethod();
      CheckRegLocation(rl_method);
      r_method = rl_method.reg;
      int32_t dex_cache_offset =
          mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value();
      RegStorage res_reg = AllocTempRef();
      LoadRefDisp(r_method, dex_cache_offset, res_reg, kNotVolatile);
      int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value();
      LoadRefDisp(res_reg, offset_of_type, rl_result.reg, kNotVolatile);
      FreeTemp(res_reg);
    }
    if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file,
        type_idx) || ForceSlowTypePath(cu_)) {
      // Slow path, at runtime test if type is null and if so initialize.
      FlushAllRegs();
      GenIfNullUseHelperImmMethod(rl_result.reg, kQuickInitializeType, type_idx, r_method);
    }
  }
  StoreValue(rl_dest, rl_result);
}
1099
// Generate code for const-string: load the resolved String for |string_idx|
// into |rl_dest|, resolving it at runtime via a slow path when the compiler
// cannot assume it is already present in the dex cache.
void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) {
  /* NOTE: Most strings should be available at compile time */
  int32_t offset_of_string = mirror::ObjectArray<mirror::String>::OffsetOfElement(string_idx).
      Int32Value();
  if (!cu_->compiler_driver->CanAssumeStringIsPresentInDexCache(
      *cu_->dex_file, string_idx) || ForceSlowStringPath(cu_)) {
    // slow path, resolve string if not in dex cache
    FlushAllRegs();
    LockCallTemps();  // Using explicit registers

    // Might call out to helper, which will return resolved string in kRet0
    RegStorage ret0 = TargetReg(kRet0, kRef);
    RegStorage r_method = RegStorage::InvalidReg();
    if (CanUseOpPcRelDexCacheArrayLoad()) {
      // PC-relative load straight from the dex cache arrays.
      size_t offset = dex_cache_arrays_layout_.StringOffset(string_idx);
      OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, ret0);
    } else {
      r_method = LoadCurrMethodWithHint(TargetReg(kArg1, kRef));
      // Method to declaring class.
      RegStorage arg0 = TargetReg(kArg0, kRef);
      LoadRefDisp(r_method, mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
                  arg0, kNotVolatile);
      // Declaring class to dex cache strings.
      LoadRefDisp(arg0, mirror::Class::DexCacheStringsOffset().Int32Value(), arg0, kNotVolatile);

      LoadRefDisp(arg0, offset_of_string, ret0, kNotVolatile);
    }
    // If the cache entry was null, call the resolution helper at runtime.
    GenIfNullUseHelperImmMethod(ret0, kQuickResolveString, string_idx, r_method);

    GenBarrier();
    StoreValue(rl_dest, GetReturn(kRefReg));
  } else {
    // Fast path: the string is known to be resolved; load it directly.
    RegLocation rl_result = EvalLoc(rl_dest, kRefReg, true);
    if (CanUseOpPcRelDexCacheArrayLoad()) {
      size_t offset = dex_cache_arrays_layout_.StringOffset(string_idx);
      OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, rl_result.reg);
    } else {
      // Go through the current method: method -> declaring class -> strings.
      RegLocation rl_method = LoadCurrMethod();
      RegStorage res_reg = AllocTempRef();
      LoadRefDisp(rl_method.reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), res_reg,
                  kNotVolatile);
      LoadRefDisp(res_reg, mirror::Class::DexCacheStringsOffset().Int32Value(), res_reg,
                  kNotVolatile);
      LoadRefDisp(res_reg, offset_of_string, rl_result.reg, kNotVolatile);
      FreeTemp(res_reg);
    }
    StoreValue(rl_dest, rl_result);
  }
}
1149
/*
 * Generate code for a new-instance bytecode.  Lets a runtime helper take care
 * of everything; will call Class::NewInstanceFromCode(type_idx, method).
 * The allocated object is returned by the helper and stored into rl_dest.
 */
void Mir2Lir::GenNewInstance(uint32_t type_idx, RegLocation rl_dest) {
  FlushAllRegs();  /* Everything to home location */
  // alloc will always check for resolution, do we also need to verify
  // access because the verifier was unable to?
  const DexFile* dex_file = cu_->dex_file;
  CompilerDriver* driver = cu_->compiler_driver;
  if (driver->CanAccessInstantiableTypeWithoutChecks(cu_->method_idx, *dex_file, type_idx)) {
    bool is_type_initialized;
    bool use_direct_type_ptr;
    uintptr_t direct_type_ptr;
    bool is_finalizable;
    // Try to embed the class (either as a loaded Class* or a direct pointer) so the
    // runtime helper can skip resolution.  Finalizable classes are excluded because
    // they need the slower allocation path.
    if (kEmbedClassInCode &&
        driver->CanEmbedTypeInCode(*dex_file, type_idx, &is_type_initialized, &use_direct_type_ptr,
                                   &direct_type_ptr, &is_finalizable) &&
        !is_finalizable) {
      // The fast path.
      if (!use_direct_type_ptr) {
        // Materialize the Class* into kArg0, then pick the helper based on whether
        // the class still needs initialization.
        LoadClassType(*dex_file, type_idx, kArg0);
        if (!is_type_initialized) {
          CallRuntimeHelperRegMethod(kQuickAllocObjectResolved, TargetReg(kArg0, kRef), true);
        } else {
          CallRuntimeHelperRegMethod(kQuickAllocObjectInitialized, TargetReg(kArg0, kRef), true);
        }
      } else {
        // Use the direct pointer.
        if (!is_type_initialized) {
          CallRuntimeHelperImmMethod(kQuickAllocObjectResolved, direct_type_ptr, true);
        } else {
          CallRuntimeHelperImmMethod(kQuickAllocObjectInitialized, direct_type_ptr, true);
        }
      }
    } else {
      // The slow path.
      CallRuntimeHelperImmMethod(kQuickAllocObject, type_idx, true);
    }
  } else {
    // Access not provable at compile time: helper performs the access check too.
    CallRuntimeHelperImmMethod(kQuickAllocObjectWithAccessCheck, type_idx, true);
  }
  // All helpers return the new object in the standard reference return register.
  StoreValue(rl_dest, GetReturn(kRefReg));
}
1194
// Generate code for a throw bytecode: hand the exception object in rl_src to the
// kQuickDeliverException runtime helper (a safepoint call that does not return here).
void Mir2Lir::GenThrow(RegLocation rl_src) {
  FlushAllRegs();
  CallRuntimeHelperRegLocation(kQuickDeliverException, rl_src, true);
}
1199
// For final classes there are no sub-classes to check and so we can answer the instance-of
// question with simple comparisons.
//
// rl_dest receives 1 if rl_src's class equals the target class, 0 otherwise
// (including when rl_src is null).  The target class is obtained from the
// declaring class, a PC-relative dex-cache load, or the dex-cache array,
// depending on what the caller/backend supports.
void Mir2Lir::GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, RegLocation rl_dest,
                                 RegLocation rl_src) {
  // X86 has its own implementation.
  DCHECK(cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64);

  RegLocation object = LoadValue(rl_src, kRefReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  RegStorage result_reg = rl_result.reg;
  // If the result register aliases the object register, work in a fresh temp so
  // the "assume false" store below doesn't clobber the object reference.
  if (IsSameReg(result_reg, object.reg)) {
    result_reg = AllocTypedTemp(false, kCoreReg);
    DCHECK(!IsSameReg(result_reg, object.reg));
  }
  LoadConstant(result_reg, 0);     // assume false
  // Null input: skip the class compare entirely; result stays 0.
  LIR* null_branchover = OpCmpImmBranch(kCondEq, object.reg, 0, NULL);

  RegStorage check_class = AllocTypedTemp(false, kRefReg);
  RegStorage object_class = AllocTypedTemp(false, kRefReg);

  if (use_declaring_class) {
    // Target class is the current method's declaring class.
    RegStorage r_method = LoadCurrMethodWithHint(check_class);
    LoadRefDisp(r_method, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), check_class,
                kNotVolatile);
    LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class,
                kNotVolatile);
  } else if (CanUseOpPcRelDexCacheArrayLoad()) {
    // Load the resolved type with a PC-relative dex-cache-array access.
    size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx);
    OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, check_class);
    LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class,
                kNotVolatile);
  } else {
    // Generic path: method -> resolved-types array -> Class* at type_idx.
    RegStorage r_method = LoadCurrMethodWithHint(check_class);
    LoadRefDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
                check_class, kNotVolatile);
    LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class,
                kNotVolatile);
    int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value();
    LoadRefDisp(check_class, offset_of_type, check_class, kNotVolatile);
  }

  // FIXME: what should we be comparing here? compressed or decompressed references?
  if (cu_->instruction_set == kThumb2) {
    // Thumb2: use an IT block to conditionally load 'true' instead of branching.
    OpRegReg(kOpCmp, check_class, object_class);  // Same?
    LIR* it = OpIT(kCondEq, "");   // if-convert the test
    LoadConstant(result_reg, 1);     // .eq case - load true
    OpEndIT(it);
  } else {
    GenSelectConst32(check_class, object_class, kCondEq, 1, 0, result_reg, kCoreReg);
  }
  LIR* target = NewLIR0(kPseudoTargetLabel);
  null_branchover->target = target;
  FreeTemp(object_class);
  FreeTemp(check_class);
  // Copy back if we computed into a scratch register above.
  if (IsTemp(result_reg)) {
    OpRegCopy(rl_result.reg, result_reg);
    FreeTemp(result_reg);
  }
  StoreValue(rl_dest, rl_result);
}
1260
// General instance-of codegen that may call the kQuickInstanceofNonTrivial runtime
// helper.  Uses the fixed call registers: kArg0 = object ref, kArg1 = ref->klass_,
// kArg2 = target Class*.  The flags describe what the verifier/driver proved about
// the target type and control which fast paths can be emitted.
void Mir2Lir::GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final,
                                         bool type_known_abstract, bool use_declaring_class,
                                         bool can_assume_type_is_in_dex_cache,
                                         uint32_t type_idx, RegLocation rl_dest,
                                         RegLocation rl_src) {
  FlushAllRegs();
  // May generate a call - use explicit registers
  LockCallTemps();
  RegStorage class_reg = TargetReg(kArg2, kRef);  // kArg2 will hold the Class*
  RegStorage ref_reg = TargetReg(kArg0, kRef);  // kArg0 will hold the ref.
  RegStorage ret_reg = GetReturn(kRefReg).reg;
  if (needs_access_check) {
    // Check we have access to type_idx and if not throw IllegalAccessError,
    // returns Class* in kArg0
    CallRuntimeHelperImmMethod(kQuickInitializeTypeAndVerifyAccess, type_idx, true);
    OpRegCopy(class_reg, ret_reg);  // Align usage with fast path
    LoadValueDirectFixed(rl_src, ref_reg);  // kArg0 <= ref
  } else if (use_declaring_class) {
    // Target class is the current method's declaring class - no dex cache needed.
    RegStorage r_method = LoadCurrMethodWithHint(TargetReg(kArg1, kRef));
    LoadValueDirectFixed(rl_src, ref_reg);  // kArg0 <= ref
    LoadRefDisp(r_method, mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
                class_reg, kNotVolatile);
  } else {
    if (can_assume_type_is_in_dex_cache) {
      // Conditionally, as in the other case we will also load it.
      LoadValueDirectFixed(rl_src, ref_reg);  // kArg0 <= ref
    }

    RegStorage r_method = RegStorage::InvalidReg();
    if (CanUseOpPcRelDexCacheArrayLoad()) {
      // PC-relative dex-cache-array load of the resolved Class*.
      size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx);
      OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, class_reg);
    } else {
      r_method = LoadCurrMethodWithHint(TargetReg(kArg1, kRef));
      // Load dex cache entry into class_reg (kArg2)
      LoadRefDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
                  class_reg, kNotVolatile);
      int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value();
      LoadRefDisp(class_reg, offset_of_type, class_reg, kNotVolatile);
    }
    if (!can_assume_type_is_in_dex_cache) {
      // Cache entry may be null at runtime: resolve via kQuickInitializeType if so.
      GenIfNullUseHelperImmMethod(class_reg, kQuickInitializeType, type_idx, r_method);

      // Should load value here.
      LoadValueDirectFixed(rl_src, ref_reg);  // kArg0 <= ref
    }
  }
  /* kArg0 is ref, kArg2 is class. If ref==null, use directly as bool result */
  RegLocation rl_result = GetReturn(kCoreReg);
  if (!IsSameReg(rl_result.reg, ref_reg)) {
    // On MIPS and x86_64 rArg0 != rl_result, place false in result if branch is taken.
    LoadConstant(rl_result.reg, 0);
  }
  LIR* branch1 = OpCmpImmBranch(kCondEq, ref_reg, 0, NULL);

  /* load object->klass_ */
  RegStorage ref_class_reg = TargetReg(kArg1, kRef);  // kArg1 will hold the Class* of ref.
  DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
  LoadRefDisp(ref_reg, mirror::Object::ClassOffset().Int32Value(),
              ref_class_reg, kNotVolatile);
  /* kArg0 is ref, kArg1 is ref->klass_, kArg2 is class */
  LIR* branchover = NULL;
  if (type_known_final) {
    // Final class: an exact class compare answers the question - no helper call.
    // rl_result == ref == class.
    GenSelectConst32(ref_class_reg, class_reg, kCondEq, 1, 0, rl_result.reg,
                     kCoreReg);
  } else {
    if (cu_->instruction_set == kThumb2) {
      // Thumb2: conditionally skip the helper call with an IT block instead of a branch.
      RegStorage r_tgt = LoadHelper(kQuickInstanceofNonTrivial);
      LIR* it = nullptr;
      if (!type_known_abstract) {
        /* Uses conditional nullification */
        OpRegReg(kOpCmp, ref_class_reg, class_reg);  // Same?
        it = OpIT(kCondEq, "EE");   // if-convert the test
        LoadConstant(rl_result.reg, 1);     // .eq case - load true
      }
      OpRegCopy(ref_reg, class_reg);    // .ne case - arg0 <= class
      OpReg(kOpBlx, r_tgt);    // .ne case: helper(class, ref->class)
      if (it != nullptr) {
        OpEndIT(it);
      }
      FreeTemp(r_tgt);
    } else {
      if (!type_known_abstract) {
        /* Uses branchovers */
        LoadConstant(rl_result.reg, 1);     // assume true
        branchover = OpCmpBranch(kCondEq, TargetReg(kArg1, kRef), TargetReg(kArg2, kRef), NULL);
      }

      OpRegCopy(TargetReg(kArg0, kRef), class_reg);    // .ne case - arg0 <= class
      CallRuntimeHelper(kQuickInstanceofNonTrivial, false);
    }
  }
  // TODO: only clobber when type isn't final?
  ClobberCallerSave();
  /* branch targets here */
  LIR* target = NewLIR0(kPseudoTargetLabel);
  StoreValue(rl_dest, rl_result);
  branch1->target = target;
  if (branchover != nullptr) {
    branchover->target = target;
  }
}
1364
1365void Mir2Lir::GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src) {
1366 bool type_known_final, type_known_abstract, use_declaring_class;
1367 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
1368 *cu_->dex_file,
1369 type_idx,
1370 &type_known_final,
1371 &type_known_abstract,
1372 &use_declaring_class);
1373 bool can_assume_type_is_in_dex_cache = !needs_access_check &&
1374 cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx);
1375
1376 if ((use_declaring_class || can_assume_type_is_in_dex_cache) && type_known_final) {
1377 GenInstanceofFinal(use_declaring_class, type_idx, rl_dest, rl_src);
1378 } else {
1379 GenInstanceofCallingHelper(needs_access_check, type_known_final, type_known_abstract,
1380 use_declaring_class, can_assume_type_is_in_dex_cache,
1381 type_idx, rl_dest, rl_src);
1382 }
1383}
1384
// Generate code for a check-cast bytecode.  Emits nothing when compiler or
// verifier analysis proved the cast can never throw; otherwise loads the target
// Class* into kArg2, the object into kArg0, and emits a fast equality check with
// an out-of-line slow path that calls kQuickCheckCast.
void Mir2Lir::GenCheckCast(int opt_flags, uint32_t insn_idx, uint32_t type_idx,
                           RegLocation rl_src) {
  if ((opt_flags & MIR_IGNORE_CHECK_CAST) != 0) {
    // Compiler analysis proved that this check-cast would never cause an exception.
    return;
  }
  bool type_known_final, type_known_abstract, use_declaring_class;
  bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
                                                                              *cu_->dex_file,
                                                                              type_idx,
                                                                              &type_known_final,
                                                                              &type_known_abstract,
                                                                              &use_declaring_class);
  // Note: currently type_known_final is unused, as optimizing will only improve the performance
  // of the exception throw path.
  DexCompilationUnit* cu = mir_graph_->GetCurrentDexCompilationUnit();
  if (!needs_access_check && cu_->compiler_driver->IsSafeCast(cu, insn_idx)) {
    // Verifier type analysis proved this check cast would never cause an exception.
    return;
  }
  FlushAllRegs();
  // May generate a call - use explicit registers
  LockCallTemps();
  RegStorage class_reg = TargetReg(kArg2, kRef);  // kArg2 will hold the Class*
  if (needs_access_check) {
    // Check we have access to type_idx and if not throw IllegalAccessError,
    // returns Class* in kRet0
    // InitializeTypeAndVerifyAccess(idx, method)
    CallRuntimeHelperImmMethod(kQuickInitializeTypeAndVerifyAccess, type_idx, true);
    OpRegCopy(class_reg, TargetReg(kRet0, kRef));  // Align usage with fast path
  } else if (use_declaring_class) {
    // Target class is the current method's declaring class.
    RegStorage method_reg = LoadCurrMethodWithHint(TargetReg(kArg1, kRef));
    LoadRefDisp(method_reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
                class_reg, kNotVolatile);
  } else {
    // Load dex cache entry into class_reg (kArg2)
    RegStorage r_method = RegStorage::InvalidReg();
    if (CanUseOpPcRelDexCacheArrayLoad()) {
      // PC-relative dex-cache-array load of the resolved Class*.
      size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx);
      OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, class_reg);
    } else {
      r_method = LoadCurrMethodWithHint(TargetReg(kArg1, kRef));

      LoadRefDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
                  class_reg, kNotVolatile);
      int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value();
      LoadRefDisp(class_reg, offset_of_type, class_reg, kNotVolatile);
    }
    if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx)) {
      // Need to test presence of type in dex cache at runtime
      GenIfNullUseHelperImmMethod(class_reg, kQuickInitializeType, type_idx, r_method);
    }
  }
  // At this point, class_reg (kArg2) has class
  LoadValueDirectFixed(rl_src, TargetReg(kArg0, kRef));  // kArg0 <= ref

  // Slow path for the case where the classes are not equal.  In this case we need
  // to call a helper function to do the check.
  class SlowPath : public LIRSlowPath {
   public:
    // |load| selects whether Compile() must first load the object's class into
    // kArg1 (true when the fast path branched before doing that load itself).
    SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, bool load)
        : LIRSlowPath(m2l, fromfast, cont), load_(load) {
    }

    void Compile() {
      GenerateTargetLabel();

      if (load_) {
        m2l_->LoadRefDisp(m2l_->TargetReg(kArg0, kRef), mirror::Object::ClassOffset().Int32Value(),
                          m2l_->TargetReg(kArg1, kRef), kNotVolatile);
      }
      // CheckCast(target_class, object_class) - throws on failure.
      m2l_->CallRuntimeHelperRegReg(kQuickCheckCast, m2l_->TargetReg(kArg2, kRef),
                                    m2l_->TargetReg(kArg1, kRef), true);
      m2l_->OpUnconditionalBranch(cont_);
    }

   private:
    const bool load_;
  };

  if (type_known_abstract) {
    // Easier case, run slow path if target is non-null (slow path will load from target)
    LIR* branch = OpCmpImmBranch(kCondNe, TargetReg(kArg0, kRef), 0, nullptr);
    LIR* cont = NewLIR0(kPseudoTargetLabel);
    AddSlowPath(new (arena_) SlowPath(this, branch, cont, true));
  } else {
    // Harder, more common case.  We need to generate a forward branch over the load
    // if the target is null.  If it's non-null we perform the load and branch to the
    // slow path if the classes are not equal.

    /* Null is OK - continue */
    LIR* branch1 = OpCmpImmBranch(kCondEq, TargetReg(kArg0, kRef), 0, nullptr);
    /* load object->klass_ */
    DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
    LoadRefDisp(TargetReg(kArg0, kRef), mirror::Object::ClassOffset().Int32Value(),
                TargetReg(kArg1, kRef), kNotVolatile);

    LIR* branch2 = OpCmpBranch(kCondNe, TargetReg(kArg1, kRef), class_reg, nullptr);
    LIR* cont = NewLIR0(kPseudoTargetLabel);

    // Add the slow path that will not perform load since this is already done.
    AddSlowPath(new (arena_) SlowPath(this, branch2, cont, false));

    // Set the null check to branch to the continuation.
    branch1->target = cont;
  }
}
1492
// Emit a 64-bit operation as two 32-bit ops: |first_op| on the low halves and
// |second_op| on the high halves (e.g. add/adc, sub/sbc - presumably; confirm
// against callers).  Handles register overlap between result and sources.
void Mir2Lir::GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_src2) {
  RegLocation rl_result;
  if (cu_->instruction_set == kThumb2) {
    /*
     * NOTE: This is the one place in the code in which we might have
     * as many as six live temporary registers.  There are 5 in the normal
     * set for Arm.  Until we have spill capabilities, temporarily add
     * lr to the temp set.  It is safe to do this locally, but note that
     * lr is used explicitly elsewhere in the code generator and cannot
     * normally be used as a general temp register.
     */
    MarkTemp(TargetReg(kLr, kNotWide));   // Add lr to the temp pool
    FreeTemp(TargetReg(kLr, kNotWide));   // and make it available
  }
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  rl_result = EvalLoc(rl_dest, kCoreReg, true);
  // The longs may overlap - use intermediate temp if so
  if ((rl_result.reg.GetLowReg() == rl_src1.reg.GetHighReg()) || (rl_result.reg.GetLowReg() == rl_src2.reg.GetHighReg())) {
    // Result-low would clobber a source-high: compute the low half into a temp,
    // do the high half, then move the temp into place.
    RegStorage t_reg = AllocTemp();
    OpRegRegReg(first_op, t_reg, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
    OpRegRegReg(second_op, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
    OpRegCopy(rl_result.reg.GetLow(), t_reg);
    FreeTemp(t_reg);
  } else {
    OpRegRegReg(first_op, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
    OpRegRegReg(second_op, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
  }
  /*
   * NOTE: If rl_dest refers to a frame variable in a large frame, the
   * following StoreValueWide might need to allocate a temp register.
   * To further work around the lack of a spill capability, explicitly
   * free any temps from rl_src1 & rl_src2 that aren't still live in rl_result.
   * Remove when spill is functional.
   */
  FreeRegLocTemps(rl_result, rl_src1);
  FreeRegLocTemps(rl_result, rl_src2);
  StoreValueWide(rl_dest, rl_result);
  if (cu_->instruction_set == kThumb2) {
    Clobber(TargetReg(kLr, kNotWide));
    UnmarkTemp(TargetReg(kLr, kNotWide));  // Remove lr from the temp pool
  }
}
1537
Andreas Gampe98430592014-07-27 19:44:50 -07001538void Mir2Lir::GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest,
1539 RegLocation rl_src1, RegLocation rl_shift) {
1540 QuickEntrypointEnum target;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001541 switch (opcode) {
1542 case Instruction::SHL_LONG:
1543 case Instruction::SHL_LONG_2ADDR:
Andreas Gampe98430592014-07-27 19:44:50 -07001544 target = kQuickShlLong;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001545 break;
1546 case Instruction::SHR_LONG:
1547 case Instruction::SHR_LONG_2ADDR:
Andreas Gampe98430592014-07-27 19:44:50 -07001548 target = kQuickShrLong;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001549 break;
1550 case Instruction::USHR_LONG:
1551 case Instruction::USHR_LONG_2ADDR:
Andreas Gampe98430592014-07-27 19:44:50 -07001552 target = kQuickUshrLong;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001553 break;
1554 default:
1555 LOG(FATAL) << "Unexpected case";
Andreas Gampe98430592014-07-27 19:44:50 -07001556 target = kQuickShlLong;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001557 }
Andreas Gampe98430592014-07-27 19:44:50 -07001558 FlushAllRegs(); /* Send everything to home location */
1559 CallRuntimeHelperRegLocationRegLocation(target, rl_src1, rl_shift, false);
buzbeea0cd2d72014-06-01 09:33:49 -07001560 RegLocation rl_result = GetReturnWide(kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001561 StoreValueWide(rl_dest, rl_result);
1562}
1563
1564
// Generate code for a 32-bit integer arithmetic bytecode (neg/not/add/sub/mul/
// div/rem/and/or/xor/shl/shr/ushr, including 2ADDR forms).  Non-div/rem ops are
// emitted inline; div/rem use a hardware divide when the target has one,
// otherwise the kQuickIdivmod callout.  |flags| may carry
// MIR_IGNORE_DIV_ZERO_CHECK to suppress the zero-divisor check.
void Mir2Lir::GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest,
                            RegLocation rl_src1, RegLocation rl_src2, int flags) {
  DCHECK(cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64);
  OpKind op = kOpBkpt;
  bool is_div_rem = false;
  bool check_zero = false;
  bool unary = false;
  RegLocation rl_result;
  bool shift_op = false;
  // Decode the bytecode into an OpKind plus classification flags.
  switch (opcode) {
    case Instruction::NEG_INT:
      op = kOpNeg;
      unary = true;
      break;
    case Instruction::NOT_INT:
      op = kOpMvn;
      unary = true;
      break;
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
      op = kOpAdd;
      break;
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
      op = kOpSub;
      break;
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
      op = kOpMul;
      break;
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
      check_zero = true;
      op = kOpDiv;
      is_div_rem = true;
      break;
    /* NOTE: returns in kArg1 */
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
      check_zero = true;
      op = kOpRem;
      is_div_rem = true;
      break;
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
      op = kOpAnd;
      break;
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
      op = kOpOr;
      break;
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      op = kOpXor;
      break;
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
      shift_op = true;
      op = kOpLsl;
      break;
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
      shift_op = true;
      op = kOpAsr;
      break;
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      shift_op = true;
      op = kOpLsr;
      break;
    default:
      LOG(FATAL) << "Invalid word arith op: " << opcode;
  }
  if (!is_div_rem) {
    if (unary) {
      rl_src1 = LoadValue(rl_src1, kCoreReg);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      OpRegReg(op, rl_result.reg, rl_src1.reg);
    } else {
      if ((shift_op) && (cu_->instruction_set != kArm64)) {
        // Mask the shift amount to 0-31 (dex semantics); Arm64 handles this itself.
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        RegStorage t_reg = AllocTemp();
        OpRegRegImm(kOpAnd, t_reg, rl_src2.reg, 31);
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        OpRegRegReg(op, rl_result.reg, rl_src1.reg, t_reg);
        FreeTemp(t_reg);
      } else {
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        OpRegRegReg(op, rl_result.reg, rl_src1.reg, rl_src2.reg);
      }
    }
    StoreValue(rl_dest, rl_result);
  } else {
    bool done = false;      // Set to true if we happen to find a way to use a real instruction.
    if (cu_->instruction_set == kMips || cu_->instruction_set == kMips64 ||
        cu_->instruction_set == kArm64) {
      // These targets always have hardware divide.
      rl_src1 = LoadValue(rl_src1, kCoreReg);
      rl_src2 = LoadValue(rl_src2, kCoreReg);
      if (check_zero && (flags & MIR_IGNORE_DIV_ZERO_CHECK) == 0) {
        GenDivZeroCheck(rl_src2.reg);
      }
      rl_result = GenDivRem(rl_dest, rl_src1.reg, rl_src2.reg, op == kOpDiv);
      done = true;
    } else if (cu_->instruction_set == kThumb2) {
      if (cu_->compiler_driver->GetInstructionSetFeatures()->AsArmInstructionSetFeatures()->
              HasDivideInstruction()) {
        // Use ARM SDIV instruction for division.  For remainder we also need to
        // calculate using a MUL and subtract.
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        if (check_zero && (flags & MIR_IGNORE_DIV_ZERO_CHECK) == 0) {
          GenDivZeroCheck(rl_src2.reg);
        }
        rl_result = GenDivRem(rl_dest, rl_src1.reg, rl_src2.reg, op == kOpDiv);
        done = true;
      }
    }

    // If we haven't already generated the code use the callout function.
    if (!done) {
      FlushAllRegs();  /* Send everything to home location */
      LoadValueDirectFixed(rl_src2, TargetReg(kArg1, kNotWide));
      RegStorage r_tgt = CallHelperSetup(kQuickIdivmod);
      LoadValueDirectFixed(rl_src1, TargetReg(kArg0, kNotWide));
      if (check_zero && (flags & MIR_IGNORE_DIV_ZERO_CHECK) == 0) {
        GenDivZeroCheck(TargetReg(kArg1, kNotWide));
      }
      // NOTE: callout here is not a safepoint.
      CallHelper(r_tgt, kQuickIdivmod, false /* not a safepoint */);
      // Quotient comes back in the normal return register; remainder in the alternate.
      if (op == kOpDiv)
        rl_result = GetReturn(kCoreReg);
      else
        rl_result = GetReturnAlt();
    }
    StoreValue(rl_dest, rl_result);
  }
}
1705
1706/*
1707 * The following are the first-level codegen routines that analyze the format
1708 * of each bytecode then either dispatch special purpose codegen routines
1709 * or produce corresponding Thumb instructions directly.
1710 */
1711
// Returns true if no more than two bits are set in 'x'.
static bool IsPopCountLE2(unsigned int x) {
  // Strip up to two set bits; if anything remains, at least three were set.
  unsigned int stripped = x & (x - 1);  // Clear the lowest set bit (no-op for 0).
  stripped &= stripped - 1;             // Clear the next lowest set bit.
  return stripped == 0;
}
1717
Brian Carlstrom7940e442013-07-12 13:46:57 -07001718// Returns true if it added instructions to 'cu' to divide 'rl_src' by 'lit'
1719// and store the result in 'rl_dest'.
Andreas Gamped500b532015-01-16 22:09:55 -08001720bool Mir2Lir::HandleEasyDivRem(Instruction::Code dalvik_opcode ATTRIBUTE_UNUSED, bool is_div,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001721 RegLocation rl_src, RegLocation rl_dest, int lit) {
Andreas Gamped500b532015-01-16 22:09:55 -08001722 if ((lit < 2) || (!IsPowerOfTwo(lit))) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001723 return false;
1724 }
Andreas Gampe7e499922015-01-06 08:28:12 -08001725 int k = CTZ(lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001726 if (k >= 30) {
1727 // Avoid special cases.
1728 return false;
1729 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001730 rl_src = LoadValue(rl_src, kCoreReg);
1731 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee11b63d12013-08-27 07:34:17 -07001732 if (is_div) {
buzbee2700f7e2014-03-07 09:46:20 -08001733 RegStorage t_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001734 if (lit == 2) {
1735 // Division by 2 is by far the most common division by constant.
buzbee2700f7e2014-03-07 09:46:20 -08001736 OpRegRegImm(kOpLsr, t_reg, rl_src.reg, 32 - k);
1737 OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg);
1738 OpRegRegImm(kOpAsr, rl_result.reg, t_reg, k);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001739 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001740 OpRegRegImm(kOpAsr, t_reg, rl_src.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001741 OpRegRegImm(kOpLsr, t_reg, t_reg, 32 - k);
buzbee2700f7e2014-03-07 09:46:20 -08001742 OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg);
1743 OpRegRegImm(kOpAsr, rl_result.reg, t_reg, k);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001744 }
1745 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001746 RegStorage t_reg1 = AllocTemp();
1747 RegStorage t_reg2 = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001748 if (lit == 2) {
buzbee2700f7e2014-03-07 09:46:20 -08001749 OpRegRegImm(kOpLsr, t_reg1, rl_src.reg, 32 - k);
1750 OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001751 OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit -1);
buzbee2700f7e2014-03-07 09:46:20 -08001752 OpRegRegReg(kOpSub, rl_result.reg, t_reg2, t_reg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001753 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001754 OpRegRegImm(kOpAsr, t_reg1, rl_src.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001755 OpRegRegImm(kOpLsr, t_reg1, t_reg1, 32 - k);
buzbee2700f7e2014-03-07 09:46:20 -08001756 OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001757 OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit - 1);
buzbee2700f7e2014-03-07 09:46:20 -08001758 OpRegRegReg(kOpSub, rl_result.reg, t_reg2, t_reg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001759 }
1760 }
1761 StoreValue(rl_dest, rl_result);
1762 return true;
1763}
1764
1765// Returns true if it added instructions to 'cu' to multiply 'rl_src' by 'lit'
1766// and store the result in 'rl_dest'.
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001767bool Mir2Lir::HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001768 if (lit < 0) {
1769 return false;
1770 }
1771 if (lit == 0) {
1772 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1773 LoadConstant(rl_result.reg, 0);
1774 StoreValue(rl_dest, rl_result);
1775 return true;
1776 }
1777 if (lit == 1) {
1778 rl_src = LoadValue(rl_src, kCoreReg);
1779 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1780 OpRegCopy(rl_result.reg, rl_src.reg);
1781 StoreValue(rl_dest, rl_result);
1782 return true;
1783 }
Zheng Xuf9719f92014-04-02 13:31:31 +01001784 // There is RegRegRegShift on Arm, so check for more special cases
1785 if (cu_->instruction_set == kThumb2) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001786 return EasyMultiply(rl_src, rl_dest, lit);
1787 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001788 // Can we simplify this multiplication?
1789 bool power_of_two = false;
1790 bool pop_count_le2 = false;
1791 bool power_of_two_minus_one = false;
Ian Rogerse2143c02014-03-28 08:47:16 -07001792 if (IsPowerOfTwo(lit)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001793 power_of_two = true;
1794 } else if (IsPopCountLE2(lit)) {
1795 pop_count_le2 = true;
1796 } else if (IsPowerOfTwo(lit + 1)) {
1797 power_of_two_minus_one = true;
1798 } else {
1799 return false;
1800 }
1801 rl_src = LoadValue(rl_src, kCoreReg);
1802 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1803 if (power_of_two) {
1804 // Shift.
Andreas Gampe7e499922015-01-06 08:28:12 -08001805 OpRegRegImm(kOpLsl, rl_result.reg, rl_src.reg, CTZ(lit));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001806 } else if (pop_count_le2) {
1807 // Shift and add and shift.
Andreas Gampe7e499922015-01-06 08:28:12 -08001808 int first_bit = CTZ(lit);
1809 int second_bit = CTZ(lit ^ (1 << first_bit));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001810 GenMultiplyByTwoBitMultiplier(rl_src, rl_result, lit, first_bit, second_bit);
1811 } else {
1812 // Reverse subtract: (src << (shift + 1)) - src.
1813 DCHECK(power_of_two_minus_one);
Andreas Gampe7e499922015-01-06 08:28:12 -08001814 // TUNING: rsb dst, src, src lsl#CTZ(lit + 1)
buzbee2700f7e2014-03-07 09:46:20 -08001815 RegStorage t_reg = AllocTemp();
Andreas Gampe7e499922015-01-06 08:28:12 -08001816 OpRegRegImm(kOpLsl, t_reg, rl_src.reg, CTZ(lit + 1));
buzbee2700f7e2014-03-07 09:46:20 -08001817 OpRegRegReg(kOpSub, rl_result.reg, t_reg, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001818 }
1819 StoreValue(rl_dest, rl_result);
1820 return true;
1821}
1822
Ningsheng Jian675e09b2014-10-23 13:48:36 +08001823// Returns true if it generates instructions.
1824bool Mir2Lir::HandleEasyFloatingPointDiv(RegLocation rl_dest, RegLocation rl_src1,
1825 RegLocation rl_src2) {
1826 if (!rl_src2.is_const ||
1827 ((cu_->instruction_set != kThumb2) && (cu_->instruction_set != kArm64))) {
1828 return false;
1829 }
1830
1831 if (!rl_src2.wide) {
1832 int32_t divisor = mir_graph_->ConstantValue(rl_src2);
1833 if (CanDivideByReciprocalMultiplyFloat(divisor)) {
1834 // Generate multiply by reciprocal instead of div.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001835 float recip = 1.0f/bit_cast<float, int32_t>(divisor);
1836 GenMultiplyByConstantFloat(rl_dest, rl_src1, bit_cast<int32_t, float>(recip));
Ningsheng Jian675e09b2014-10-23 13:48:36 +08001837 return true;
1838 }
1839 } else {
1840 int64_t divisor = mir_graph_->ConstantValueWide(rl_src2);
1841 if (CanDivideByReciprocalMultiplyDouble(divisor)) {
1842 // Generate multiply by reciprocal instead of div.
1843 double recip = 1.0/bit_cast<double, int64_t>(divisor);
Roland Levillainda4d79b2015-03-24 14:36:11 +00001844 GenMultiplyByConstantDouble(rl_dest, rl_src1, bit_cast<int64_t, double>(recip));
Ningsheng Jian675e09b2014-10-23 13:48:36 +08001845 return true;
1846 }
1847 }
1848 return false;
1849}
1850
Brian Carlstrom7940e442013-07-12 13:46:57 -07001851void Mir2Lir::GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001852 int lit) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001853 RegLocation rl_result;
1854 OpKind op = static_cast<OpKind>(0); /* Make gcc happy */
1855 int shift_op = false;
1856 bool is_div = false;
1857
1858 switch (opcode) {
1859 case Instruction::RSUB_INT_LIT8:
1860 case Instruction::RSUB_INT: {
1861 rl_src = LoadValue(rl_src, kCoreReg);
1862 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1863 if (cu_->instruction_set == kThumb2) {
buzbee2700f7e2014-03-07 09:46:20 -08001864 OpRegRegImm(kOpRsub, rl_result.reg, rl_src.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001865 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001866 OpRegReg(kOpNeg, rl_result.reg, rl_src.reg);
1867 OpRegImm(kOpAdd, rl_result.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001868 }
1869 StoreValue(rl_dest, rl_result);
1870 return;
1871 }
1872
1873 case Instruction::SUB_INT:
1874 case Instruction::SUB_INT_2ADDR:
1875 lit = -lit;
Ian Rogersfc787ec2014-10-09 21:56:44 -07001876 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001877 case Instruction::ADD_INT:
1878 case Instruction::ADD_INT_2ADDR:
1879 case Instruction::ADD_INT_LIT8:
1880 case Instruction::ADD_INT_LIT16:
1881 op = kOpAdd;
1882 break;
1883 case Instruction::MUL_INT:
1884 case Instruction::MUL_INT_2ADDR:
1885 case Instruction::MUL_INT_LIT8:
1886 case Instruction::MUL_INT_LIT16: {
1887 if (HandleEasyMultiply(rl_src, rl_dest, lit)) {
1888 return;
1889 }
1890 op = kOpMul;
1891 break;
1892 }
1893 case Instruction::AND_INT:
1894 case Instruction::AND_INT_2ADDR:
1895 case Instruction::AND_INT_LIT8:
1896 case Instruction::AND_INT_LIT16:
1897 op = kOpAnd;
1898 break;
1899 case Instruction::OR_INT:
1900 case Instruction::OR_INT_2ADDR:
1901 case Instruction::OR_INT_LIT8:
1902 case Instruction::OR_INT_LIT16:
1903 op = kOpOr;
1904 break;
1905 case Instruction::XOR_INT:
1906 case Instruction::XOR_INT_2ADDR:
1907 case Instruction::XOR_INT_LIT8:
1908 case Instruction::XOR_INT_LIT16:
1909 op = kOpXor;
1910 break;
1911 case Instruction::SHL_INT_LIT8:
1912 case Instruction::SHL_INT:
1913 case Instruction::SHL_INT_2ADDR:
1914 lit &= 31;
1915 shift_op = true;
1916 op = kOpLsl;
1917 break;
1918 case Instruction::SHR_INT_LIT8:
1919 case Instruction::SHR_INT:
1920 case Instruction::SHR_INT_2ADDR:
1921 lit &= 31;
1922 shift_op = true;
1923 op = kOpAsr;
1924 break;
1925 case Instruction::USHR_INT_LIT8:
1926 case Instruction::USHR_INT:
1927 case Instruction::USHR_INT_2ADDR:
1928 lit &= 31;
1929 shift_op = true;
1930 op = kOpLsr;
1931 break;
1932
1933 case Instruction::DIV_INT:
1934 case Instruction::DIV_INT_2ADDR:
1935 case Instruction::DIV_INT_LIT8:
1936 case Instruction::DIV_INT_LIT16:
1937 case Instruction::REM_INT:
1938 case Instruction::REM_INT_2ADDR:
1939 case Instruction::REM_INT_LIT8:
1940 case Instruction::REM_INT_LIT16: {
1941 if (lit == 0) {
Mingyao Yange643a172014-04-08 11:02:52 -07001942 GenDivZeroException();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001943 return;
1944 }
buzbee11b63d12013-08-27 07:34:17 -07001945 if ((opcode == Instruction::DIV_INT) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001946 (opcode == Instruction::DIV_INT_2ADDR) ||
buzbee11b63d12013-08-27 07:34:17 -07001947 (opcode == Instruction::DIV_INT_LIT8) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001948 (opcode == Instruction::DIV_INT_LIT16)) {
1949 is_div = true;
1950 } else {
1951 is_div = false;
1952 }
buzbee11b63d12013-08-27 07:34:17 -07001953 if (HandleEasyDivRem(opcode, is_div, rl_src, rl_dest, lit)) {
1954 return;
1955 }
Dave Allison70202782013-10-22 17:52:19 -07001956
1957 bool done = false;
Maja Gagic6ea651f2015-02-24 16:55:04 +01001958 if (cu_->instruction_set == kMips || cu_->instruction_set == kMips64 ||
1959 cu_->instruction_set == kArm64) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001960 rl_src = LoadValue(rl_src, kCoreReg);
buzbee2700f7e2014-03-07 09:46:20 -08001961 rl_result = GenDivRemLit(rl_dest, rl_src.reg, lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001962 done = true;
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001963 } else if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Mark Mendell2bf31e62014-01-23 12:13:40 -08001964 rl_result = GenDivRemLit(rl_dest, rl_src, lit, is_div);
1965 done = true;
Dave Allison70202782013-10-22 17:52:19 -07001966 } else if (cu_->instruction_set == kThumb2) {
Andreas Gampe0b9203e2015-01-22 20:39:27 -08001967 if (cu_->compiler_driver->GetInstructionSetFeatures()->AsArmInstructionSetFeatures()->
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001968 HasDivideInstruction()) {
Dave Allison70202782013-10-22 17:52:19 -07001969 // Use ARM SDIV instruction for division. For remainder we also need to
1970 // calculate using a MUL and subtract.
1971 rl_src = LoadValue(rl_src, kCoreReg);
buzbee2700f7e2014-03-07 09:46:20 -08001972 rl_result = GenDivRemLit(rl_dest, rl_src.reg, lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001973 done = true;
1974 }
1975 }
1976
1977 if (!done) {
1978 FlushAllRegs(); /* Everything to home location. */
Andreas Gampeccc60262014-07-04 18:02:38 -07001979 LoadValueDirectFixed(rl_src, TargetReg(kArg0, kNotWide));
1980 Clobber(TargetReg(kArg0, kNotWide));
Andreas Gampe98430592014-07-27 19:44:50 -07001981 CallRuntimeHelperRegImm(kQuickIdivmod, TargetReg(kArg0, kNotWide), lit, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001982 if (is_div)
buzbeea0cd2d72014-06-01 09:33:49 -07001983 rl_result = GetReturn(kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001984 else
1985 rl_result = GetReturnAlt();
1986 }
1987 StoreValue(rl_dest, rl_result);
1988 return;
1989 }
1990 default:
1991 LOG(FATAL) << "Unexpected opcode " << opcode;
1992 }
1993 rl_src = LoadValue(rl_src, kCoreReg);
1994 rl_result = EvalLoc(rl_dest, kCoreReg, true);
Dave Allison70202782013-10-22 17:52:19 -07001995 // Avoid shifts by literal 0 - no support in Thumb. Change to copy.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001996 if (shift_op && (lit == 0)) {
buzbee2700f7e2014-03-07 09:46:20 -08001997 OpRegCopy(rl_result.reg, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001998 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001999 OpRegRegImm(op, rl_result.reg, rl_src.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002000 }
2001 StoreValue(rl_dest, rl_result);
2002}
2003
// Lowers a 64-bit (long) arithmetic bytecode. NOT and the bitwise/additive
// ops are expanded inline as paired 32-bit ops; mul/div/rem call out to the
// quick runtime helpers (with an optional divide-by-zero check).
void Mir2Lir::GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest,
                             RegLocation rl_src1, RegLocation rl_src2, int flags) {
  RegLocation rl_result;
  // kOpBkpt means "not a simple two-op expansion"; overwritten below.
  OpKind first_op = kOpBkpt;
  OpKind second_op = kOpBkpt;
  bool call_out = false;
  bool check_zero = false;
  int ret_reg = TargetReg(kRet0, kNotWide).GetReg();
  QuickEntrypointEnum target;  // Only meaningful when call_out is set.

  switch (opcode) {
    case Instruction::NOT_LONG:
      rl_src2 = LoadValueWide(rl_src2, kCoreReg);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      // Check for destructive overlap: if the result's low register aliases
      // the source's high register, the first MVN would clobber the high
      // half before it is read, so stash it in a temp first.
      if (rl_result.reg.GetLowReg() == rl_src2.reg.GetHighReg()) {
        RegStorage t_reg = AllocTemp();
        OpRegCopy(t_reg, rl_src2.reg.GetHigh());
        OpRegReg(kOpMvn, rl_result.reg.GetLow(), rl_src2.reg.GetLow());
        OpRegReg(kOpMvn, rl_result.reg.GetHigh(), t_reg);
        FreeTemp(t_reg);
      } else {
        OpRegReg(kOpMvn, rl_result.reg.GetLow(), rl_src2.reg.GetLow());
        OpRegReg(kOpMvn, rl_result.reg.GetHigh(), rl_src2.reg.GetHigh());
      }
      StoreValueWide(rl_dest, rl_result);
      return;
    case Instruction::ADD_LONG:
    case Instruction::ADD_LONG_2ADDR:
      // Low halves add, high halves add-with-carry.
      first_op = kOpAdd;
      second_op = kOpAdc;
      break;
    case Instruction::SUB_LONG:
    case Instruction::SUB_LONG_2ADDR:
      // Low halves subtract, high halves subtract-with-borrow.
      first_op = kOpSub;
      second_op = kOpSbc;
      break;
    case Instruction::MUL_LONG:
    case Instruction::MUL_LONG_2ADDR:
      call_out = true;
      ret_reg = TargetReg(kRet0, kNotWide).GetReg();
      target = kQuickLmul;
      break;
    case Instruction::DIV_LONG:
    case Instruction::DIV_LONG_2ADDR:
      call_out = true;
      check_zero = true;
      ret_reg = TargetReg(kRet0, kNotWide).GetReg();
      target = kQuickLdiv;
      break;
    case Instruction::REM_LONG:
    case Instruction::REM_LONG_2ADDR:
      call_out = true;
      check_zero = true;
      target = kQuickLmod;
      /* NOTE - for Arm, result is in kArg2/kArg3 instead of kRet0/kRet1 */
      ret_reg = (cu_->instruction_set == kThumb2) ? TargetReg(kArg2, kNotWide).GetReg() :
          TargetReg(kRet0, kNotWide).GetReg();
      break;
    case Instruction::AND_LONG_2ADDR:
    case Instruction::AND_LONG:
      first_op = kOpAnd;
      second_op = kOpAnd;
      break;
    case Instruction::OR_LONG:
    case Instruction::OR_LONG_2ADDR:
      first_op = kOpOr;
      second_op = kOpOr;
      break;
    case Instruction::XOR_LONG:
    case Instruction::XOR_LONG_2ADDR:
      first_op = kOpXor;
      second_op = kOpXor;
      break;
    default:
      LOG(FATAL) << "Invalid long arith op";
  }
  if (!call_out) {
    // Inline expansion: first_op on the low halves, second_op on the high.
    GenLong3Addr(first_op, second_op, rl_dest, rl_src1, rl_src2);
  } else {
    FlushAllRegs(); /* Send everything to home location */
    if (check_zero) {
      // Load the divisor first so the zero check sees it; the dividend is
      // loaded after CallHelperSetup (which may clobber temps).
      RegStorage r_tmp1 = TargetReg(kArg0, kWide);
      RegStorage r_tmp2 = TargetReg(kArg2, kWide);
      LoadValueDirectWideFixed(rl_src2, r_tmp2);
      RegStorage r_tgt = CallHelperSetup(target);
      if ((flags & MIR_IGNORE_DIV_ZERO_CHECK) == 0) {
        GenDivZeroCheckWide(r_tmp2);
      }
      LoadValueDirectWideFixed(rl_src1, r_tmp1);
      // NOTE: callout here is not a safepoint
      CallHelper(r_tgt, target, false /* not safepoint */);
    } else {
      CallRuntimeHelperRegLocationRegLocation(target, rl_src1, rl_src2, false);
    }
    // Adjust return regs in to handle case of rem returning kArg2/kArg3
    if (ret_reg == TargetReg(kRet0, kNotWide).GetReg())
      rl_result = GetReturnWide(kCoreReg);
    else
      rl_result = GetReturnWideAlt();
    StoreValueWide(rl_dest, rl_result);
  }
}
2107
Mark Mendelle87f9b52014-04-30 14:13:18 -04002108void Mir2Lir::GenConst(RegLocation rl_dest, int value) {
2109 RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
2110 LoadConstantNoClobber(rl_result.reg, value);
2111 StoreValue(rl_dest, rl_result);
Mark Mendelle87f9b52014-04-30 14:13:18 -04002112}
2113
Andreas Gampe98430592014-07-27 19:44:50 -07002114void Mir2Lir::GenConversionCall(QuickEntrypointEnum trampoline, RegLocation rl_dest,
2115 RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07002116 /*
2117 * Don't optimize the register usage since it calls out to support
2118 * functions
2119 */
Andreas Gampe2f244e92014-05-08 03:35:25 -07002120
Brian Carlstrom7940e442013-07-12 13:46:57 -07002121 FlushAllRegs(); /* Send everything to home location */
Andreas Gampe98430592014-07-27 19:44:50 -07002122 CallRuntimeHelperRegLocation(trampoline, rl_src, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002123 if (rl_dest.wide) {
2124 RegLocation rl_result;
buzbeea0cd2d72014-06-01 09:33:49 -07002125 rl_result = GetReturnWide(LocToRegClass(rl_dest));
Brian Carlstrom7940e442013-07-12 13:46:57 -07002126 StoreValueWide(rl_dest, rl_result);
2127 } else {
2128 RegLocation rl_result;
buzbeea0cd2d72014-06-01 09:33:49 -07002129 rl_result = GetReturn(LocToRegClass(rl_dest));
Brian Carlstrom7940e442013-07-12 13:46:57 -07002130 StoreValue(rl_dest, rl_result);
2131 }
2132}
2133
// Slow path for an explicit suspend check: jumps here from the fast path,
// calls the TestSuspend runtime entrypoint, and (if a continuation label was
// supplied) branches back to the fast path.
class Mir2Lir::SuspendCheckSlowPath : public Mir2Lir::LIRSlowPath {
 public:
  // 'branch' is the fast-path branch that targets this slow path; 'cont' is
  // the label to resume at afterwards (may be null, e.g. when the slow path
  // branches directly to a caller-supplied target instead).
  SuspendCheckSlowPath(Mir2Lir* m2l, LIR* branch, LIR* cont)
      : LIRSlowPath(m2l, branch, cont) {
  }

  void Compile() OVERRIDE {
    // Slow paths are compiled after the main flow; reset tracking state.
    m2l_->ResetRegPool();
    m2l_->ResetDefTracking();
    GenerateTargetLabel(kPseudoSuspendTarget);
    // 'true' marks the call as a safepoint (cf. the explicit
    // "not safepoint" calls elsewhere in this file).
    m2l_->CallRuntimeHelper(kQuickTestSuspend, true);
    if (cont_ != nullptr) {
      // Resume the fast path.
      m2l_->OpUnconditionalBranch(cont_);
    }
  }
};
2150
Brian Carlstrom7940e442013-07-12 13:46:57 -07002151/* Check if we need to check for pending suspend request */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07002152void Mir2Lir::GenSuspendTest(int opt_flags) {
Vladimir Marko8b858e12014-11-27 14:52:37 +00002153 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK) != 0) {
2154 return;
2155 }
Dave Allison69dfe512014-07-11 17:11:58 +00002156 if (!cu_->compiler_driver->GetCompilerOptions().GetImplicitSuspendChecks()) {
Dave Allisonb373e092014-02-20 16:06:36 -08002157 FlushAllRegs();
2158 LIR* branch = OpTestSuspend(NULL);
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07002159 LIR* cont = NewLIR0(kPseudoTargetLabel);
2160 AddSlowPath(new (arena_) SuspendCheckSlowPath(this, branch, cont));
Dave Allisonb373e092014-02-20 16:06:36 -08002161 } else {
Dave Allisonb373e092014-02-20 16:06:36 -08002162 FlushAllRegs(); // TODO: needed?
2163 LIR* inst = CheckSuspendUsingLoad();
2164 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002165 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002166}
2167
2168/* Check if we need to check for pending suspend request */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07002169void Mir2Lir::GenSuspendTestAndBranch(int opt_flags, LIR* target) {
Vladimir Marko8b858e12014-11-27 14:52:37 +00002170 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK) != 0) {
2171 OpUnconditionalBranch(target);
2172 return;
2173 }
Dave Allison69dfe512014-07-11 17:11:58 +00002174 if (!cu_->compiler_driver->GetCompilerOptions().GetImplicitSuspendChecks()) {
Dave Allisonb373e092014-02-20 16:06:36 -08002175 OpTestSuspend(target);
Dave Allisonb373e092014-02-20 16:06:36 -08002176 FlushAllRegs();
Mingyao Yang6ffcfa02014-04-25 11:06:00 -07002177 LIR* branch = OpUnconditionalBranch(nullptr);
2178 AddSlowPath(new (arena_) SuspendCheckSlowPath(this, branch, target));
Dave Allisonb373e092014-02-20 16:06:36 -08002179 } else {
2180 // For the implicit suspend check, just perform the trigger
2181 // load and branch to the target.
Dave Allisonb373e092014-02-20 16:06:36 -08002182 FlushAllRegs();
2183 LIR* inst = CheckSuspendUsingLoad();
2184 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002185 OpUnconditionalBranch(target);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002186 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002187}
2188
/* Call out to helper assembly routine that will null check obj and then lock it. */
// Lowers monitor-enter: everything is flushed to home locations and the
// kQuickLockObject entrypoint does the null check and locking.
void Mir2Lir::GenMonitorEnter(int opt_flags, RegLocation rl_src) {
  UNUSED(opt_flags);  // TODO: avoid null check with specialized non-null helper.
  FlushAllRegs();
  // 'true': this call is a safepoint.
  CallRuntimeHelperRegLocation(kQuickLockObject, rl_src, true);
}
2195
/* Call out to helper assembly routine that will null check obj and then unlock it. */
// Lowers monitor-exit; mirror of GenMonitorEnter using kQuickUnlockObject.
void Mir2Lir::GenMonitorExit(int opt_flags, RegLocation rl_src) {
  UNUSED(opt_flags);  // TODO: avoid null check with specialized non-null helper.
  FlushAllRegs();
  // 'true': this call is a safepoint.
  CallRuntimeHelperRegLocation(kQuickUnlockObject, rl_src, true);
}
2202
Bill Buzbeed61ba4b2014-01-13 21:44:01 +00002203/* Generic code for generating a wide constant into a VR. */
2204void Mir2Lir::GenConstWide(RegLocation rl_dest, int64_t value) {
2205 RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08002206 LoadConstantWide(rl_result.reg, value);
Bill Buzbeed61ba4b2014-01-13 21:44:01 +00002207 StoreValueWide(rl_dest, rl_result);
2208}
2209
// Lowers a small packed-switch as a chain of compare-and-branch instructions.
// Keys are consecutive (starting_key, starting_key + 1, ...), with one
// successor block per table entry, visited in table order.
void Mir2Lir::GenSmallPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) {
  BasicBlock* bb = mir_graph_->GetBasicBlock(mir->bb);
  DCHECK(bb != nullptr);
  // The successor blocks are stored in the same order as the table entries.
  ArenaVector<SuccessorBlockInfo*>::const_iterator succ_bb_iter = bb->successor_blocks.cbegin();
  const uint16_t* table = mir_graph_->GetTable(mir, table_offset);
  const uint16_t entries = table[1];
  // Chained cmp-and-branch.
  const int32_t* as_int32 = reinterpret_cast<const int32_t*>(&table[2]);
  int32_t starting_key = as_int32[0];
  rl_src = LoadValue(rl_src, kCoreReg);
  int i = 0;
  // Phase 1: emit cmp-with-immediate while each key is cheap to encode.
  for (; i < entries; ++i, ++succ_bb_iter) {
    if (!InexpensiveConstantInt(starting_key + i, Instruction::Code::IF_EQ)) {
      // Switch to using a temp and add.
      break;
    }
    SuccessorBlockInfo* successor_block_info = *succ_bb_iter;
    DCHECK(successor_block_info != nullptr);
    int case_block_id = successor_block_info->block;
    DCHECK_EQ(starting_key + i, successor_block_info->key);
    OpCmpImmBranch(kCondEq, rl_src.reg, starting_key + i, &block_label_list_[case_block_id]);
  }
  if (i < entries) {
    // The rest do not seem to be inexpensive. Try to allocate a temp and use add.
    RegStorage key_temp = AllocTypedTemp(false, kCoreReg, false);
    if (key_temp.Valid()) {
      // Phase 2: materialize the key once and increment it between compares.
      LoadConstantNoClobber(key_temp, starting_key + i);
      for (; i < entries - 1; ++i, ++succ_bb_iter) {
        SuccessorBlockInfo* successor_block_info = *succ_bb_iter;
        DCHECK(successor_block_info != nullptr);
        int case_block_id = successor_block_info->block;
        DCHECK_EQ(starting_key + i, successor_block_info->key);
        OpCmpBranch(kCondEq, rl_src.reg, key_temp, &block_label_list_[case_block_id]);
        OpRegImm(kOpAdd, key_temp, 1);  // Increment key.
      }
      // Last entry: compare without the trailing (useless) increment.
      SuccessorBlockInfo* successor_block_info = *succ_bb_iter;
      DCHECK(successor_block_info != nullptr);
      int case_block_id = successor_block_info->block;
      DCHECK_EQ(starting_key + i, successor_block_info->key);
      OpCmpBranch(kCondEq, rl_src.reg, key_temp, &block_label_list_[case_block_id]);
    } else {
      // No free temp, just finish the old loop.
      for (; i < entries; ++i, ++succ_bb_iter) {
        SuccessorBlockInfo* successor_block_info = *succ_bb_iter;
        DCHECK(successor_block_info != nullptr);
        int case_block_id = successor_block_info->block;
        DCHECK_EQ(starting_key + i, successor_block_info->key);
        OpCmpImmBranch(kCondEq, rl_src.reg, starting_key + i, &block_label_list_[case_block_id]);
      }
    }
  }
}
2262
2263void Mir2Lir::GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) {
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07002264 const uint16_t* table = mir_graph_->GetTable(mir, table_offset);
Andreas Gampe48971b32014-08-06 10:09:01 -07002265 if (cu_->verbose) {
Chao-ying Fuda96aed2014-10-27 14:42:00 -07002266 DumpPackedSwitchTable(table);
Andreas Gampe48971b32014-08-06 10:09:01 -07002267 }
2268
2269 const uint16_t entries = table[1];
2270 if (entries <= kSmallSwitchThreshold) {
2271 GenSmallPackedSwitch(mir, table_offset, rl_src);
2272 } else {
2273 // Use the backend-specific implementation.
2274 GenLargePackedSwitch(mir, table_offset, rl_src);
2275 }
2276}
2277
2278void Mir2Lir::GenSmallSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) {
Chao-ying Fuda96aed2014-10-27 14:42:00 -07002279 BasicBlock* bb = mir_graph_->GetBasicBlock(mir->bb);
2280 DCHECK(bb != nullptr);
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07002281 const uint16_t* table = mir_graph_->GetTable(mir, table_offset);
Andreas Gampe48971b32014-08-06 10:09:01 -07002282 const uint16_t entries = table[1];
2283 // Chained cmp-and-branch.
Andreas Gampe48971b32014-08-06 10:09:01 -07002284 rl_src = LoadValue(rl_src, kCoreReg);
Chao-ying Fuda96aed2014-10-27 14:42:00 -07002285 int i = 0;
2286 for (SuccessorBlockInfo* successor_block_info : bb->successor_blocks) {
2287 int case_block_id = successor_block_info->block;
2288 int key = successor_block_info->key;
2289 OpCmpImmBranch(kCondEq, rl_src.reg, key, &block_label_list_[case_block_id]);
2290 i++;
Andreas Gampe48971b32014-08-06 10:09:01 -07002291 }
Chao-ying Fuda96aed2014-10-27 14:42:00 -07002292 DCHECK_EQ(i, entries);
Andreas Gampe48971b32014-08-06 10:09:01 -07002293}
2294
2295void Mir2Lir::GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) {
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07002296 const uint16_t* table = mir_graph_->GetTable(mir, table_offset);
Andreas Gampe48971b32014-08-06 10:09:01 -07002297 if (cu_->verbose) {
2298 DumpSparseSwitchTable(table);
2299 }
2300
2301 const uint16_t entries = table[1];
2302 if (entries <= kSmallSwitchThreshold) {
2303 GenSmallSparseSwitch(mir, table_offset, rl_src);
2304 } else {
2305 // Use the backend-specific implementation.
2306 GenLargeSparseSwitch(mir, table_offset, rl_src);
2307 }
2308}
2309
Fred Shih37f05ef2014-07-16 18:38:08 -07002310bool Mir2Lir::SizeMatchesTypeForEntrypoint(OpSize size, Primitive::Type type) {
2311 switch (size) {
2312 case kReference:
2313 return type == Primitive::kPrimNot;
2314 case k64:
2315 case kDouble:
2316 return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
2317 case k32:
2318 case kSingle:
2319 return type == Primitive::kPrimInt || type == Primitive::kPrimFloat;
2320 case kSignedHalf:
2321 return type == Primitive::kPrimShort;
2322 case kUnsignedHalf:
2323 return type == Primitive::kPrimChar;
2324 case kSignedByte:
2325 return type == Primitive::kPrimByte;
2326 case kUnsignedByte:
2327 return type == Primitive::kPrimBoolean;
2328 case kWord: // Intentional fallthrough.
2329 default:
2330 return false; // There are no sane types with this op size.
2331 }
2332}
2333
Brian Carlstrom7940e442013-07-12 13:46:57 -07002334} // namespace art