/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* This file contains codegen for the Thumb2 ISA. */

#include "arch/instruction_set_features.h"
#include "arm_lir.h"
#include "codegen_arm.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array-inl.h"

namespace art {

LIR* ArmMir2Lir::OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target) {
  OpRegReg(kOpCmp, src1, src2);
  return OpCondBranch(cond, target);
}

/*
 * Generate a Thumb2 IT instruction, which can nullify up to
 * four subsequent instructions based on a condition and its
 * inverse.  The condition applies to the first instruction, which
 * is executed if the condition is met.  The string "guide" consists
 * of 0 to 3 chars, and applies to the 2nd through 4th instruction.
 * A "T" means the instruction is executed if the condition is
 * met, and an "E" means the instruction is executed if the condition
 * is not met.
 */
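/*
 * Illustrative note (commentary only, not additional generated code):
 * OpIT(kCondEq, "TE") covers the next three instructions -- the first two
 * execute when EQ holds and the third when it does not, i.e. roughly the
 * assembler form "ITTE EQ".
 */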
LIR* ArmMir2Lir::OpIT(ConditionCode ccode, const char* guide) {
  int mask;
  int mask3 = 0;
  int mask2 = 0;
  int mask1 = 0;
  ArmConditionCode code = ArmConditionEncoding(ccode);
  int cond_bit = code & 1;
  int alt_bit = cond_bit ^ 1;

  switch (strlen(guide)) {
    case 3:
      mask1 = (guide[2] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 2:
      mask2 = (guide[1] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 1:
      mask3 = (guide[0] == 'T') ? cond_bit : alt_bit;
      break;
    case 0:
      break;
    default:
      LOG(FATAL) << "OAT: bad case in OpIT";
      UNREACHABLE();
  }
  mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) |
         (1 << (3 - strlen(guide)));
  return NewLIR2(kThumb2It, code, mask);
}

void ArmMir2Lir::UpdateIT(LIR* it, const char* new_guide) {
  int mask;
  int mask3 = 0;
  int mask2 = 0;
  int mask1 = 0;
  ArmConditionCode code = static_cast<ArmConditionCode>(it->operands[0]);
  int cond_bit = code & 1;
  int alt_bit = cond_bit ^ 1;

  switch (strlen(new_guide)) {
    case 3:
      mask1 = (new_guide[2] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 2:
      mask2 = (new_guide[1] == 'T') ? cond_bit : alt_bit;
      FALLTHROUGH_INTENDED;
    case 1:
      mask3 = (new_guide[0] == 'T') ? cond_bit : alt_bit;
      break;
    case 0:
      break;
    default:
      LOG(FATAL) << "OAT: bad case in UpdateIT";
      UNREACHABLE();
  }
  mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) |
         (1 << (3 - strlen(new_guide)));
  it->operands[1] = mask;
}

void ArmMir2Lir::OpEndIT(LIR* it) {
  // TODO: use the 'it' pointer to do some checks with the LIR, for example
  // we could check that the number of instructions matches the mask
  // in the IT instruction.
  CHECK(it != nullptr);
  GenBarrier();
}

/*
 * 64-bit 3way compare function.
 *     mov   rX, #-1
 *     cmp   op1hi, op2hi
 *     blt   done
 *     bgt   flip
 *     sub   rX, op1lo, op2lo (treat as unsigned)
 *     beq   done
 *     ite   hi
 *     mov(hi)   rX, #-1
 *     mov(!hi)  rX, #1
 * flip:
 *     neg   rX
 * done:
 */
void ArmMir2Lir::GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  LIR* target1;
  LIR* target2;
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  RegStorage t_reg = AllocTemp();
  LoadConstant(t_reg, -1);
  OpRegReg(kOpCmp, rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
  LIR* branch1 = OpCondBranch(kCondLt, NULL);
  LIR* branch2 = OpCondBranch(kCondGt, NULL);
  OpRegRegReg(kOpSub, t_reg, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
  LIR* branch3 = OpCondBranch(kCondEq, NULL);

  LIR* it = OpIT(kCondHi, "E");
  NewLIR2(kThumb2MovI8M, t_reg.GetReg(), ModifiedImmediate(-1));
  LoadConstant(t_reg, 1);
  OpEndIT(it);

  target2 = NewLIR0(kPseudoTargetLabel);
  OpRegReg(kOpNeg, t_reg, t_reg);

  target1 = NewLIR0(kPseudoTargetLabel);

  RegLocation rl_temp = LocCReturn();  // Just using as template, will change
  rl_temp.reg.SetReg(t_reg.GetReg());
  StoreValue(rl_dest, rl_temp);
  FreeTemp(t_reg);

  branch1->target = target1;
  branch2->target = target2;
  branch3->target = branch1->target;
}

void ArmMir2Lir::GenFusedLongCmpImmBranch(BasicBlock* bb, RegLocation rl_src1,
                                          int64_t val, ConditionCode ccode) {
  int32_t val_lo = Low32Bits(val);
  int32_t val_hi = High32Bits(val);
  DCHECK_GE(ModifiedImmediate(val_lo), 0);
  DCHECK_GE(ModifiedImmediate(val_hi), 0);
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  RegStorage low_reg = rl_src1.reg.GetLow();
  RegStorage high_reg = rl_src1.reg.GetHigh();

  if (val == 0 && (ccode == kCondEq || ccode == kCondNe)) {
    RegStorage t_reg = AllocTemp();
    NewLIR4(kThumb2OrrRRRs, t_reg.GetReg(), low_reg.GetReg(), high_reg.GetReg(), 0);
    FreeTemp(t_reg);
    OpCondBranch(ccode, taken);
    return;
  }

  switch (ccode) {
    case kCondEq:
    case kCondNe:
      OpCmpImmBranch(kCondNe, high_reg, val_hi, (ccode == kCondEq) ? not_taken : taken);
      break;
    case kCondLt:
      OpCmpImmBranch(kCondLt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondGt, high_reg, val_hi, not_taken);
      ccode = kCondUlt;
      break;
    case kCondLe:
      OpCmpImmBranch(kCondLt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondGt, high_reg, val_hi, not_taken);
      ccode = kCondLs;
      break;
    case kCondGt:
      OpCmpImmBranch(kCondGt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondLt, high_reg, val_hi, not_taken);
      ccode = kCondHi;
      break;
    case kCondGe:
      OpCmpImmBranch(kCondGt, high_reg, val_hi, taken);
      OpCmpImmBranch(kCondLt, high_reg, val_hi, not_taken);
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpCmpImmBranch(ccode, low_reg, val_lo, taken);
}

void ArmMir2Lir::GenSelectConst32(RegStorage left_op, RegStorage right_op, ConditionCode code,
                                  int32_t true_val, int32_t false_val, RegStorage rs_dest,
                                  RegisterClass dest_reg_class) {
  UNUSED(dest_reg_class);
  // TODO: Generalize the IT below to accept more than one-instruction loads.
  DCHECK(InexpensiveConstantInt(true_val));
  DCHECK(InexpensiveConstantInt(false_val));

  if ((true_val == 0 && code == kCondEq) ||
      (false_val == 0 && code == kCondNe)) {
    OpRegRegReg(kOpSub, rs_dest, left_op, right_op);
    DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
    LIR* it = OpIT(kCondNe, "");
    LoadConstant(rs_dest, code == kCondEq ? false_val : true_val);
    OpEndIT(it);
    return;
  }

  OpRegReg(kOpCmp, left_op, right_op);  // Same?
  LIR* it = OpIT(code, "E");            // if-convert the test
  LoadConstant(rs_dest, true_val);      // .eq case - load true
  LoadConstant(rs_dest, false_val);     // .ne case - load false
  OpEndIT(it);
}

void ArmMir2Lir::GenSelect(BasicBlock* bb, MIR* mir) {
  UNUSED(bb);
  RegLocation rl_result;
  RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
  RegLocation rl_dest = mir_graph_->GetDest(mir);
  // Avoid using float regs here.
  RegisterClass src_reg_class = rl_src.ref ? kRefReg : kCoreReg;
  RegisterClass result_reg_class = rl_dest.ref ? kRefReg : kCoreReg;
  rl_src = LoadValue(rl_src, src_reg_class);
  ConditionCode ccode = mir->meta.ccode;
  if (mir->ssa_rep->num_uses == 1) {
    // CONST case
    int true_val = mir->dalvikInsn.vB;
    int false_val = mir->dalvikInsn.vC;
    rl_result = EvalLoc(rl_dest, result_reg_class, true);
    // Change kCondNe to kCondEq for the special cases below.
    if (ccode == kCondNe) {
      ccode = kCondEq;
      std::swap(true_val, false_val);
    }
    bool cheap_false_val = InexpensiveConstantInt(false_val);
    if (cheap_false_val && ccode == kCondEq && (true_val == 0 || true_val == -1)) {
      OpRegRegImm(kOpSub, rl_result.reg, rl_src.reg, -true_val);
      DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
      LIR* it = OpIT(true_val == 0 ? kCondNe : kCondUge, "");
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else if (cheap_false_val && ccode == kCondEq && true_val == 1) {
      OpRegRegImm(kOpRsub, rl_result.reg, rl_src.reg, 1);
      DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
      LIR* it = OpIT(kCondLs, "");
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else if (cheap_false_val && InexpensiveConstantInt(true_val)) {
      OpRegImm(kOpCmp, rl_src.reg, 0);
      LIR* it = OpIT(ccode, "E");
      LoadConstant(rl_result.reg, true_val);
      LoadConstant(rl_result.reg, false_val);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    } else {
      // Unlikely case - could be tuned.
      RegStorage t_reg1 = AllocTypedTemp(false, result_reg_class);
      RegStorage t_reg2 = AllocTypedTemp(false, result_reg_class);
      LoadConstant(t_reg1, true_val);
      LoadConstant(t_reg2, false_val);
      OpRegImm(kOpCmp, rl_src.reg, 0);
      LIR* it = OpIT(ccode, "E");
      OpRegCopy(rl_result.reg, t_reg1);
      OpRegCopy(rl_result.reg, t_reg2);
      OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
    }
  } else {
    // MOVE case
    RegLocation rl_true = mir_graph_->reg_location_[mir->ssa_rep->uses[1]];
    RegLocation rl_false = mir_graph_->reg_location_[mir->ssa_rep->uses[2]];
    rl_true = LoadValue(rl_true, result_reg_class);
    rl_false = LoadValue(rl_false, result_reg_class);
    rl_result = EvalLoc(rl_dest, result_reg_class, true);
    OpRegImm(kOpCmp, rl_src.reg, 0);
    LIR* it = nullptr;
    if (rl_result.reg.GetReg() == rl_true.reg.GetReg()) {  // Is the "true" case already in place?
      it = OpIT(NegateComparison(ccode), "");
      OpRegCopy(rl_result.reg, rl_false.reg);
    } else if (rl_result.reg.GetReg() == rl_false.reg.GetReg()) {  // False case in place?
      it = OpIT(ccode, "");
      OpRegCopy(rl_result.reg, rl_true.reg);
    } else {  // Normal - select between the two.
      it = OpIT(ccode, "E");
      OpRegCopy(rl_result.reg, rl_true.reg);
      OpRegCopy(rl_result.reg, rl_false.reg);
    }
    OpEndIT(it);  // Add a scheduling barrier to keep the IT shadow intact
  }
  StoreValue(rl_dest, rl_result);
}

void ArmMir2Lir::GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir) {
  RegLocation rl_src1 = mir_graph_->GetSrcWide(mir, 0);
  RegLocation rl_src2 = mir_graph_->GetSrcWide(mir, 2);
  // Normalize such that if either operand is constant, src2 will be constant.
  ConditionCode ccode = mir->meta.ccode;
  if (rl_src1.is_const) {
    std::swap(rl_src1, rl_src2);
    ccode = FlipComparisonOrder(ccode);
  }
  if (rl_src2.is_const) {
    rl_src2 = UpdateLocWide(rl_src2);
    // Do special compare/branch against simple const operand if not already in registers.
    int64_t val = mir_graph_->ConstantValueWide(rl_src2);
    if ((rl_src2.location != kLocPhysReg) &&
        ((ModifiedImmediate(Low32Bits(val)) >= 0) && (ModifiedImmediate(High32Bits(val)) >= 0))) {
      GenFusedLongCmpImmBranch(bb, rl_src1, val, ccode);
      return;
    }
  }
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  OpRegReg(kOpCmp, rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
  switch (ccode) {
    case kCondEq:
      OpCondBranch(kCondNe, not_taken);
      break;
    case kCondNe:
      OpCondBranch(kCondNe, taken);
      break;
    case kCondLt:
      OpCondBranch(kCondLt, taken);
      OpCondBranch(kCondGt, not_taken);
      ccode = kCondUlt;
      break;
    case kCondLe:
      OpCondBranch(kCondLt, taken);
      OpCondBranch(kCondGt, not_taken);
      ccode = kCondLs;
      break;
    case kCondGt:
      OpCondBranch(kCondGt, taken);
      OpCondBranch(kCondLt, not_taken);
      ccode = kCondHi;
      break;
    case kCondGe:
      OpCondBranch(kCondGt, taken);
      OpCondBranch(kCondLt, not_taken);
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpRegReg(kOpCmp, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
  OpCondBranch(ccode, taken);
}

/*
 * Generate a register comparison to an immediate and branch.  Caller
 * is responsible for setting branch target field.
 */
LIR* ArmMir2Lir::OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, LIR* target) {
  LIR* branch = nullptr;
  ArmConditionCode arm_cond = ArmConditionEncoding(cond);
  /*
   * A common use of OpCmpImmBranch is for null checks, and using the Thumb 16-bit
   * compare-and-branch if zero is ideal if it will reach.  However, because null checks
   * branch forward to a slow path, they will frequently not reach - and thus have to
   * be converted to a long form during assembly (which will trigger another assembly
   * pass).  Here we estimate the branch distance for checks, and if large directly
   * generate the long form in an attempt to avoid an extra assembly pass.
   * TODO: consider interspersing slowpaths in code following unconditional branches.
   */
  bool skip = ((target != NULL) && (target->opcode == kPseudoThrowTarget));
  skip &= ((mir_graph_->GetNumDalvikInsns() - current_dalvik_offset_) > 64);
  if (!skip && reg.Low8() && (check_value == 0)) {
    if (arm_cond == kArmCondEq || arm_cond == kArmCondNe) {
      branch = NewLIR2((arm_cond == kArmCondEq) ? kThumb2Cbz : kThumb2Cbnz,
                       reg.GetReg(), 0);
    } else if (arm_cond == kArmCondLs) {
      // kArmCondLs is an unsigned less or equal. A comparison r <= 0 is then the same as cbz.
      // This case happens for a bounds check of array[0].
      branch = NewLIR2(kThumb2Cbz, reg.GetReg(), 0);
    }
  }

  if (branch == nullptr) {
    OpRegImm(kOpCmp, reg, check_value);
    branch = NewLIR2(kThumbBCond, 0, arm_cond);
  }

  branch->target = target;
  return branch;
}

LIR* ArmMir2Lir::OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src) {
  LIR* res;
  int opcode;
  // If src or dest is a pair, we'll be using low reg.
  if (r_dest.IsPair()) {
    r_dest = r_dest.GetLow();
  }
  if (r_src.IsPair()) {
    r_src = r_src.GetLow();
  }
  if (r_dest.IsFloat() || r_src.IsFloat())
    return OpFpRegCopy(r_dest, r_src);
  if (r_dest.Low8() && r_src.Low8())
    opcode = kThumbMovRR;
  else if (!r_dest.Low8() && !r_src.Low8())
    opcode = kThumbMovRR_H2H;
  else if (r_dest.Low8())
    opcode = kThumbMovRR_H2L;
  else
    opcode = kThumbMovRR_L2H;
  res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
  if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
    res->flags.is_nop = true;
  }
  return res;
}

void ArmMir2Lir::OpRegCopy(RegStorage r_dest, RegStorage r_src) {
  if (r_dest != r_src) {
    LIR* res = OpRegCopyNoInsert(r_dest, r_src);
    AppendLIR(res);
  }
}

void ArmMir2Lir::OpRegCopyWide(RegStorage r_dest, RegStorage r_src) {
  if (r_dest != r_src) {
    bool dest_fp = r_dest.IsFloat();
    bool src_fp = r_src.IsFloat();
    DCHECK(r_dest.Is64Bit());
    DCHECK(r_src.Is64Bit());
    // Note: a register handed out by the register allocator should never be a pair.
    // But some functions in mir_to_lir assume 64-bit registers are 32-bit register pairs.
    // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
    if (dest_fp && r_dest.IsPair()) {
      r_dest = As64BitFloatReg(r_dest);
    }
    if (src_fp && r_src.IsPair()) {
      r_src = As64BitFloatReg(r_src);
    }
    if (dest_fp) {
      if (src_fp) {
        OpRegCopy(r_dest, r_src);
      } else {
        NewLIR3(kThumb2Fmdrr, r_dest.GetReg(), r_src.GetLowReg(), r_src.GetHighReg());
      }
    } else {
      if (src_fp) {
        NewLIR3(kThumb2Fmrrd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_src.GetReg());
      } else {
        // Handle overlap
        if (r_src.GetHighReg() == r_dest.GetLowReg()) {
          DCHECK_NE(r_src.GetLowReg(), r_dest.GetHighReg());
          OpRegCopy(r_dest.GetHigh(), r_src.GetHigh());
          OpRegCopy(r_dest.GetLow(), r_src.GetLow());
        } else {
          OpRegCopy(r_dest.GetLow(), r_src.GetLow());
          OpRegCopy(r_dest.GetHigh(), r_src.GetHigh());
        }
      }
    }
  }
}

// Table of magic divisors
struct MagicTable {
  uint32_t magic;
  uint32_t shift;
  DividePattern pattern;
};

static const MagicTable magic_table[] = {
  {0, 0, DivideNone},        // 0
  {0, 0, DivideNone},        // 1
  {0, 0, DivideNone},        // 2
  {0x55555556, 0, Divide3},  // 3
  {0, 0, DivideNone},        // 4
  {0x66666667, 1, Divide5},  // 5
  {0x2AAAAAAB, 0, Divide3},  // 6
  {0x92492493, 2, Divide7},  // 7
  {0, 0, DivideNone},        // 8
  {0x38E38E39, 1, Divide5},  // 9
  {0x66666667, 2, Divide5},  // 10
  {0x2E8BA2E9, 1, Divide5},  // 11
  {0x2AAAAAAB, 1, Divide5},  // 12
  {0x4EC4EC4F, 2, Divide5},  // 13
  {0x92492493, 3, Divide7},  // 14
  {0x88888889, 3, Divide7},  // 15
};

// Integer division by constant via reciprocal multiply (Hacker's Delight, 10-4)
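// Worked illustration of the table above (commentary only, no code change):
// for division by 3 the entry supplies magic = 0x55555556 with shift 0, so the
// Divide3 sequence below computes roughly
//   hi = HighWord(0x55555556 * n)    (via smull)
//   q  = hi - (n >> 31)              (correction for negative n)
// which yields n / 3 for 32-bit signed n.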
bool ArmMir2Lir::SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div,
                                    RegLocation rl_src, RegLocation rl_dest, int lit) {
  UNUSED(dalvik_opcode);
  if ((lit < 0) || (lit >= static_cast<int>(sizeof(magic_table)/sizeof(magic_table[0])))) {
    return false;
  }
  DividePattern pattern = magic_table[lit].pattern;
  if (pattern == DivideNone) {
    return false;
  }

  RegStorage r_magic = AllocTemp();
  LoadConstant(r_magic, magic_table[lit].magic);
  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  RegStorage r_hi = AllocTemp();
  RegStorage r_lo = AllocTemp();

  // rl_dest and rl_src might overlap.
  // Reuse r_hi to save the div result for the remainder case.
  RegStorage r_div_result = is_div ? rl_result.reg : r_hi;

  NewLIR4(kThumb2Smull, r_lo.GetReg(), r_hi.GetReg(), r_magic.GetReg(), rl_src.reg.GetReg());
  switch (pattern) {
    case Divide3:
      OpRegRegRegShift(kOpSub, r_div_result, r_hi, rl_src.reg, EncodeShift(kArmAsr, 31));
      break;
    case Divide5:
      OpRegRegImm(kOpAsr, r_lo, rl_src.reg, 31);
      OpRegRegRegShift(kOpRsub, r_div_result, r_lo, r_hi,
                       EncodeShift(kArmAsr, magic_table[lit].shift));
      break;
    case Divide7:
      OpRegReg(kOpAdd, r_hi, rl_src.reg);
      OpRegRegImm(kOpAsr, r_lo, rl_src.reg, 31);
      OpRegRegRegShift(kOpRsub, r_div_result, r_lo, r_hi,
                       EncodeShift(kArmAsr, magic_table[lit].shift));
      break;
    default:
      LOG(FATAL) << "Unexpected pattern: " << pattern;
  }

  if (!is_div) {
    // div_result = src / lit
    // tmp1 = div_result * lit
    // dest = src - tmp1
    RegStorage tmp1 = r_lo;
    EasyMultiplyOp ops[2];

    bool canEasyMultiply = GetEasyMultiplyTwoOps(lit, ops);
    DCHECK_NE(canEasyMultiply, false);

    GenEasyMultiplyTwoOps(tmp1, r_div_result, ops);
    OpRegRegReg(kOpSub, rl_result.reg, rl_src.reg, tmp1);
  }

  StoreValue(rl_dest, rl_result);
  return true;
}

// Try to convert *lit to 1 RegRegRegShift/RegRegShift form.
bool ArmMir2Lir::GetEasyMultiplyOp(int lit, ArmMir2Lir::EasyMultiplyOp* op) {
  if (IsPowerOfTwo(lit)) {
    op->op = kOpLsl;
    op->shift = LowestSetBit(lit);
    return true;
  }

  if (IsPowerOfTwo(lit - 1)) {
    op->op = kOpAdd;
    op->shift = LowestSetBit(lit - 1);
    return true;
  }

  if (IsPowerOfTwo(lit + 1)) {
    op->op = kOpRsub;
    op->shift = LowestSetBit(lit + 1);
    return true;
  }

  op->op = kOpInvalid;
  op->shift = 0;
  return false;
}

// Try to convert *lit to 1~2 RegRegRegShift/RegRegShift forms.
bool ArmMir2Lir::GetEasyMultiplyTwoOps(int lit, EasyMultiplyOp* ops) {
  if (GetEasyMultiplyOp(lit, &ops[0])) {
    ops[1].op = kOpInvalid;
    ops[1].shift = 0;
    return true;
  }

  int lit1 = lit;
  uint32_t shift = LowestSetBit(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpLsl;
    ops[1].shift = shift;
    return true;
  }

  lit1 = lit - 1;
  shift = LowestSetBit(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpAdd;
    ops[1].shift = shift;
    return true;
  }

  lit1 = lit + 1;
  shift = LowestSetBit(lit1);
  if (GetEasyMultiplyOp(lit1 >> shift, &ops[0])) {
    ops[1].op = kOpRsub;
    ops[1].shift = shift;
    return true;
  }

  return false;
}

// Generate instructions to do a multiply.
// An additional temporary register is required
// if two instructions are needed and src/dest overlap.
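// Worked illustration (commentary only): for lit == 10, GetEasyMultiplyTwoOps()
// produces ops[0] = {kOpAdd, shift 2} and ops[1] = {kOpLsl, shift 1}, so the
// code below emits
//   tmp1 = src + (src << 2)   // 5 * src
//   dest = tmp1 << 1          // 10 * src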
void ArmMir2Lir::GenEasyMultiplyTwoOps(RegStorage r_dest, RegStorage r_src, EasyMultiplyOp* ops) {
  // tmp1 = (src << shift1) + [src | -src | 0]
  // dest = (tmp1 << shift2) + [src | -src | 0]

  RegStorage r_tmp1;
  if (ops[1].op == kOpInvalid) {
    r_tmp1 = r_dest;
  } else if (r_dest.GetReg() != r_src.GetReg()) {
    r_tmp1 = r_dest;
  } else {
    r_tmp1 = AllocTemp();
  }

  switch (ops[0].op) {
    case kOpLsl:
      OpRegRegImm(kOpLsl, r_tmp1, r_src, ops[0].shift);
      break;
    case kOpAdd:
      OpRegRegRegShift(kOpAdd, r_tmp1, r_src, r_src, EncodeShift(kArmLsl, ops[0].shift));
      break;
    case kOpRsub:
      OpRegRegRegShift(kOpRsub, r_tmp1, r_src, r_src, EncodeShift(kArmLsl, ops[0].shift));
      break;
    default:
      DCHECK_EQ(ops[0].op, kOpInvalid);
      break;
  }

  switch (ops[1].op) {
    case kOpInvalid:
      return;
    case kOpLsl:
      OpRegRegImm(kOpLsl, r_dest, r_tmp1, ops[1].shift);
      break;
    case kOpAdd:
      OpRegRegRegShift(kOpAdd, r_dest, r_src, r_tmp1, EncodeShift(kArmLsl, ops[1].shift));
      break;
    case kOpRsub:
      OpRegRegRegShift(kOpRsub, r_dest, r_src, r_tmp1, EncodeShift(kArmLsl, ops[1].shift));
      break;
    default:
      LOG(FATAL) << "Unexpected opcode passed to GenEasyMultiplyTwoOps";
      break;
  }
}

bool ArmMir2Lir::EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) {
  EasyMultiplyOp ops[2];

  if (!GetEasyMultiplyTwoOps(lit, ops)) {
    return false;
  }

  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  GenEasyMultiplyTwoOps(rl_result.reg, rl_src.reg, ops);
  StoreValue(rl_dest, rl_result);
  return true;
}

RegLocation ArmMir2Lir::GenDivRem(RegLocation rl_dest, RegLocation rl_src1,
                                  RegLocation rl_src2, bool is_div, int flags) {
  UNUSED(rl_dest, rl_src1, rl_src2, is_div, flags);
  LOG(FATAL) << "Unexpected use of GenDivRem for Arm";
  UNREACHABLE();
}

RegLocation ArmMir2Lir::GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit,
                                     bool is_div) {
  UNUSED(rl_dest, rl_src1, lit, is_div);
  LOG(FATAL) << "Unexpected use of GenDivRemLit for Arm";
  UNREACHABLE();
}

RegLocation ArmMir2Lir::GenDivRemLit(RegLocation rl_dest, RegStorage reg1, int lit, bool is_div) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  // Put the literal in a temp.
  RegStorage lit_temp = AllocTemp();
  LoadConstant(lit_temp, lit);
  // Use the generic case for div/rem with arg2 in a register.
  // TODO: The literal temp can be freed earlier during a modulus to reduce reg pressure.
  rl_result = GenDivRem(rl_result, reg1, lit_temp, is_div);
  FreeTemp(lit_temp);

  return rl_result;
}

RegLocation ArmMir2Lir::GenDivRem(RegLocation rl_dest, RegStorage reg1, RegStorage reg2,
                                  bool is_div) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (is_div) {
    // Simple case, use sdiv instruction.
    OpRegRegReg(kOpDiv, rl_result.reg, reg1, reg2);
  } else {
    // Remainder case, use the following code:
    // temp = reg1 / reg2      - integer division
    // temp = temp * reg2
    // dest = reg1 - temp

    RegStorage temp = AllocTemp();
    OpRegRegReg(kOpDiv, temp, reg1, reg2);
    OpRegReg(kOpMul, temp, reg2);
    OpRegRegReg(kOpSub, rl_result.reg, reg1, temp);
    FreeTemp(temp);
  }

  return rl_result;
}

bool ArmMir2Lir::GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long) {
  DCHECK_EQ(cu_->instruction_set, kThumb2);
  if (is_long) {
    return false;
  }
  RegLocation rl_src1 = info->args[0];
  RegLocation rl_src2 = info->args[1];
  rl_src1 = LoadValue(rl_src1, kCoreReg);
  rl_src2 = LoadValue(rl_src2, kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  OpRegReg(kOpCmp, rl_src1.reg, rl_src2.reg);
  LIR* it = OpIT((is_min) ? kCondGt : kCondLt, "E");
  OpRegReg(kOpMov, rl_result.reg, rl_src2.reg);
  OpRegReg(kOpMov, rl_result.reg, rl_src1.reg);
  OpEndIT(it);
  StoreValue(rl_dest, rl_result);
  return true;
}

bool ArmMir2Lir::GenInlinedPeek(CallInfo* info, OpSize size) {
  RegLocation rl_src_address = info->args[0];  // long address
  rl_src_address = NarrowRegLoc(rl_src_address);  // ignore high half in info->args[1]
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_address = LoadValue(rl_src_address, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (size == k64) {
    // Fake unaligned LDRD by two unaligned LDR instructions on ARMv7 with SCTLR.A set to 0.
    if (rl_address.reg.GetReg() != rl_result.reg.GetLowReg()) {
      Load32Disp(rl_address.reg, 0, rl_result.reg.GetLow());
      Load32Disp(rl_address.reg, 4, rl_result.reg.GetHigh());
    } else {
      Load32Disp(rl_address.reg, 4, rl_result.reg.GetHigh());
      Load32Disp(rl_address.reg, 0, rl_result.reg.GetLow());
    }
    StoreValueWide(rl_dest, rl_result);
  } else {
    DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);
    // Unaligned load with LDR and LDRSH is allowed on ARMv7 with SCTLR.A set to 0.
    LoadBaseDisp(rl_address.reg, 0, rl_result.reg, size, kNotVolatile);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}

bool ArmMir2Lir::GenInlinedPoke(CallInfo* info, OpSize size) {
  RegLocation rl_src_address = info->args[0];  // long address
  rl_src_address = NarrowRegLoc(rl_src_address);  // ignore high half in info->args[1]
  RegLocation rl_src_value = info->args[2];  // [size] value
  RegLocation rl_address = LoadValue(rl_src_address, kCoreReg);
  if (size == k64) {
    // Fake unaligned STRD by two unaligned STR instructions on ARMv7 with SCTLR.A set to 0.
    RegLocation rl_value = LoadValueWide(rl_src_value, kCoreReg);
    StoreBaseDisp(rl_address.reg, 0, rl_value.reg.GetLow(), k32, kNotVolatile);
    StoreBaseDisp(rl_address.reg, 4, rl_value.reg.GetHigh(), k32, kNotVolatile);
  } else {
    DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);
    // Unaligned store with STR and STRSH is allowed on ARMv7 with SCTLR.A set to 0.
    RegLocation rl_value = LoadValue(rl_src_value, kCoreReg);
    StoreBaseDisp(rl_address.reg, 0, rl_value.reg, size, kNotVolatile);
  }
  return true;
}

// Generate a CAS with memory_order_seq_cst semantics.
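// Illustrative shape of the sequence emitted below (commentary only; operand
// details differ between the 32-bit and 64-bit paths):
//   dmb ish                     ; kAnyStore barrier
// retry:
//   ldrex   tmp, [r_ptr]
//   subs    tmp, tmp, expected
//   itt     eq
//   strexeq tmp, new_value, [r_ptr]
//   cmpeq   tmp, #1
//   beq     retry               ; strex failed, try again
//   dmb ish                     ; kLoadAny barrier
//   result := (tmp == 0)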
Vladimir Marko1c282e22013-11-21 14:49:47 +0000808bool ArmMir2Lir::GenInlinedCas(CallInfo* info, bool is_long, bool is_object) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700809 DCHECK_EQ(cu_->instruction_set, kThumb2);
810 // Unused - RegLocation rl_src_unsafe = info->args[0];
Vladimir Marko1c282e22013-11-21 14:49:47 +0000811 RegLocation rl_src_obj = info->args[1]; // Object - known non-null
812 RegLocation rl_src_offset = info->args[2]; // long low
buzbee2700f7e2014-03-07 09:46:20 -0800813 rl_src_offset = NarrowRegLoc(rl_src_offset); // ignore high half in info->args[3]
Vladimir Marko1c282e22013-11-21 14:49:47 +0000814 RegLocation rl_src_expected = info->args[4]; // int, long or Object
Vladimir Marko3e5af822013-11-21 15:01:20 +0000815 // If is_long, high half is in info->args[5]
816 RegLocation rl_src_new_value = info->args[is_long ? 6 : 5]; // int, long or Object
817 // If is_long, high half is in info->args[7]
Brian Carlstrom7940e442013-07-12 13:46:57 -0700818 RegLocation rl_dest = InlineTarget(info); // boolean place for result
819
Vladimir Marko3e5af822013-11-21 15:01:20 +0000820 // We have only 5 temporary registers available and actually only 4 if the InlineTarget
821 // above locked one of the temps. For a straightforward CAS64 we need 7 registers:
822 // r_ptr (1), new_value (2), expected(2) and ldrexd result (2). If neither expected nor
823 // new_value is in a non-temp core register we shall reload them in the ldrex/strex loop
824 // into the same temps, reducing the number of required temps down to 5. We shall work
825 // around the potentially locked temp by using LR for r_ptr, unconditionally.
826 // TODO: Pass information about the need for more temps to the stack frame generation
827 // code so that we can rely on being able to allocate enough temps.
buzbee091cc402014-03-31 10:14:40 -0700828 DCHECK(!GetRegInfo(rs_rARM_LR)->IsTemp());
829 MarkTemp(rs_rARM_LR);
830 FreeTemp(rs_rARM_LR);
831 LockTemp(rs_rARM_LR);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000832 bool load_early = true;
833 if (is_long) {
buzbee091cc402014-03-31 10:14:40 -0700834 RegStorage expected_reg = rl_src_expected.reg.IsPair() ? rl_src_expected.reg.GetLow() :
835 rl_src_expected.reg;
836 RegStorage new_val_reg = rl_src_new_value.reg.IsPair() ? rl_src_new_value.reg.GetLow() :
837 rl_src_new_value.reg;
838 bool expected_is_core_reg = rl_src_expected.location == kLocPhysReg && !expected_reg.IsFloat();
839 bool new_value_is_core_reg = rl_src_new_value.location == kLocPhysReg && !new_val_reg.IsFloat();
buzbee2700f7e2014-03-07 09:46:20 -0800840 bool expected_is_good_reg = expected_is_core_reg && !IsTemp(expected_reg);
841 bool new_value_is_good_reg = new_value_is_core_reg && !IsTemp(new_val_reg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000842
843 if (!expected_is_good_reg && !new_value_is_good_reg) {
844 // None of expected/new_value is non-temp reg, need to load both late
845 load_early = false;
846 // Make sure they are not in the temp regs and the load will not be skipped.
847 if (expected_is_core_reg) {
buzbee2700f7e2014-03-07 09:46:20 -0800848 FlushRegWide(rl_src_expected.reg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000849 ClobberSReg(rl_src_expected.s_reg_low);
850 ClobberSReg(GetSRegHi(rl_src_expected.s_reg_low));
851 rl_src_expected.location = kLocDalvikFrame;
852 }
853 if (new_value_is_core_reg) {
buzbee2700f7e2014-03-07 09:46:20 -0800854 FlushRegWide(rl_src_new_value.reg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000855 ClobberSReg(rl_src_new_value.s_reg_low);
856 ClobberSReg(GetSRegHi(rl_src_new_value.s_reg_low));
857 rl_src_new_value.location = kLocDalvikFrame;
858 }
859 }
860 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700861
Hans Boehm48f5c472014-06-27 14:50:10 -0700862 // Prevent reordering with prior memory operations.
863 GenMemBarrier(kAnyStore);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700864
buzbeea0cd2d72014-06-01 09:33:49 -0700865 RegLocation rl_object = LoadValue(rl_src_obj, kRefReg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000866 RegLocation rl_new_value;
867 if (!is_long) {
buzbee7c02e912014-10-03 13:14:17 -0700868 rl_new_value = LoadValue(rl_src_new_value, LocToRegClass(rl_src_new_value));
Vladimir Marko3e5af822013-11-21 15:01:20 +0000869 } else if (load_early) {
870 rl_new_value = LoadValueWide(rl_src_new_value, kCoreReg);
871 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700872
Vladimir Marko1c282e22013-11-21 14:49:47 +0000873 if (is_object && !mir_graph_->IsConstantNullRef(rl_new_value)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700874 // Mark card for object assuming new value is stored.
Vladimir Marko743b98c2014-11-24 19:45:41 +0000875 MarkGCCard(0, rl_new_value.reg, rl_object.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700876 }
877
878 RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);
879
buzbee2700f7e2014-03-07 09:46:20 -0800880 RegStorage r_ptr = rs_rARM_LR;
881 OpRegRegReg(kOpAdd, r_ptr, rl_object.reg, rl_offset.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700882
883 // Free now unneeded rl_object and rl_offset to give more temps.
884 ClobberSReg(rl_object.s_reg_low);
buzbee091cc402014-03-31 10:14:40 -0700885 FreeTemp(rl_object.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700886 ClobberSReg(rl_offset.s_reg_low);
buzbee091cc402014-03-31 10:14:40 -0700887 FreeTemp(rl_offset.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700888
Vladimir Marko3e5af822013-11-21 15:01:20 +0000889 RegLocation rl_expected;
890 if (!is_long) {
buzbee7c02e912014-10-03 13:14:17 -0700891 rl_expected = LoadValue(rl_src_expected, LocToRegClass(rl_src_new_value));
Vladimir Marko3e5af822013-11-21 15:01:20 +0000892 } else if (load_early) {
893 rl_expected = LoadValueWide(rl_src_expected, kCoreReg);
894 } else {
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000895 // NOTE: partially defined rl_expected & rl_new_value - but we just want the regs.
buzbee091cc402014-03-31 10:14:40 -0700896 RegStorage low_reg = AllocTemp();
897 RegStorage high_reg = AllocTemp();
898 rl_new_value.reg = RegStorage::MakeRegPair(low_reg, high_reg);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000899 rl_expected = rl_new_value;
Vladimir Marko3e5af822013-11-21 15:01:20 +0000900 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700901
Vladimir Marko3e5af822013-11-21 15:01:20 +0000902 // do {
903 // tmp = [r_ptr] - expected;
904 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
905 // result = tmp != 0;
906
buzbee2700f7e2014-03-07 09:46:20 -0800907 RegStorage r_tmp = AllocTemp();
Jeff Hao2de2aa12013-09-12 17:20:31 -0700908 LIR* target = NewLIR0(kPseudoTargetLabel);
Jeff Hao2de2aa12013-09-12 17:20:31 -0700909
Dave Allison3da67a52014-04-02 17:03:45 -0700910 LIR* it = nullptr;
Vladimir Marko3e5af822013-11-21 15:01:20 +0000911 if (is_long) {
buzbee2700f7e2014-03-07 09:46:20 -0800912 RegStorage r_tmp_high = AllocTemp();
Vladimir Marko3e5af822013-11-21 15:01:20 +0000913 if (!load_early) {
buzbee2700f7e2014-03-07 09:46:20 -0800914 LoadValueDirectWide(rl_src_expected, rl_expected.reg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000915 }
buzbee2700f7e2014-03-07 09:46:20 -0800916 NewLIR3(kThumb2Ldrexd, r_tmp.GetReg(), r_tmp_high.GetReg(), r_ptr.GetReg());
917 OpRegReg(kOpSub, r_tmp, rl_expected.reg.GetLow());
918 OpRegReg(kOpSub, r_tmp_high, rl_expected.reg.GetHigh());
Vladimir Marko3e5af822013-11-21 15:01:20 +0000919 if (!load_early) {
buzbee2700f7e2014-03-07 09:46:20 -0800920 LoadValueDirectWide(rl_src_new_value, rl_new_value.reg);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000921 }
922 // Make sure we use ORR that sets the ccode
buzbee091cc402014-03-31 10:14:40 -0700923 if (r_tmp.Low8() && r_tmp_high.Low8()) {
buzbee2700f7e2014-03-07 09:46:20 -0800924 NewLIR2(kThumbOrr, r_tmp.GetReg(), r_tmp_high.GetReg());
Vladimir Marko3e5af822013-11-21 15:01:20 +0000925 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800926 NewLIR4(kThumb2OrrRRRs, r_tmp.GetReg(), r_tmp.GetReg(), r_tmp_high.GetReg(), 0);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000927 }
928 FreeTemp(r_tmp_high); // Now unneeded
929
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100930 DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
Dave Allison3da67a52014-04-02 17:03:45 -0700931 it = OpIT(kCondEq, "T");
buzbee2700f7e2014-03-07 09:46:20 -0800932 NewLIR4(kThumb2Strexd /* eq */, r_tmp.GetReg(), rl_new_value.reg.GetLowReg(), rl_new_value.reg.GetHighReg(), r_ptr.GetReg());
Vladimir Marko3e5af822013-11-21 15:01:20 +0000933
934 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800935 NewLIR3(kThumb2Ldrex, r_tmp.GetReg(), r_ptr.GetReg(), 0);
936 OpRegReg(kOpSub, r_tmp, rl_expected.reg);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100937 DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
Dave Allison3da67a52014-04-02 17:03:45 -0700938 it = OpIT(kCondEq, "T");
buzbee2700f7e2014-03-07 09:46:20 -0800939 NewLIR4(kThumb2Strex /* eq */, r_tmp.GetReg(), rl_new_value.reg.GetReg(), r_ptr.GetReg(), 0);
Vladimir Marko3e5af822013-11-21 15:01:20 +0000940 }
941
942 // Still one conditional left from OpIT(kCondEq, "T") from either branch
943 OpRegImm(kOpCmp /* eq */, r_tmp, 1);
Dave Allison3da67a52014-04-02 17:03:45 -0700944 OpEndIT(it);
Dave Allison43a065c2014-04-01 15:14:46 -0700945
Jeff Hao2de2aa12013-09-12 17:20:31 -0700946 OpCondBranch(kCondEq, target);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700947
Vladimir Marko3e5af822013-11-21 15:01:20 +0000948 if (!load_early) {
buzbee2700f7e2014-03-07 09:46:20 -0800949 FreeTemp(rl_expected.reg); // Now unneeded.
Vladimir Marko3e5af822013-11-21 15:01:20 +0000950 }
951
Hans Boehm48f5c472014-06-27 14:50:10 -0700952 // Prevent reordering with subsequent memory operations.
953 GenMemBarrier(kLoadAny);
954
Vladimir Marko3e5af822013-11-21 15:01:20 +0000955 // result := (tmp1 != 0) ? 0 : 1;
956 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800957 OpRegRegImm(kOpRsub, rl_result.reg, r_tmp, 1);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100958 DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
Dave Allison3da67a52014-04-02 17:03:45 -0700959 it = OpIT(kCondUlt, "");
buzbee2700f7e2014-03-07 09:46:20 -0800960 LoadConstant(rl_result.reg, 0); /* cc */
Vladimir Marko3e5af822013-11-21 15:01:20 +0000961 FreeTemp(r_tmp); // Now unneeded.
Dave Allison3da67a52014-04-02 17:03:45 -0700962 OpEndIT(it); // Barrier to terminate OpIT.
Vladimir Marko3e5af822013-11-21 15:01:20 +0000963
Brian Carlstrom7940e442013-07-12 13:46:57 -0700964 StoreValue(rl_dest, rl_result);
965
Vladimir Marko3e5af822013-11-21 15:01:20 +0000966 // Now, restore lr to its non-temp status.
buzbee091cc402014-03-31 10:14:40 -0700967 Clobber(rs_rARM_LR);
968 UnmarkTemp(rs_rARM_LR);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700969 return true;
970}
971
Zheng Xu947717a2014-08-07 14:05:23 +0800972bool ArmMir2Lir::GenInlinedArrayCopyCharArray(CallInfo* info) {
973 constexpr int kLargeArrayThreshold = 256;
974
975 RegLocation rl_src = info->args[0];
976 RegLocation rl_src_pos = info->args[1];
977 RegLocation rl_dst = info->args[2];
978 RegLocation rl_dst_pos = info->args[3];
979 RegLocation rl_length = info->args[4];
980 // Compile time check, handle exception by non-inline method to reduce related meta-data.
981 if ((rl_src_pos.is_const && (mir_graph_->ConstantValue(rl_src_pos) < 0)) ||
982 (rl_dst_pos.is_const && (mir_graph_->ConstantValue(rl_dst_pos) < 0)) ||
983 (rl_length.is_const && (mir_graph_->ConstantValue(rl_length) < 0))) {
984 return false;
985 }
986
987 ClobberCallerSave();
988 LockCallTemps(); // Prepare for explicit register usage.
989 LockTemp(rs_r12);
990 RegStorage rs_src = rs_r0;
991 RegStorage rs_dst = rs_r1;
992 LoadValueDirectFixed(rl_src, rs_src);
993 LoadValueDirectFixed(rl_dst, rs_dst);
994
995 // Handle null pointer exception in slow-path.
996 LIR* src_check_branch = OpCmpImmBranch(kCondEq, rs_src, 0, nullptr);
997 LIR* dst_check_branch = OpCmpImmBranch(kCondEq, rs_dst, 0, nullptr);
998 // Handle potential overlapping in slow-path.
999 LIR* src_dst_same = OpCmpBranch(kCondEq, rs_src, rs_dst, nullptr);
1000 // Handle exception or big length in slow-path.
1001 RegStorage rs_length = rs_r2;
1002 LoadValueDirectFixed(rl_length, rs_length);
1003 LIR* len_neg_or_too_big = OpCmpImmBranch(kCondHi, rs_length, kLargeArrayThreshold, nullptr);
1004 // Src bounds check.
1005 RegStorage rs_pos = rs_r3;
1006 RegStorage rs_arr_length = rs_r12;
1007 LoadValueDirectFixed(rl_src_pos, rs_pos);
1008 LIR* src_pos_negative = OpCmpImmBranch(kCondLt, rs_pos, 0, nullptr);
1009 Load32Disp(rs_src, mirror::Array::LengthOffset().Int32Value(), rs_arr_length);
1010 OpRegReg(kOpSub, rs_arr_length, rs_pos);
1011 LIR* src_bad_len = OpCmpBranch(kCondLt, rs_arr_length, rs_length, nullptr);
1012 // Dst bounds check.
1013 LoadValueDirectFixed(rl_dst_pos, rs_pos);
1014 LIR* dst_pos_negative = OpCmpImmBranch(kCondLt, rs_pos, 0, nullptr);
1015 Load32Disp(rs_dst, mirror::Array::LengthOffset().Int32Value(), rs_arr_length);
1016 OpRegReg(kOpSub, rs_arr_length, rs_pos);
1017 LIR* dst_bad_len = OpCmpBranch(kCondLt, rs_arr_length, rs_length, nullptr);
1018
1019 // Everything is checked now.
1020 OpRegImm(kOpAdd, rs_dst, mirror::Array::DataOffset(2).Int32Value());
1021 OpRegReg(kOpAdd, rs_dst, rs_pos);
1022 OpRegReg(kOpAdd, rs_dst, rs_pos);
1023 OpRegImm(kOpAdd, rs_src, mirror::Array::DataOffset(2).Int32Value());
1024 LoadValueDirectFixed(rl_src_pos, rs_pos);
1025 OpRegReg(kOpAdd, rs_src, rs_pos);
1026 OpRegReg(kOpAdd, rs_src, rs_pos);
1027
1028 RegStorage rs_tmp = rs_pos;
1029 OpRegRegImm(kOpLsl, rs_length, rs_length, 1);
1030
1031 // Copy one element.
1032 OpRegRegImm(kOpAnd, rs_tmp, rs_length, 2);
1033 LIR* jmp_to_begin_loop = OpCmpImmBranch(kCondEq, rs_tmp, 0, nullptr);
1034 OpRegImm(kOpSub, rs_length, 2);
1035 LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, kSignedHalf);
1036 StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, kSignedHalf);
1037
1038 // Copy two elements.
1039 LIR *begin_loop = NewLIR0(kPseudoTargetLabel);
1040 LIR* jmp_to_ret = OpCmpImmBranch(kCondEq, rs_length, 0, nullptr);
1041 OpRegImm(kOpSub, rs_length, 4);
1042 LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);
1043 StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);
1044 OpUnconditionalBranch(begin_loop);
1045
1046 LIR *check_failed = NewLIR0(kPseudoTargetLabel);
1047 LIR* launchpad_branch = OpUnconditionalBranch(nullptr);
1048 LIR* return_point = NewLIR0(kPseudoTargetLabel);
1049
1050 src_check_branch->target = check_failed;
1051 dst_check_branch->target = check_failed;
1052 src_dst_same->target = check_failed;
1053 len_neg_or_too_big->target = check_failed;
1054 src_pos_negative->target = check_failed;
1055 src_bad_len->target = check_failed;
1056 dst_pos_negative->target = check_failed;
1057 dst_bad_len->target = check_failed;
1058 jmp_to_begin_loop->target = begin_loop;
1059 jmp_to_ret->target = return_point;
1060
1061 AddIntrinsicSlowPath(info, launchpad_branch, return_point);
Serguei Katkov9863daf2014-09-04 15:21:32 +07001062 ClobberCallerSave(); // We must clobber everything because slow path will return here
Zheng Xu947717a2014-08-07 14:05:23 +08001063
1064 return true;
1065}
1066
buzbee2700f7e2014-03-07 09:46:20 -08001067LIR* ArmMir2Lir::OpPcRelLoad(RegStorage reg, LIR* target) {
1068 return RawLIR(current_dalvik_offset_, kThumb2LdrPcRel12, reg.GetReg(), 0, 0, 0, 0, target);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001069}
1070
buzbee2700f7e2014-03-07 09:46:20 -08001071LIR* ArmMir2Lir::OpVldm(RegStorage r_base, int count) {
buzbee091cc402014-03-31 10:14:40 -07001072 return NewLIR3(kThumb2Vldms, r_base.GetReg(), rs_fr0.GetReg(), count);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001073}
1074
buzbee2700f7e2014-03-07 09:46:20 -08001075LIR* ArmMir2Lir::OpVstm(RegStorage r_base, int count) {
buzbee091cc402014-03-31 10:14:40 -07001076 return NewLIR3(kThumb2Vstms, r_base.GetReg(), rs_fr0.GetReg(), count);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001077}
1078
1079void ArmMir2Lir::GenMultiplyByTwoBitMultiplier(RegLocation rl_src,
1080 RegLocation rl_result, int lit,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001081 int first_bit, int second_bit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001082 UNUSED(lit);
Ian Rogerse2143c02014-03-28 08:47:16 -07001083 OpRegRegRegShift(kOpAdd, rl_result.reg, rl_src.reg, rl_src.reg,
Brian Carlstrom7940e442013-07-12 13:46:57 -07001084 EncodeShift(kArmLsl, second_bit - first_bit));
1085 if (first_bit != 0) {
buzbee2700f7e2014-03-07 09:46:20 -08001086 OpRegRegImm(kOpLsl, rl_result.reg, rl_result.reg, first_bit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001087 }
1088}
1089
Mingyao Yange643a172014-04-08 11:02:52 -07001090void ArmMir2Lir::GenDivZeroCheckWide(RegStorage reg) {
buzbee2700f7e2014-03-07 09:46:20 -08001091 DCHECK(reg.IsPair()); // TODO: support k64BitSolo.
1092 RegStorage t_reg = AllocTemp();
1093 NewLIR4(kThumb2OrrRRRs, t_reg.GetReg(), reg.GetLowReg(), reg.GetHighReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001094 FreeTemp(t_reg);
Mingyao Yange643a172014-04-08 11:02:52 -07001095 GenDivZeroCheck(kCondEq);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001096}
1097
1098// Test suspend flag, return target of taken suspend branch
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001099LIR* ArmMir2Lir::OpTestSuspend(LIR* target) {
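  // Two strategies below: with ARM_R4_SUSPEND_FLAG the dedicated suspend counter in r4 is
  // decremented and tested; otherwise the thread's flags halfword is loaded from the Thread
  // object and compared against zero.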
Wei Jin04f4d8a2014-05-29 18:04:29 -07001100#ifdef ARM_R4_SUSPEND_FLAG
buzbee091cc402014-03-31 10:14:40 -07001101 NewLIR2(kThumbSubRI8, rs_rARM_SUSPEND.GetReg(), 1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001102 return OpCondBranch((target == NULL) ? kCondEq : kCondNe, target);
Wei Jin04f4d8a2014-05-29 18:04:29 -07001103#else
1104 RegStorage t_reg = AllocTemp();
1105 LoadBaseDisp(rs_rARM_SELF, Thread::ThreadFlagsOffset<4>().Int32Value(),
Ian Rogers8ba17f62014-10-27 18:48:49 -07001106 t_reg, kUnsignedHalf, kNotVolatile);
Wei Jin04f4d8a2014-05-29 18:04:29 -07001107 LIR* cmp_branch = OpCmpImmBranch((target == NULL) ? kCondNe : kCondEq, t_reg,
1108 0, target);
1109 FreeTemp(t_reg);
1110 return cmp_branch;
1111#endif
Brian Carlstrom7940e442013-07-12 13:46:57 -07001112}
1113
1114// Decrement register and branch on condition
buzbee2700f7e2014-03-07 09:46:20 -08001115LIR* ArmMir2Lir::OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001116 // Combine sub & test using sub setflags encoding here
Vladimir Markodbb8c492014-02-28 17:36:39 +00001117 OpRegRegImm(kOpSub, reg, reg, 1); // For value == 1, this should set flags.
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001118 DCHECK(last_lir_insn_->u.m.def_mask->HasBit(ResourceMask::kCCode));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001119 return OpCondBranch(c_code, target);
1120}
1121
Andreas Gampeb14329f2014-05-15 11:16:06 -07001122bool ArmMir2Lir::GenMemBarrier(MemBarrierKind barrier_kind) {
Elliott Hughes8366ca02014-11-17 12:02:05 -08001123 if (!cu_->GetInstructionSetFeatures()->IsSmp()) {
1124 return false;
1125 }
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001126 // Start off by using the last LIR as the barrier. If it is not a suitable barrier, generate a new one below.
1127 LIR* barrier = last_lir_insn_;
1128
Brian Carlstrom7940e442013-07-12 13:46:57 -07001129 int dmb_flavor;
1130 // TODO: revisit Arm barrier kinds
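  // kISH is a full barrier over the inner shareable domain; kISHST orders only store->store,
  // which is all kStoreStore requires.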
1131 switch (barrier_kind) {
Hans Boehm48f5c472014-06-27 14:50:10 -07001132 case kAnyStore: dmb_flavor = kISH; break;
1133 case kLoadAny: dmb_flavor = kISH; break;
Ian Rogersb122a4b2013-11-19 18:00:50 -08001134 case kStoreStore: dmb_flavor = kISHST; break;
Hans Boehm48f5c472014-06-27 14:50:10 -07001135 case kAnyAny: dmb_flavor = kISH; break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001136 default:
1137 LOG(FATAL) << "Unexpected MemBarrierKind: " << barrier_kind;
1138 dmb_flavor = kSY; // quiet gcc.
1139 break;
1140 }
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001141
Andreas Gampeb14329f2014-05-15 11:16:06 -07001142 bool ret = false;
1143
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001144 // If the same barrier already exists, don't generate another.
1145 if (barrier == nullptr || barrier->opcode != kThumb2Dmb || barrier->operands[0] != dmb_flavor) {
1147 barrier = NewLIR1(kThumb2Dmb, dmb_flavor);
Andreas Gampeb14329f2014-05-15 11:16:06 -07001148 ret = true;
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001149 }
1150
1151 // At this point we must have a memory barrier. Mark it as a scheduling barrier as well.
1152 DCHECK(!barrier->flags.use_def_invalid);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001153 barrier->u.m.def_mask = &kEncodeAll;
Andreas Gampeb14329f2014-05-15 11:16:06 -07001154 return ret;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001155}
1156
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001157void ArmMir2Lir::GenNegLong(RegLocation rl_dest, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001158 rl_src = LoadValueWide(rl_src, kCoreReg);
1159 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001160 RegStorage z_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001161 LoadConstantNoClobber(z_reg, 0);
1162 // Check for destructive overlap
buzbee2700f7e2014-03-07 09:46:20 -08001163 if (rl_result.reg.GetLowReg() == rl_src.reg.GetHighReg()) {
1164 RegStorage t_reg = AllocTemp();
      OpRegCopy(t_reg, rl_src.reg.GetHigh());  // Preserve src.hi: the Sub below overwrites it (dest.lo aliases src.hi).
1165 OpRegRegReg(kOpSub, rl_result.reg.GetLow(), z_reg, rl_src.reg.GetLow());
1166 OpRegRegReg(kOpSbc, rl_result.reg.GetHigh(), z_reg, t_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001167 FreeTemp(t_reg);
1168 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001169 OpRegRegReg(kOpSub, rl_result.reg.GetLow(), z_reg, rl_src.reg.GetLow());
1170 OpRegRegReg(kOpSbc, rl_result.reg.GetHigh(), z_reg, rl_src.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001171 }
1172 FreeTemp(z_reg);
1173 StoreValueWide(rl_dest, rl_result);
1174}
1175
Mark Mendelle02d48f2014-01-15 11:19:23 -08001176void ArmMir2Lir::GenMulLong(Instruction::Code opcode, RegLocation rl_dest,
1177 RegLocation rl_src1, RegLocation rl_src2) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001178 UNUSED(opcode);
1179 /*
1180 * tmp1 = src1.hi * src2.lo; // src1.hi is no longer needed
1181 * dest = src1.lo * src2.lo;
1182 * tmp1 += src1.lo * src2.hi;
1183 * dest.hi += tmp1;
1184 *
1185 * To pull off inline multiply, we have a worst-case requirement of 7 temporary
1186 * registers. Normally for Arm, we get 5. We can get to 6 by including
1187 * lr in the temp set. The only problematic case is all operands and result are
1188 * distinct, and none have been promoted. In that case, we can succeed by aggressively
1189 * freeing operand temp registers after they are no longer needed. All other cases
1190 * can proceed normally. We'll just punt on the case of the result having a misaligned
1191 * overlap with either operand and send that case to a runtime handler.
1192 */
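  // Illustrative arithmetic (not from the original source): splitting each 64-bit operand into
  // 32-bit halves, (a * b) mod 2^64 == (a_lo * b_lo) + ((a_hi * b_lo + a_lo * b_hi) << 32);
  // the a_hi * b_hi term only affects bits >= 64 and is dropped, which is exactly the
  // tmp1/dest sequence described above.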
1193 RegLocation rl_result;
1194 if (PartiallyIntersects(rl_src1, rl_dest) || (PartiallyIntersects(rl_src2, rl_dest))) {
1195 FlushAllRegs();
1196 CallRuntimeHelperRegLocationRegLocation(kQuickLmul, rl_src1, rl_src2, false);
1197 rl_result = GetReturnWide(kCoreReg);
Zheng Xud7f8e022014-03-13 13:40:30 +00001198 StoreValueWide(rl_dest, rl_result);
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001199 return;
1200 }
1201
1202 rl_src1 = LoadValueWide(rl_src1, kCoreReg);
1203 rl_src2 = LoadValueWide(rl_src2, kCoreReg);
1204
1205 int reg_status = 0;
1206 RegStorage res_lo;
1207 RegStorage res_hi;
1208 bool dest_promoted = rl_dest.location == kLocPhysReg && rl_dest.reg.Valid() &&
1209 !IsTemp(rl_dest.reg.GetLow()) && !IsTemp(rl_dest.reg.GetHigh());
1210 bool src1_promoted = !IsTemp(rl_src1.reg.GetLow()) && !IsTemp(rl_src1.reg.GetHigh());
1211 bool src2_promoted = !IsTemp(rl_src2.reg.GetLow()) && !IsTemp(rl_src2.reg.GetHigh());
1212 // Check if rl_dest is *not* either operand and we have enough temp registers.
1213 if ((rl_dest.s_reg_low != rl_src1.s_reg_low && rl_dest.s_reg_low != rl_src2.s_reg_low) &&
1214 (dest_promoted || src1_promoted || src2_promoted)) {
1215 // In this case, we do not need to manually allocate temp registers for result.
1216 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1217 res_lo = rl_result.reg.GetLow();
1218 res_hi = rl_result.reg.GetHigh();
1219 } else {
1220 res_lo = AllocTemp();
1221 if ((rl_src1.s_reg_low == rl_src2.s_reg_low) || src1_promoted || src2_promoted) {
1222 // In this case, there are enough temp registers available to allocate the result now.
1223 res_hi = AllocTemp();
1224 reg_status = 1;
1225 } else {
1226 // In this case, all temps are now allocated.
1227 // res_hi will be allocated after we can free src1_hi.
1228 reg_status = 2;
1229 }
1230 }
1231
1232 // Temporarily add LR to the temp pool, and assign it to tmp1
1233 MarkTemp(rs_rARM_LR);
1234 FreeTemp(rs_rARM_LR);
1235 RegStorage tmp1 = rs_rARM_LR;
1236 LockTemp(rs_rARM_LR);
1237
1238 if (rl_src1.reg == rl_src2.reg) {
1239 DCHECK(res_hi.Valid());
1240 DCHECK(res_lo.Valid());
1241 NewLIR3(kThumb2MulRRR, tmp1.GetReg(), rl_src1.reg.GetLowReg(), rl_src1.reg.GetHighReg());
1242 NewLIR4(kThumb2Umull, res_lo.GetReg(), res_hi.GetReg(), rl_src1.reg.GetLowReg(),
1243 rl_src1.reg.GetLowReg());
1244 OpRegRegRegShift(kOpAdd, res_hi, res_hi, tmp1, EncodeShift(kArmLsl, 1));
1245 } else {
1246 NewLIR3(kThumb2MulRRR, tmp1.GetReg(), rl_src2.reg.GetLowReg(), rl_src1.reg.GetHighReg());
1247 if (reg_status == 2) {
1248 DCHECK(!res_hi.Valid());
1249 DCHECK_NE(rl_src1.reg.GetLowReg(), rl_src2.reg.GetLowReg());
1250 DCHECK_NE(rl_src1.reg.GetHighReg(), rl_src2.reg.GetHighReg());
1251 // Will force free src1_hi, so must clobber.
1252 Clobber(rl_src1.reg);
1253 FreeTemp(rl_src1.reg.GetHigh());
1254 res_hi = AllocTemp();
1255 }
1256 DCHECK(res_hi.Valid());
1257 DCHECK(res_lo.Valid());
1258 NewLIR4(kThumb2Umull, res_lo.GetReg(), res_hi.GetReg(), rl_src2.reg.GetLowReg(),
1259 rl_src1.reg.GetLowReg());
1260 NewLIR4(kThumb2Mla, tmp1.GetReg(), rl_src1.reg.GetLowReg(), rl_src2.reg.GetHighReg(),
1261 tmp1.GetReg());
1262 NewLIR4(kThumb2AddRRR, res_hi.GetReg(), tmp1.GetReg(), res_hi.GetReg(), 0);
1263 if (reg_status == 2) {
1264 FreeTemp(rl_src1.reg.GetLow());
1265 }
1266 }
1267
1268 // Now, restore lr to its non-temp status.
1269 FreeTemp(tmp1);
1270 Clobber(rs_rARM_LR);
1271 UnmarkTemp(rs_rARM_LR);
1272
1273 if (reg_status != 0) {
1274 // We had manually allocated registers for rl_result.
1275 // Now construct a RegLocation.
1276 rl_result = GetReturnWide(kCoreReg); // Just using as a template.
1277 rl_result.reg = RegStorage::MakeRegPair(res_lo, res_hi);
1278 }
1279
1280 StoreValueWide(rl_dest, rl_result);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001281}
1282
Andreas Gampec76c6142014-08-04 16:30:03 -07001283void ArmMir2Lir::GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001284 RegLocation rl_src2, int flags) {
Andreas Gampec76c6142014-08-04 16:30:03 -07001285 switch (opcode) {
1286 case Instruction::MUL_LONG:
1287 case Instruction::MUL_LONG_2ADDR:
1288 GenMulLong(opcode, rl_dest, rl_src1, rl_src2);
1289 return;
1290 case Instruction::NEG_LONG:
1291 GenNegLong(rl_dest, rl_src2);
1292 return;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001293
Andreas Gampec76c6142014-08-04 16:30:03 -07001294 default:
1295 break;
1296 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001297
Andreas Gampec76c6142014-08-04 16:30:03 -07001298 // Fallback for all other ops.
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001299 Mir2Lir::GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001300}
1301
1302/*
1303 * Generate array load
1304 */
1305void ArmMir2Lir::GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
Ian Rogersa9a82542013-10-04 11:17:26 -07001306 RegLocation rl_index, RegLocation rl_dest, int scale) {
buzbee091cc402014-03-31 10:14:40 -07001307 RegisterClass reg_class = RegClassBySize(size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001308 int len_offset = mirror::Array::LengthOffset().Int32Value();
1309 int data_offset;
1310 RegLocation rl_result;
1311 bool constant_index = rl_index.is_const;
buzbeea0cd2d72014-06-01 09:33:49 -07001312 rl_array = LoadValue(rl_array, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001313 if (!constant_index) {
1314 rl_index = LoadValue(rl_index, kCoreReg);
1315 }
1316
1317 if (rl_dest.wide) {
1318 data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Int32Value();
1319 } else {
1320 data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
1321 }
1322
1323 // If index is constant, just fold it into the data offset
1324 if (constant_index) {
1325 data_offset += mir_graph_->ConstantValue(rl_index) << scale;
1326 }
1327
1328 /* null object? */
buzbee2700f7e2014-03-07 09:46:20 -08001329 GenNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001330
1331 bool needs_range_check = (!(opt_flags & MIR_IGNORE_RANGE_CHECK));
buzbee2700f7e2014-03-07 09:46:20 -08001332 RegStorage reg_len;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001333 if (needs_range_check) {
1334 reg_len = AllocTemp();
1335 /* Get len */
buzbee695d13a2014-04-19 13:32:20 -07001336 Load32Disp(rl_array.reg, len_offset, reg_len);
Dave Allisonb373e092014-02-20 16:06:36 -08001337 MarkPossibleNullPointerException(opt_flags);
1338 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001339 ForceImplicitNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001340 }
1341 if (rl_dest.wide || rl_dest.fp || constant_index) {
buzbee2700f7e2014-03-07 09:46:20 -08001342 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001343 if (constant_index) {
buzbee2700f7e2014-03-07 09:46:20 -08001344 reg_ptr = rl_array.reg; // NOTE: must not alter reg_ptr in constant case.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001345 } else {
1346 // No special indexed operation, lea + load w/ displacement
buzbeea0cd2d72014-06-01 09:33:49 -07001347 reg_ptr = AllocTempRef();
Ian Rogerse2143c02014-03-28 08:47:16 -07001348 OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, rl_index.reg, EncodeShift(kArmLsl, scale));
buzbee091cc402014-03-31 10:14:40 -07001349 FreeTemp(rl_index.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001350 }
1351 rl_result = EvalLoc(rl_dest, reg_class, true);
1352
1353 if (needs_range_check) {
1354 if (constant_index) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001355 GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001356 } else {
Mingyao Yang80365d92014-04-18 12:10:58 -07001357 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001358 }
1359 FreeTemp(reg_len);
1360 }
Andreas Gampe3c12c512014-06-24 18:46:29 +00001361 LoadBaseDisp(reg_ptr, data_offset, rl_result.reg, size, kNotVolatile);
Vladimir Marko455759b2014-05-06 20:49:36 +01001362 if (!constant_index) {
1363 FreeTemp(reg_ptr);
1364 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001365 if (rl_dest.wide) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001366 StoreValueWide(rl_dest, rl_result);
1367 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001368 StoreValue(rl_dest, rl_result);
1369 }
1370 } else {
1371 // Offset base, then use indexed load
buzbeea0cd2d72014-06-01 09:33:49 -07001372 RegStorage reg_ptr = AllocTempRef();
buzbee2700f7e2014-03-07 09:46:20 -08001373 OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
buzbee091cc402014-03-31 10:14:40 -07001374 FreeTemp(rl_array.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001375 rl_result = EvalLoc(rl_dest, reg_class, true);
1376
1377 if (needs_range_check) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001378 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001379 FreeTemp(reg_len);
1380 }
buzbee2700f7e2014-03-07 09:46:20 -08001381 LoadBaseIndexed(reg_ptr, rl_index.reg, rl_result.reg, scale, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001382 FreeTemp(reg_ptr);
1383 StoreValue(rl_dest, rl_result);
1384 }
1385}
1386
1387/*
1388 * Generate array store
1389 *
1390 */
1391void ArmMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
Ian Rogersa9a82542013-10-04 11:17:26 -07001392 RegLocation rl_index, RegLocation rl_src, int scale, bool card_mark) {
buzbee091cc402014-03-31 10:14:40 -07001393 RegisterClass reg_class = RegClassBySize(size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001394 int len_offset = mirror::Array::LengthOffset().Int32Value();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001395 bool constant_index = rl_index.is_const;
1396
Ian Rogersa9a82542013-10-04 11:17:26 -07001397 int data_offset;
buzbee695d13a2014-04-19 13:32:20 -07001398 if (size == k64 || size == kDouble) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001399 data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Int32Value();
1400 } else {
1401 data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
1402 }
1403
1404 // If index is constant, just fold it into the data offset.
1405 if (constant_index) {
1406 data_offset += mir_graph_->ConstantValue(rl_index) << scale;
1407 }
1408
buzbeea0cd2d72014-06-01 09:33:49 -07001409 rl_array = LoadValue(rl_array, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001410 if (!constant_index) {
1411 rl_index = LoadValue(rl_index, kCoreReg);
1412 }
1413
buzbee2700f7e2014-03-07 09:46:20 -08001414 RegStorage reg_ptr;
Ian Rogers773aab12013-10-14 13:50:10 -07001415 bool allocated_reg_ptr_temp = false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001416 if (constant_index) {
buzbee2700f7e2014-03-07 09:46:20 -08001417 reg_ptr = rl_array.reg;
buzbee091cc402014-03-31 10:14:40 -07001418 } else if (IsTemp(rl_array.reg) && !card_mark) {
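    // Reusing the array register as the data pointer is only safe when no card mark follows,
    // since MarkGCCard below still needs the original array reference.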
1419 Clobber(rl_array.reg);
buzbee2700f7e2014-03-07 09:46:20 -08001420 reg_ptr = rl_array.reg;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001421 } else {
Ian Rogers773aab12013-10-14 13:50:10 -07001422 allocated_reg_ptr_temp = true;
buzbeea0cd2d72014-06-01 09:33:49 -07001423 reg_ptr = AllocTempRef();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001424 }
1425
1426 /* null object? */
buzbee2700f7e2014-03-07 09:46:20 -08001427 GenNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001428
1429 bool needs_range_check = (!(opt_flags & MIR_IGNORE_RANGE_CHECK));
buzbee2700f7e2014-03-07 09:46:20 -08001430 RegStorage reg_len;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001431 if (needs_range_check) {
1432 reg_len = AllocTemp();
Brian Carlstrom7934ac22013-07-26 10:54:15 -07001433 // NOTE: max live temps(4) here.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001434 /* Get len */
buzbee695d13a2014-04-19 13:32:20 -07001435 Load32Disp(rl_array.reg, len_offset, reg_len);
Dave Allisonb373e092014-02-20 16:06:36 -08001436 MarkPossibleNullPointerException(opt_flags);
1437 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001438 ForceImplicitNullCheck(rl_array.reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001439 }
1440 /* at this point, reg_ptr points to array, 2 live temps */
1441 if (rl_src.wide || rl_src.fp || constant_index) {
1442 if (rl_src.wide) {
1443 rl_src = LoadValueWide(rl_src, reg_class);
1444 } else {
1445 rl_src = LoadValue(rl_src, reg_class);
1446 }
1447 if (!constant_index) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001448 OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, rl_index.reg, EncodeShift(kArmLsl, scale));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001449 }
1450 if (needs_range_check) {
1451 if (constant_index) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001452 GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001453 } else {
Mingyao Yang80365d92014-04-18 12:10:58 -07001454 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001455 }
1456 FreeTemp(reg_len);
1457 }
1458
Andreas Gampe3c12c512014-06-24 18:46:29 +00001459 StoreBaseDisp(reg_ptr, data_offset, rl_src.reg, size, kNotVolatile);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001460 } else {
1461 /* reg_ptr -> array data */
buzbee2700f7e2014-03-07 09:46:20 -08001462 OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001463 rl_src = LoadValue(rl_src, reg_class);
1464 if (needs_range_check) {
Mingyao Yang80365d92014-04-18 12:10:58 -07001465 GenArrayBoundsCheck(rl_index.reg, reg_len);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001466 FreeTemp(reg_len);
1467 }
buzbee2700f7e2014-03-07 09:46:20 -08001468 StoreBaseIndexed(reg_ptr, rl_index.reg, rl_src.reg, scale, size);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001469 }
Ian Rogers773aab12013-10-14 13:50:10 -07001470 if (allocated_reg_ptr_temp) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001471 FreeTemp(reg_ptr);
1472 }
Ian Rogersa9a82542013-10-04 11:17:26 -07001473 if (card_mark) {
Vladimir Marko743b98c2014-11-24 19:45:41 +00001474 MarkGCCard(opt_flags, rl_src.reg, rl_array.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001475 }
1476}
1477
Ian Rogersa9a82542013-10-04 11:17:26 -07001478
Brian Carlstrom7940e442013-07-12 13:46:57 -07001479void ArmMir2Lir::GenShiftImmOpLong(Instruction::Code opcode,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001480 RegLocation rl_dest, RegLocation rl_src, RegLocation rl_shift,
1481 int flags) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001482 UNUSED(flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001483 rl_src = LoadValueWide(rl_src, kCoreReg);
1484 // Per spec, we only care about low 6 bits of shift amount.
1485 int shift_amount = mir_graph_->ConstantValue(rl_shift) & 0x3f;
1486 if (shift_amount == 0) {
1487 StoreValueWide(rl_dest, rl_src);
1488 return;
1489 }
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001490 if (PartiallyIntersects(rl_src, rl_dest)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001491 GenShiftOpLong(opcode, rl_dest, rl_src, rl_shift);
1492 return;
1493 }
1494 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
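  // For a shift amount n with 0 < n < 32 the wide shift decomposes as (illustrative):
  //   SHL:       hi = (hi << n) | (lo >> (32 - n));  lo = lo << n
  //   SHR/USHR:  lo = (lo >> n) | (hi << (32 - n));  hi = hi >> n (arithmetic vs. logical)
  // For n >= 32 the result is a shifted copy of a single source word, with the other word
  // filled from the sign bit (SHR) or zero (SHL/USHR), as handled below.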
Brian Carlstromdf629502013-07-17 22:39:56 -07001495 switch (opcode) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001496 case Instruction::SHL_LONG:
1497 case Instruction::SHL_LONG_2ADDR:
1498 if (shift_amount == 1) {
buzbee2700f7e2014-03-07 09:46:20 -08001499 OpRegRegReg(kOpAdd, rl_result.reg.GetLow(), rl_src.reg.GetLow(), rl_src.reg.GetLow());
1500 OpRegRegReg(kOpAdc, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), rl_src.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001501 } else if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001502 OpRegCopy(rl_result.reg.GetHigh(), rl_src.reg);
1503 LoadConstant(rl_result.reg.GetLow(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001504 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001505 OpRegRegImm(kOpLsl, rl_result.reg.GetHigh(), rl_src.reg.GetLow(), shift_amount - 32);
1506 LoadConstant(rl_result.reg.GetLow(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001507 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001508 OpRegRegImm(kOpLsl, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001509 OpRegRegRegShift(kOpOr, rl_result.reg.GetHigh(), rl_result.reg.GetHigh(), rl_src.reg.GetLow(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001510 EncodeShift(kArmLsr, 32 - shift_amount));
buzbee2700f7e2014-03-07 09:46:20 -08001511 OpRegRegImm(kOpLsl, rl_result.reg.GetLow(), rl_src.reg.GetLow(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001512 }
1513 break;
1514 case Instruction::SHR_LONG:
1515 case Instruction::SHR_LONG_2ADDR:
1516 if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001517 OpRegCopy(rl_result.reg.GetLow(), rl_src.reg.GetHigh());
1518 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001519 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001520 OpRegRegImm(kOpAsr, rl_result.reg.GetLow(), rl_src.reg.GetHigh(), shift_amount - 32);
1521 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001522 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001523 RegStorage t_reg = AllocTemp();
1524 OpRegRegImm(kOpLsr, t_reg, rl_src.reg.GetLow(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001525 OpRegRegRegShift(kOpOr, rl_result.reg.GetLow(), t_reg, rl_src.reg.GetHigh(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001526 EncodeShift(kArmLsl, 32 - shift_amount));
1527 FreeTemp(t_reg);
buzbee2700f7e2014-03-07 09:46:20 -08001528 OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001529 }
1530 break;
1531 case Instruction::USHR_LONG:
1532 case Instruction::USHR_LONG_2ADDR:
1533 if (shift_amount == 32) {
buzbee2700f7e2014-03-07 09:46:20 -08001534 OpRegCopy(rl_result.reg.GetLow(), rl_src.reg.GetHigh());
1535 LoadConstant(rl_result.reg.GetHigh(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001536 } else if (shift_amount > 31) {
buzbee2700f7e2014-03-07 09:46:20 -08001537 OpRegRegImm(kOpLsr, rl_result.reg.GetLow(), rl_src.reg.GetHigh(), shift_amount - 32);
1538 LoadConstant(rl_result.reg.GetHigh(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001539 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001540 RegStorage t_reg = AllocTemp();
1541 OpRegRegImm(kOpLsr, t_reg, rl_src.reg.GetLow(), shift_amount);
Ian Rogerse2143c02014-03-28 08:47:16 -07001542 OpRegRegRegShift(kOpOr, rl_result.reg.GetLow(), t_reg, rl_src.reg.GetHigh(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001543 EncodeShift(kArmLsl, 32 - shift_amount));
1544 FreeTemp(t_reg);
buzbee2700f7e2014-03-07 09:46:20 -08001545 OpRegRegImm(kOpLsr, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), shift_amount);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001546 }
1547 break;
1548 default:
1549 LOG(FATAL) << "Unexpected case";
1550 }
1551 StoreValueWide(rl_dest, rl_result);
1552}
1553
1554void ArmMir2Lir::GenArithImmOpLong(Instruction::Code opcode,
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001555 RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2,
1556 int flags) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001557 if ((opcode == Instruction::SUB_LONG_2ADDR) || (opcode == Instruction::SUB_LONG)) {
1558 if (!rl_src2.is_const) {
1559 // Don't bother with special handling for subtract from immediate.
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001560 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001561 return;
1562 }
1563 } else {
1564 // Normalize
1565 if (!rl_src2.is_const) {
1566 DCHECK(rl_src1.is_const);
Vladimir Marko58af1f92013-12-19 13:31:15 +00001567 std::swap(rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001568 }
1569 }
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001570 if (PartiallyIntersects(rl_src1, rl_dest)) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001571 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001572 return;
1573 }
1574 DCHECK(rl_src2.is_const);
1575 int64_t val = mir_graph_->ConstantValueWide(rl_src2);
1576 uint32_t val_lo = Low32Bits(val);
1577 uint32_t val_hi = High32Bits(val);
1578 int32_t mod_imm_lo = ModifiedImmediate(val_lo);
1579 int32_t mod_imm_hi = ModifiedImmediate(val_hi);
1580
1581 // Only a subset of add/sub immediate instructions set carry - so bail if we don't fit
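  // ModifiedImmediate() is assumed to return a negative value when the constant cannot be
  // encoded as a Thumb2 modified immediate, which is what the bail-out below tests.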
Brian Carlstromdf629502013-07-17 22:39:56 -07001582 switch (opcode) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001583 case Instruction::ADD_LONG:
1584 case Instruction::ADD_LONG_2ADDR:
1585 case Instruction::SUB_LONG:
1586 case Instruction::SUB_LONG_2ADDR:
1587 if ((mod_imm_lo < 0) || (mod_imm_hi < 0)) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001588 GenArithOpLong(opcode, rl_dest, rl_src1, rl_src2, flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001589 return;
1590 }
1591 break;
1592 default:
1593 break;
1594 }
1595 rl_src1 = LoadValueWide(rl_src1, kCoreReg);
1596 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1597 // NOTE: once we've done the EvalLoc on dest, we can no longer bail.
1598 switch (opcode) {
1599 case Instruction::ADD_LONG:
1600 case Instruction::ADD_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001601 NewLIR3(kThumb2AddRRI8M, rl_result.reg.GetLowReg(), rl_src1.reg.GetLowReg(), mod_imm_lo);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001602 NewLIR3(kThumb2AdcRRI8M, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(), mod_imm_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001603 break;
1604 case Instruction::OR_LONG:
1605 case Instruction::OR_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001606 if ((val_lo != 0) || (rl_result.reg.GetLowReg() != rl_src1.reg.GetLowReg())) {
1607 OpRegRegImm(kOpOr, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001608 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001609 if ((val_hi != 0) || (rl_result.reg.GetHighReg() != rl_src1.reg.GetHighReg())) {
buzbee2700f7e2014-03-07 09:46:20 -08001610 OpRegRegImm(kOpOr, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001611 }
1612 break;
1613 case Instruction::XOR_LONG:
1614 case Instruction::XOR_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001615 OpRegRegImm(kOpXor, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
1616 OpRegRegImm(kOpXor, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001617 break;
1618 case Instruction::AND_LONG:
1619 case Instruction::AND_LONG_2ADDR:
buzbee2700f7e2014-03-07 09:46:20 -08001620 if ((val_lo != 0xffffffff) || (rl_result.reg.GetLowReg() != rl_src1.reg.GetLowReg())) {
1621 OpRegRegImm(kOpAnd, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), val_lo);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001622 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001623 if ((val_hi != 0xffffffff) || (rl_result.reg.GetHighReg() != rl_src1.reg.GetHighReg())) {
buzbee2700f7e2014-03-07 09:46:20 -08001624 OpRegRegImm(kOpAnd, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001625 }
1626 break;
1627 case Instruction::SUB_LONG_2ADDR:
1628 case Instruction::SUB_LONG:
buzbee2700f7e2014-03-07 09:46:20 -08001629 NewLIR3(kThumb2SubRRI8M, rl_result.reg.GetLowReg(), rl_src1.reg.GetLowReg(), mod_imm_lo);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001630 NewLIR3(kThumb2SbcRRI8M, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(), mod_imm_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001631 break;
1632 default:
1633 LOG(FATAL) << "Unexpected opcode " << opcode;
1634 }
1635 StoreValueWide(rl_dest, rl_result);
1636}
1637
1638} // namespace art