blob: c75e68168368897a0c9ebffa306f75759f75706d [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_ir.h"
Vladimir Marko5c96e6b2013-11-14 15:34:17 +000018#include "dex/frontend.h"
19#include "dex/quick/dex_file_method_inliner.h"
20#include "dex/quick/dex_file_to_method_inliner_map.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070021#include "dex_file-inl.h"
Ian Rogers166db042013-07-26 12:05:57 -070022#include "entrypoints/quick/quick_entrypoints.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023#include "invoke_type.h"
24#include "mirror/array.h"
Dmitry Petrochenko37498b62014-05-05 20:33:38 +070025#include "mirror/object_array-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070026#include "mirror/string.h"
27#include "mir_to_lir-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070028#include "x86/codegen_x86.h"
29
30namespace art {
31
Dmitry Petrochenko37498b62014-05-05 20:33:38 +070032// Shortcuts to repeatedly used long types.
33typedef mirror::ObjectArray<mirror::Object> ObjArray;
34
Brian Carlstrom7940e442013-07-12 13:46:57 -070035/*
36 * This source files contains "gen" codegen routines that should
37 * be applicable to most targets. Only mid-level support utilities
38 * and "op" calls may be used here.
39 */
40
Mingyao Yang3a74d152014-04-21 15:39:44 -070041void Mir2Lir::AddIntrinsicSlowPath(CallInfo* info, LIR* branch, LIR* resume) {
42 class IntrinsicSlowPathPath : public Mir2Lir::LIRSlowPath {
Vladimir Marko3bc86152014-03-13 14:11:28 +000043 public:
Mingyao Yang3a74d152014-04-21 15:39:44 -070044 IntrinsicSlowPathPath(Mir2Lir* m2l, CallInfo* info, LIR* branch, LIR* resume = nullptr)
Vladimir Marko3bc86152014-03-13 14:11:28 +000045 : LIRSlowPath(m2l, info->offset, branch, resume), info_(info) {
46 }
47
48 void Compile() {
49 m2l_->ResetRegPool();
50 m2l_->ResetDefTracking();
Mingyao Yang6ffcfa02014-04-25 11:06:00 -070051 GenerateTargetLabel(kPseudoIntrinsicRetry);
Vladimir Marko3bc86152014-03-13 14:11:28 +000052 // NOTE: GenInvokeNoInline() handles MarkSafepointPC.
53 m2l_->GenInvokeNoInline(info_);
54 if (cont_ != nullptr) {
55 m2l_->OpUnconditionalBranch(cont_);
56 }
57 }
58
59 private:
60 CallInfo* const info_;
61 };
62
Mingyao Yang3a74d152014-04-21 15:39:44 -070063 AddSlowPath(new (arena_) IntrinsicSlowPathPath(this, info, branch, resume));
Vladimir Marko3bc86152014-03-13 14:11:28 +000064}
65
// Helper macro: explicitly instantiates a templated CallRuntimeHelper* member
// for both ThreadOffset<4> (32-bit) and ThreadOffset<8> (64-bit) targets.
// TODO: This might be used to only instantiate <4> on pure 32b systems.
// NOTE(review): the trailing '\' after the <8> line splices the following
// (blank) line into the macro; harmless today, but confirm it is intentional.
#define INSTANTIATE(sig_part1, ...) \
  template sig_part1(ThreadOffset<4>, __VA_ARGS__); \
  template sig_part1(ThreadOffset<8>, __VA_ARGS__); \
71
72
Brian Carlstrom7940e442013-07-12 13:46:57 -070073/*
74 * To save scheduling time, helper calls are broken into two parts: generation of
Dave Allisond6ed6422014-04-09 23:36:15 +000075 * the helper target address, and the actual call to the helper. Because x86
76 * has a memory call operation, part 1 is a NOP for x86. For other targets,
77 * load arguments between the two parts.
Brian Carlstrom7940e442013-07-12 13:46:57 -070078 */
Andreas Gampe2f244e92014-05-08 03:35:25 -070079// template <size_t pointer_size>
Ian Rogersdd7624d2014-03-14 17:43:00 -070080RegStorage Mir2Lir::CallHelperSetup(ThreadOffset<4> helper_offset) {
Andreas Gampe2f244e92014-05-08 03:35:25 -070081 // All CallRuntimeHelperXXX call this first. So make a central check here.
82 DCHECK_EQ(4U, GetInstructionSetPointerSize(cu_->instruction_set));
83
84 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
85 return RegStorage::InvalidReg();
86 } else {
87 return LoadHelper(helper_offset);
88 }
89}
90
91RegStorage Mir2Lir::CallHelperSetup(ThreadOffset<8> helper_offset) {
92 // All CallRuntimeHelperXXX call this first. So make a central check here.
93 DCHECK_EQ(8U, GetInstructionSetPointerSize(cu_->instruction_set));
94
95 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
96 return RegStorage::InvalidReg();
97 } else {
98 return LoadHelper(helper_offset);
99 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700100}
101
102/* NOTE: if r_tgt is a temp, it will be freed following use */
Andreas Gampe2f244e92014-05-08 03:35:25 -0700103template <size_t pointer_size>
104LIR* Mir2Lir::CallHelper(RegStorage r_tgt, ThreadOffset<pointer_size> helper_offset,
105 bool safepoint_pc, bool use_link) {
Dave Allisond6ed6422014-04-09 23:36:15 +0000106 LIR* call_inst;
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700107 OpKind op = use_link ? kOpBlx : kOpBx;
Dave Allisond6ed6422014-04-09 23:36:15 +0000108 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
109 call_inst = OpThreadMem(op, helper_offset);
110 } else {
111 call_inst = OpReg(op, r_tgt);
112 FreeTemp(r_tgt);
113 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700114 if (safepoint_pc) {
115 MarkSafepointPC(call_inst);
116 }
117 return call_inst;
118}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700119template LIR* Mir2Lir::CallHelper(RegStorage r_tgt, ThreadOffset<4> helper_offset,
120 bool safepoint_pc, bool use_link);
121template LIR* Mir2Lir::CallHelper(RegStorage r_tgt, ThreadOffset<8> helper_offset,
122 bool safepoint_pc, bool use_link);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700123
Andreas Gampe2f244e92014-05-08 03:35:25 -0700124template <size_t pointer_size>
125void Mir2Lir::CallRuntimeHelper(ThreadOffset<pointer_size> helper_offset, bool safepoint_pc) {
Mingyao Yang42894562014-04-07 12:42:16 -0700126 RegStorage r_tgt = CallHelperSetup(helper_offset);
127 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700128 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Mingyao Yang42894562014-04-07 12:42:16 -0700129}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700130INSTANTIATE(void Mir2Lir::CallRuntimeHelper, bool safepoint_pc)
Mingyao Yang42894562014-04-07 12:42:16 -0700131
Andreas Gampe2f244e92014-05-08 03:35:25 -0700132template <size_t pointer_size>
133void Mir2Lir::CallRuntimeHelperImm(ThreadOffset<pointer_size> helper_offset, int arg0, bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800134 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700135 LoadConstant(TargetReg(kArg0), arg0);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000136 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700137 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700138}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700139INSTANTIATE(void Mir2Lir::CallRuntimeHelperImm, int arg0, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700140
Andreas Gampe2f244e92014-05-08 03:35:25 -0700141template <size_t pointer_size>
142void Mir2Lir::CallRuntimeHelperReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0,
Ian Rogersdd7624d2014-03-14 17:43:00 -0700143 bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800144 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700145 OpRegCopy(TargetReg(kArg0), arg0);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000146 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700147 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700148}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700149INSTANTIATE(void Mir2Lir::CallRuntimeHelperReg, RegStorage arg0, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700150
// Runtime call with a single Dalvik value argument (narrow or wide).
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperRegLocation(ThreadOffset<pointer_size> helper_offset,
                                           RegLocation arg0, bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  if (arg0.wide == 0) {
    // Narrow value: first FP or first core argument register by value type.
    LoadValueDirectFixed(arg0, TargetReg(arg0.fp ? kFArg0 : kArg0));
  } else {
    RegStorage r_tmp;
    if (cu_->target64) {
      // 64-bit target: a wide value fits in the single 64-bit kArg0 register.
      r_tmp = RegStorage::Solo64(TargetReg(kArg0).GetReg());
    } else {
      // 32-bit target: wide value occupies a register pair (FP pair for
      // float/double-typed values).
      r_tmp = RegStorage::MakeRegPair(TargetReg(arg0.fp ? kFArg0 : kArg0),
                                      TargetReg(arg0.fp ? kFArg1 : kArg1));
    }
    LoadValueDirectWideFixed(arg0, r_tmp);
  }
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegLocation, RegLocation arg0, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700171
Andreas Gampe2f244e92014-05-08 03:35:25 -0700172template <size_t pointer_size>
173void Mir2Lir::CallRuntimeHelperImmImm(ThreadOffset<pointer_size> helper_offset, int arg0, int arg1,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700174 bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800175 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700176 LoadConstant(TargetReg(kArg0), arg0);
177 LoadConstant(TargetReg(kArg1), arg1);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000178 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700179 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700180}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700181INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmImm, int arg0, int arg1, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700182
// Runtime call with (immediate, Dalvik value). The value is placed first so
// that loading the immediate into kArg0 cannot disturb it.
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperImmRegLocation(ThreadOffset<pointer_size> helper_offset, int arg0,
                                              RegLocation arg1, bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  if (arg1.wide == 0) {
    LoadValueDirectFixed(arg1, TargetReg(kArg1, arg1));
  } else {
    RegStorage r_tmp;
    if (cu_->target64) {
      // 64-bit target: the wide value fits in the single 64-bit kArg1.
      r_tmp = TargetReg(kArg1, true);
    } else {
      if (cu_->instruction_set == kMips) {
        // skip kArg1 for stack alignment.
        r_tmp = RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3));
      } else {
        // Other 32-bit targets: wide value in the kArg1/kArg2 pair.
        r_tmp = RegStorage::MakeRegPair(TargetReg(kArg1), TargetReg(kArg2));
      }
    }
    LoadValueDirectWideFixed(arg1, r_tmp);
  }
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmRegLocation, int arg0, RegLocation arg1,
            bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700209
Andreas Gampe2f244e92014-05-08 03:35:25 -0700210template <size_t pointer_size>
211void Mir2Lir::CallRuntimeHelperRegLocationImm(ThreadOffset<pointer_size> helper_offset,
212 RegLocation arg0, int arg1, bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800213 RegStorage r_tgt = CallHelperSetup(helper_offset);
Andreas Gampef9872f02014-07-01 19:00:09 -0700214 DCHECK(!arg0.wide);
215 LoadValueDirectFixed(arg0, TargetReg(kArg0, arg0));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700216 LoadConstant(TargetReg(kArg1), arg1);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000217 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700218 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700219}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700220INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegLocationImm, RegLocation arg0, int arg1,
221 bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700222
Andreas Gampe2f244e92014-05-08 03:35:25 -0700223template <size_t pointer_size>
224void Mir2Lir::CallRuntimeHelperImmReg(ThreadOffset<pointer_size> helper_offset, int arg0,
225 RegStorage arg1, bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800226 RegStorage r_tgt = CallHelperSetup(helper_offset);
Andreas Gampe4b537a82014-06-30 22:24:53 -0700227 OpRegCopy(TargetReg(kArg1, arg1.Is64Bit()), arg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700228 LoadConstant(TargetReg(kArg0), arg0);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000229 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700230 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700231}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700232INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmReg, int arg0, RegStorage arg1, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700233
Andreas Gampe2f244e92014-05-08 03:35:25 -0700234template <size_t pointer_size>
235void Mir2Lir::CallRuntimeHelperRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0,
236 int arg1, bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800237 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700238 OpRegCopy(TargetReg(kArg0), arg0);
239 LoadConstant(TargetReg(kArg1), arg1);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000240 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700241 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700242}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700243INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegImm, RegStorage arg0, int arg1, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700244
Andreas Gampe2f244e92014-05-08 03:35:25 -0700245template <size_t pointer_size>
246void Mir2Lir::CallRuntimeHelperImmMethod(ThreadOffset<pointer_size> helper_offset, int arg0,
Ian Rogersdd7624d2014-03-14 17:43:00 -0700247 bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800248 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700249 LoadCurrMethodDirect(TargetReg(kArg1));
250 LoadConstant(TargetReg(kArg0), arg0);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000251 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700252 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700253}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700254INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmMethod, int arg0, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700255
// Runtime call with (register, current method).
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperRegMethod(ThreadOffset<pointer_size> helper_offset, RegStorage arg0,
                                         bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  // arg0 must not already occupy kArg1, which is overwritten below.
  DCHECK(!IsSameReg(TargetReg(kArg1), arg0));
  if (TargetReg(kArg0, arg0.Is64Bit()).NotExactlyEquals(arg0)) {
    // Only copy when arg0 is not already exactly the kArg0 ABI register.
    OpRegCopy(TargetReg(kArg0, arg0.Is64Bit()), arg0);
  }
  LoadCurrMethodDirect(TargetRefReg(kArg1));
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegMethod, RegStorage arg0, bool safepoint_pc)
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -0800269
// Runtime call with (register, current method, Dalvik value).
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperRegMethodRegLocation(ThreadOffset<pointer_size> helper_offset,
                                                    RegStorage arg0, RegLocation arg2,
                                                    bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  // arg0 must not already occupy kArg1, which is overwritten below.
  DCHECK(!IsSameReg(TargetReg(kArg1), arg0));
  if (TargetReg(kArg0, arg0.Is64Bit()).NotExactlyEquals(arg0)) {
    // Only copy when arg0 is not already exactly the kArg0 ABI register.
    OpRegCopy(TargetReg(kArg0, arg0.Is64Bit()), arg0);
  }
  LoadCurrMethodDirect(TargetRefReg(kArg1));
  LoadValueDirectFixed(arg2, TargetReg(kArg2, arg2));
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegMethodRegLocation, RegStorage arg0, RegLocation arg2,
            bool safepoint_pc)
Hiroshi Yamauchibb8f0ab2014-01-27 16:50:29 -0800286
// Runtime call with two Dalvik value arguments (each narrow or wide).
// Register assignment is target-specific: arm64 is handled up front; the
// generic path below covers mips / x86 / x86_64 / arm32 with per-ISA cases.
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset,
                                                      RegLocation arg0, RegLocation arg1,
                                                      bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  if (cu_->instruction_set == kArm64) {
    // arm64 keeps FP and core argument registers in separate sequences.
    RegStorage arg0_reg = TargetReg((arg0.fp) ? kFArg0 : kArg0, arg0);

    RegStorage arg1_reg;
    if (arg1.fp == arg0.fp) {
      // Same register class as arg0: take the second register of that class.
      arg1_reg = TargetReg((arg1.fp) ? kFArg1 : kArg1, arg1);
    } else {
      // Different class: arg1 is the first register of its own class.
      arg1_reg = TargetReg((arg1.fp) ? kFArg0 : kArg0, arg1);
    }

    if (arg0.wide == 0) {
      LoadValueDirectFixed(arg0, arg0_reg);
    } else {
      LoadValueDirectWideFixed(arg0, arg0_reg);
    }

    if (arg1.wide == 0) {
      LoadValueDirectFixed(arg1, arg1_reg);
    } else {
      LoadValueDirectWideFixed(arg1, arg1_reg);
    }
  } else {
    if (arg0.wide == 0) {
      // Narrow arg0 goes in the first FP or core argument register.
      LoadValueDirectFixed(arg0, arg0.fp ? TargetReg(kFArg0) : TargetReg(kArg0));
      if (arg1.wide == 0) {
        if (cu_->instruction_set == kMips) {
          LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg1));
        } else if (cu_->instruction_set == kArm64) {
          // NOTE(review): unreachable — kArm64 was already handled by the
          // enclosing branch above; confirm this case can be removed.
          LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg1) : TargetReg(kArg1));
        } else if (cu_->instruction_set == kX86_64) {
          // x86_64 shares one argument sequence, so arg1's slot depends on
          // whether arg0 consumed an FP or a core register.
          if (arg0.fp) {
            LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg1) : TargetReg(kArg0));
          } else {
            LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg0) : TargetReg(kArg1));
          }
        } else {
          LoadValueDirectFixed(arg1, TargetReg(kArg1));
        }
      } else {
        // Narrow arg0, wide arg1.
        if (cu_->instruction_set == kMips) {
          RegStorage r_tmp;
          if (arg1.fp) {
            r_tmp = RegStorage::MakeRegPair(TargetReg(kFArg2), TargetReg(kFArg3));
          } else {
            // skip kArg1 for stack alignment.
            r_tmp = RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3));
          }
          LoadValueDirectWideFixed(arg1, r_tmp);
        } else {
          RegStorage r_tmp;
          if (cu_->target64) {
            r_tmp = RegStorage::Solo64(TargetReg(kArg1).GetReg());
          } else {
            r_tmp = RegStorage::MakeRegPair(TargetReg(kArg1), TargetReg(kArg2));
          }
          LoadValueDirectWideFixed(arg1, r_tmp);
        }
      }
    } else {
      // Wide arg0: solo 64-bit register on 64-bit targets, pair otherwise.
      RegStorage r_tmp;
      if (arg0.fp) {
        if (cu_->target64) {
          r_tmp = RegStorage::FloatSolo64(TargetReg(kFArg0).GetReg());
        } else {
          r_tmp = RegStorage::MakeRegPair(TargetReg(kFArg0), TargetReg(kFArg1));
        }
      } else {
        if (cu_->target64) {
          r_tmp = RegStorage::Solo64(TargetReg(kArg0).GetReg());
        } else {
          r_tmp = RegStorage::MakeRegPair(TargetReg(kArg0), TargetReg(kArg1));
        }
      }
      LoadValueDirectWideFixed(arg0, r_tmp);
      if (arg1.wide == 0) {
        // Wide arg0, narrow arg1: arg1 starts after arg0's register(s).
        if (cu_->target64) {
          LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg1) : TargetReg(kArg1));
        } else {
          LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg2));
        }
      } else {
        // Wide arg0, wide arg1.
        RegStorage r_tmp;
        if (arg1.fp) {
          if (cu_->target64) {
            r_tmp = RegStorage::FloatSolo64(TargetReg(kFArg1).GetReg());
          } else {
            r_tmp = RegStorage::MakeRegPair(TargetReg(kFArg2), TargetReg(kFArg3));
          }
        } else {
          if (cu_->target64) {
            r_tmp = RegStorage::Solo64(TargetReg(kArg1).GetReg());
          } else {
            r_tmp = RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3));
          }
        }
        LoadValueDirectWideFixed(arg1, r_tmp);
      }
    }
  }
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegLocationRegLocation, RegLocation arg0,
            RegLocation arg1, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700396
Mingyao Yang80365d92014-04-18 12:10:58 -0700397void Mir2Lir::CopyToArgumentRegs(RegStorage arg0, RegStorage arg1) {
Andreas Gampe49c5f502014-06-20 11:34:17 -0700398 if (IsSameReg(arg1, TargetReg(kArg0))) {
399 if (IsSameReg(arg0, TargetReg(kArg1))) {
Mingyao Yang80365d92014-04-18 12:10:58 -0700400 // Swap kArg0 and kArg1 with kArg2 as temp.
Andreas Gampe4b537a82014-06-30 22:24:53 -0700401 OpRegCopy(TargetReg(kArg2, arg1.Is64Bit()), arg1);
402 OpRegCopy(TargetReg(kArg0, arg0.Is64Bit()), arg0);
403 OpRegCopy(TargetReg(kArg1, arg1.Is64Bit()), TargetReg(kArg2, arg1.Is64Bit()));
Mingyao Yang80365d92014-04-18 12:10:58 -0700404 } else {
Andreas Gampe4b537a82014-06-30 22:24:53 -0700405 OpRegCopy(TargetReg(kArg1, arg1.Is64Bit()), arg1);
406 OpRegCopy(TargetReg(kArg0, arg0.Is64Bit()), arg0);
Mingyao Yang80365d92014-04-18 12:10:58 -0700407 }
408 } else {
Andreas Gampe4b537a82014-06-30 22:24:53 -0700409 OpRegCopy(TargetReg(kArg0, arg0.Is64Bit()), arg0);
410 OpRegCopy(TargetReg(kArg1, arg1.Is64Bit()), arg1);
Mingyao Yang80365d92014-04-18 12:10:58 -0700411 }
412}
413
Andreas Gampe2f244e92014-05-08 03:35:25 -0700414template <size_t pointer_size>
415void Mir2Lir::CallRuntimeHelperRegReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0,
buzbee2700f7e2014-03-07 09:46:20 -0800416 RegStorage arg1, bool safepoint_pc) {
417 RegStorage r_tgt = CallHelperSetup(helper_offset);
Mingyao Yang80365d92014-04-18 12:10:58 -0700418 CopyToArgumentRegs(arg0, arg1);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000419 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700420 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700421}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700422INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegReg, RegStorage arg0, RegStorage arg1,
423 bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700424
Andreas Gampe2f244e92014-05-08 03:35:25 -0700425template <size_t pointer_size>
426void Mir2Lir::CallRuntimeHelperRegRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0,
buzbee2700f7e2014-03-07 09:46:20 -0800427 RegStorage arg1, int arg2, bool safepoint_pc) {
428 RegStorage r_tgt = CallHelperSetup(helper_offset);
Mingyao Yang80365d92014-04-18 12:10:58 -0700429 CopyToArgumentRegs(arg0, arg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700430 LoadConstant(TargetReg(kArg2), arg2);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000431 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700432 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700433}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700434INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegRegImm, RegStorage arg0, RegStorage arg1, int arg2,
435 bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700436
// Runtime call with (immediate, current method, Dalvik value).
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperImmMethodRegLocation(ThreadOffset<pointer_size> helper_offset,
                                                    int arg0, RegLocation arg2, bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  LoadValueDirectFixed(arg2, TargetReg(kArg2, arg2));
  LoadCurrMethodDirect(TargetReg(kArg1));
  // NOTE(review): the int |arg0| is passed as TargetReg's second argument
  // here, while sibling helpers pass a RegLocation or a bool — confirm the
  // intended overload (and resulting register shape) is selected.
  LoadConstant(TargetReg(kArg0, arg0), arg0);
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmMethodRegLocation, int arg0, RegLocation arg2,
            bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700449
Andreas Gampe2f244e92014-05-08 03:35:25 -0700450template <size_t pointer_size>
451void Mir2Lir::CallRuntimeHelperImmMethodImm(ThreadOffset<pointer_size> helper_offset, int arg0,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700452 int arg2, bool safepoint_pc) {
buzbee2700f7e2014-03-07 09:46:20 -0800453 RegStorage r_tgt = CallHelperSetup(helper_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700454 LoadCurrMethodDirect(TargetReg(kArg1));
455 LoadConstant(TargetReg(kArg2), arg2);
456 LoadConstant(TargetReg(kArg0), arg0);
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000457 ClobberCallerSave();
Andreas Gampe2f244e92014-05-08 03:35:25 -0700458 CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700459}
Andreas Gampe2f244e92014-05-08 03:35:25 -0700460INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmMethodImm, int arg0, int arg2, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700461
// Runtime call with (immediate, narrow value, narrow-or-wide value).
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset,
                                                         int arg0, RegLocation arg1,
                                                         RegLocation arg2, bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  DCHECK_EQ(static_cast<unsigned int>(arg1.wide), 0U);  // The static_cast works around an
                                                        // instantiation bug in GCC.
  LoadValueDirectFixed(arg1, TargetReg(kArg1, arg1));
  if (arg2.wide == 0) {
    LoadValueDirectFixed(arg2, TargetReg(kArg2, arg2));
  } else {
    RegStorage r_tmp;
    if (cu_->target64) {
      // 64-bit target: wide arg2 fits in the single 64-bit kArg2 register.
      r_tmp = TargetReg(kArg2, true);
    } else {
      // 32-bit target: wide arg2 occupies the kArg2/kArg3 register pair.
      r_tmp = RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3));
    }
    LoadValueDirectWideFixed(arg2, r_tmp);
  }
  // Immediate last so it cannot disturb the value loads above.
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperImmRegLocationRegLocation, int arg0, RegLocation arg1,
            RegLocation arg2, bool safepoint_pc)
Brian Carlstrom7940e442013-07-12 13:46:57 -0700487
// Runtime call with three Dalvik values loaded into kArg0..kArg2 in order.
// No wide handling here — only the narrow LoadValueDirectFixed() is used.
template <size_t pointer_size>
void Mir2Lir::CallRuntimeHelperRegLocationRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset,
                                                                 RegLocation arg0, RegLocation arg1,
                                                                 RegLocation arg2,
                                                                 bool safepoint_pc) {
  RegStorage r_tgt = CallHelperSetup(helper_offset);
  LoadValueDirectFixed(arg0, TargetReg(kArg0, arg0));
  LoadValueDirectFixed(arg1, TargetReg(kArg1, arg1));
  LoadValueDirectFixed(arg2, TargetReg(kArg2, arg2));
  ClobberCallerSave();
  CallHelper<pointer_size>(r_tgt, helper_offset, safepoint_pc);
}
INSTANTIATE(void Mir2Lir::CallRuntimeHelperRegLocationRegLocationRegLocation, RegLocation arg0,
            RegLocation arg1, RegLocation arg2, bool safepoint_pc)
Ian Rogersa9a82542013-10-04 11:17:26 -0700502
/*
 * If there are any ins passed in registers that have not been promoted
 * to a callee-save register, flush them to the frame.  Perform initial
 * assignment of promoted arguments.
 *
 * ArgLocs is an array of location records describing the incoming arguments
 * with one location record per word of argument.
 */
void Mir2Lir::FlushIns(RegLocation* ArgLocs, RegLocation rl_method) {
  /*
   * Dummy up a RegLocation for the incoming StackReference<mirror::ArtMethod>
   * It will attempt to keep kArg0 live (or copy it to home location
   * if promoted).
   */
  RegLocation rl_src = rl_method;
  rl_src.location = kLocPhysReg;
  rl_src.reg = TargetRefReg(kArg0);
  rl_src.home = false;
  MarkLive(rl_src);
  StoreValue(rl_method, rl_src);
  // If Method* has been promoted, explicitly flush it to slot 0 of the frame
  // as well, so the in-memory copy at [kSp + 0] stays valid.
  if (rl_method.location == kLocPhysReg) {
    StoreRefDisp(TargetReg(kSp), 0, rl_src.reg, kNotVolatile);
  }

  // No incoming arguments beyond the method pointer: nothing left to place.
  if (cu_->num_ins == 0) {
    return;
  }

  // Ins occupy the highest-numbered Dalvik virtual registers.
  int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
  /*
   * Copy incoming arguments to their proper home locations.
   * NOTE: an older version of dx had an issue in which
   * it would reuse static method argument registers.
   * This could result in the same Dalvik virtual register
   * being promoted to both core and fp regs. To account for this,
   * we only copy to the corresponding promoted physical register
   * if it matches the type of the SSA name for the incoming
   * argument. It is also possible that long and double arguments
   * end up half-promoted. In those cases, we must flush the promoted
   * half to memory as well.
   */
  // All loads/stores below touch the Dalvik register frame area.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  for (int i = 0; i < cu_->num_ins; i++) {
    PromotionMap* v_map = &promotion_map_[start_vreg + i];
    RegStorage reg = GetArgMappingToPhysicalReg(i);

    if (reg.Valid()) {
      // If arriving in register: copy to promoted home when the promotion
      // type matches the argument type; otherwise flush to the frame slot.
      bool need_flush = true;
      RegLocation* t_loc = &ArgLocs[i];
      if ((v_map->core_location == kLocPhysReg) && !t_loc->fp) {
        OpRegCopy(RegStorage::Solo32(v_map->core_reg), reg);
        need_flush = false;
      } else if ((v_map->fp_location == kLocPhysReg) && t_loc->fp) {
        OpRegCopy(RegStorage::Solo32(v_map->fp_reg), reg);
        need_flush = false;
      } else {
        need_flush = true;
      }

      // For wide args, force flush if not fully promoted.
      if (t_loc->wide) {
        // The partner half's promotion record: previous entry if this is the
        // high word, next entry otherwise.
        PromotionMap* p_map = v_map + (t_loc->high_word ? -1 : +1);
        // Is only half promoted?
        need_flush |= (p_map->core_location != v_map->core_location) ||
            (p_map->fp_location != v_map->fp_location);
        if ((cu_->instruction_set == kThumb2) && t_loc->fp && !need_flush) {
          /*
           * In Arm, a double is represented as a pair of consecutive single float
           * registers starting at an even number. It's possible that both Dalvik vRegs
           * representing the incoming double were independently promoted as singles - but
           * not in a form usable as a double.  If so, we need to flush - even though the
           * incoming arg appears fully in register.  At this point in the code, both
           * halves of the double are promoted.  Make sure they are in a usable form.
           */
          int lowreg_index = start_vreg + i + (t_loc->high_word ? -1 : 0);
          int low_reg = promotion_map_[lowreg_index].fp_reg;
          int high_reg = promotion_map_[lowreg_index + 1].fp_reg;
          // Usable as a double only if the low single is even-numbered and the
          // high single is the immediately following register.
          if (((low_reg & 0x1) != 0) || (high_reg != (low_reg + 1))) {
            need_flush = true;
          }
        }
      }
      if (need_flush) {
        Store32Disp(TargetReg(kSp), SRegOffset(start_vreg + i), reg);
      }
    } else {
      // If arriving in frame & promoted: load the frame copy into the
      // promoted register(s).  Both core and fp homes are filled if present
      // (see the dx note above for why both can exist).
      if (v_map->core_location == kLocPhysReg) {
        Load32Disp(TargetReg(kSp), SRegOffset(start_vreg + i), RegStorage::Solo32(v_map->core_reg));
      }
      if (v_map->fp_location == kLocPhysReg) {
        Load32Disp(TargetReg(kSp), SRegOffset(start_vreg + i), RegStorage::Solo32(v_map->fp_reg));
      }
    }
  }
}
601
/*
 * Bit of a hack here - in the absence of a real scheduling pass,
 * emit the next instruction in static & direct invoke sequences.
 *
 * Returns the next state on success, -1 when the sequence is complete.
 * A direct_code/direct_method value of 0 means "not known"; the sentinel
 * uintptr_t(-1) means "known but must be patched in later" (load via the
 * LoadCodeAddress/LoadMethodAddress fixup paths instead of a constant).
 */
static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info,
                          int state, const MethodReference& target_method,
                          uint32_t unused,
                          uintptr_t direct_code, uintptr_t direct_method,
                          InvokeType type) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  if (direct_code != 0 && direct_method != 0) {
    // Fast case: both the code address and the method are known at compile
    // time — a single state materializes them.
    switch (state) {
      case 0:  // Get the current Method* [sets kArg0]
        if (direct_code != static_cast<uintptr_t>(-1)) {
          // x86/x86_64 do not use kInvokeTgt; they branch directly.
          if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
            cg->LoadConstant(cg->TargetReg(kInvokeTgt), direct_code);
          }
        } else if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
          cg->LoadCodeAddress(target_method, type, kInvokeTgt);
        }
        if (direct_method != static_cast<uintptr_t>(-1)) {
          cg->LoadConstant(cg->TargetReg(kArg0), direct_method);
        } else {
          cg->LoadMethodAddress(target_method, type, kArg0);
        }
        break;
      default:
        return -1;
    }
  } else {
    // Slow case: walk current method -> dex cache -> resolved target method.
    RegStorage arg0_ref = cg->TargetRefReg(kArg0);
    switch (state) {
      case 0:  // Get the current Method* [sets kArg0]
        // TUNING: we can save a reg copy if Method* has been promoted.
        cg->LoadCurrMethodDirect(arg0_ref);
        break;
      case 1:  // Get method->dex_cache_resolved_methods_
        cg->LoadRefDisp(arg0_ref,
                        mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(),
                        arg0_ref,
                        kNotVolatile);
        // Set up direct code if known.
        if (direct_code != 0) {
          if (direct_code != static_cast<uintptr_t>(-1)) {
            cg->LoadConstant(cg->TargetReg(kInvokeTgt), direct_code);
          } else if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
            CHECK_LT(target_method.dex_method_index, target_method.dex_file->NumMethodIds());
            cg->LoadCodeAddress(target_method, type, kInvokeTgt);
          }
        }
        break;
      case 2:  // Grab target method* out of the resolved-methods array.
        CHECK_EQ(cu->dex_file, target_method.dex_file);
        cg->LoadRefDisp(arg0_ref,
                        ObjArray::OffsetOfElement(target_method.dex_method_index).Int32Value(),
                        arg0_ref,
                        kNotVolatile);
        break;
      case 3:  // Grab the code from the method*
        if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
          if (direct_code == 0) {
            cg->LoadWordDisp(arg0_ref,
                             mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(),
                             cg->TargetReg(kInvokeTgt));
          }
          break;
        }
        // Intentional fallthrough for x86
      default:
        return -1;
    }
  }
  return state + 1;
}
676
/*
 * Bit of a hack here - in the absence of a real scheduling pass,
 * emit the next instruction in a virtual invoke sequence.
 * We can use kLr as a temp prior to target address loading
 * Note also that we'll load the first argument ("this") into
 * kArg1 here rather than the standard LoadArgRegs.
 *
 * Returns the next state, or -1 when the sequence is complete.
 */
static int NextVCallInsn(CompilationUnit* cu, CallInfo* info,
                         int state, const MethodReference& target_method,
                         uint32_t method_idx, uintptr_t unused, uintptr_t unused2,
                         InvokeType unused3) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  /*
   * This is the fast path in which the target virtual method is
   * fully resolved at compile time.
   */
  switch (state) {
    case 0: {  // Get "this" [set kArg1]
      RegLocation rl_arg = info->args[0];
      cg->LoadValueDirectFixed(rl_arg, cg->TargetRefReg(kArg1));
      break;
    }
    case 1:  // Is "this" null? [use kArg1]
      cg->GenNullCheck(cg->TargetRefReg(kArg1), info->opt_flags);
      // get this->klass_ [use kArg1, set kInvokeTgt]
      cg->LoadRefDisp(cg->TargetRefReg(kArg1), mirror::Object::ClassOffset().Int32Value(),
                      cg->TargetReg(kInvokeTgt),
                      kNotVolatile);
      cg->MarkPossibleNullPointerException(info->opt_flags);
      break;
    case 2:  // Get this->klass_->vtable [usr kInvokeTgt, set kInvokeTgt]
      cg->LoadRefDisp(cg->TargetReg(kInvokeTgt), mirror::Class::VTableOffset().Int32Value(),
                      cg->TargetReg(kInvokeTgt),
                      kNotVolatile);
      break;
    case 3:  // Get target method [use kInvokeTgt, set kArg0]
      // Index the vtable by the compile-time-resolved method index.
      cg->LoadRefDisp(cg->TargetReg(kInvokeTgt),
                      ObjArray::OffsetOfElement(method_idx).Int32Value(),
                      cg->TargetRefReg(kArg0),
                      kNotVolatile);
      break;
    case 4:  // Get the compiled code address [uses kArg0, sets kInvokeTgt]
      // x86/x86_64 fetch the entry point differently and fall through to done.
      if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
        cg->LoadWordDisp(cg->TargetRefReg(kArg0),
                         mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(),
                         cg->TargetReg(kInvokeTgt));
        break;
      }
      // Intentional fallthrough for X86
    default:
      return -1;
  }
  return state + 1;
}
731
/*
 * Emit the next instruction in an invoke interface sequence. This will do a lookup in the
 * class's IMT, calling either the actual method or art_quick_imt_conflict_trampoline if
 * more than one interface method map to the same index. Note also that we'll load the first
 * argument ("this") into kArg1 here rather than the standard LoadArgRegs.
 *
 * Returns the next state, or -1 when the sequence is complete.
 */
static int NextInterfaceCallInsn(CompilationUnit* cu, CallInfo* info, int state,
                                 const MethodReference& target_method,
                                 uint32_t method_idx, uintptr_t unused,
                                 uintptr_t direct_method, InvokeType unused2) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());

  switch (state) {
    case 0:  // Set target method index in case of conflict [set kHiddenArg, kHiddenFpArg (x86)]
      CHECK_LT(target_method.dex_method_index, target_method.dex_file->NumMethodIds());
      cg->LoadConstant(cg->TargetReg(kHiddenArg), target_method.dex_method_index);
      // x86 additionally mirrors the hidden argument into an fp register.
      if (cu->instruction_set == kX86) {
        cg->OpRegCopy(cg->TargetReg(kHiddenFpArg), cg->TargetReg(kHiddenArg));
      }
      break;
    case 1: {  // Get "this" [set kArg1]
      RegLocation rl_arg = info->args[0];
      cg->LoadValueDirectFixed(rl_arg, cg->TargetRefReg(kArg1));
      break;
    }
    case 2:  // Is "this" null? [use kArg1]
      cg->GenNullCheck(cg->TargetRefReg(kArg1), info->opt_flags);
      // Get this->klass_ [use kArg1, set kInvokeTgt]
      cg->LoadRefDisp(cg->TargetRefReg(kArg1), mirror::Object::ClassOffset().Int32Value(),
                      cg->TargetReg(kInvokeTgt),
                      kNotVolatile);
      cg->MarkPossibleNullPointerException(info->opt_flags);
      break;
    case 3:  // Get this->klass_->imtable [use kInvokeTgt, set kInvokeTgt]
      // NOTE: native pointer.
      cg->LoadRefDisp(cg->TargetReg(kInvokeTgt), mirror::Class::ImTableOffset().Int32Value(),
                      cg->TargetReg(kInvokeTgt),
                      kNotVolatile);
      break;
    case 4:  // Get target method [use kInvokeTgt, set kArg0]
      // NOTE: native pointer.  IMT slots are shared modulo kImtSize.
      cg->LoadRefDisp(cg->TargetReg(kInvokeTgt),
                      ObjArray::OffsetOfElement(method_idx % ClassLinker::kImtSize).Int32Value(),
                      cg->TargetRefReg(kArg0),
                      kNotVolatile);
      break;
    case 5:  // Get the compiled code address [use kArg0, set kInvokeTgt]
      if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
        cg->LoadWordDisp(cg->TargetRefReg(kArg0),
                         mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(),
                         cg->TargetReg(kInvokeTgt));
        break;
      }
      // Intentional fallthrough for X86
    default:
      return -1;
  }
  return state + 1;
}
791
// Single-state invoke sequence used when the callee cannot be fully resolved
// at compile time: load the runtime trampoline address (non-x86 only — x86
// branches to the trampoline directly) and pass the dex method index in
// kArg0 so the trampoline can resolve the method at runtime.
// Returns 1 after emitting, -1 thereafter (sequence complete).
template <size_t pointer_size>
static int NextInvokeInsnSP(CompilationUnit* cu, CallInfo* info, ThreadOffset<pointer_size> trampoline,
                            int state, const MethodReference& target_method,
                            uint32_t method_idx) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  /*
   * This handles the case in which the base method is not fully
   * resolved at compile time, we bail to a runtime helper.
   */
  if (state == 0) {
    if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
      // Load trampoline target out of the thread's entrypoint table.
      cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(), cg->TargetReg(kInvokeTgt));
    }
    // Load kArg0 with method index
    CHECK_EQ(cu->dex_file, target_method.dex_file);
    cg->LoadConstant(cg->TargetReg(kArg0), target_method.dex_method_index);
    return 1;
  }
  return -1;
}
813
814static int NextStaticCallInsnSP(CompilationUnit* cu, CallInfo* info,
815 int state,
816 const MethodReference& target_method,
Vladimir Markof096aad2014-01-23 15:51:58 +0000817 uint32_t unused, uintptr_t unused2,
818 uintptr_t unused3, InvokeType unused4) {
buzbee33ae5582014-06-12 14:56:32 -0700819 if (cu->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700820 ThreadOffset<8> trampoline = QUICK_ENTRYPOINT_OFFSET(8, pInvokeStaticTrampolineWithAccessCheck);
821 return NextInvokeInsnSP<8>(cu, info, trampoline, state, target_method, 0);
822 } else {
823 ThreadOffset<4> trampoline = QUICK_ENTRYPOINT_OFFSET(4, pInvokeStaticTrampolineWithAccessCheck);
824 return NextInvokeInsnSP<4>(cu, info, trampoline, state, target_method, 0);
825 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700826}
827
828static int NextDirectCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
829 const MethodReference& target_method,
Vladimir Markof096aad2014-01-23 15:51:58 +0000830 uint32_t unused, uintptr_t unused2,
831 uintptr_t unused3, InvokeType unused4) {
buzbee33ae5582014-06-12 14:56:32 -0700832 if (cu->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700833 ThreadOffset<8> trampoline = QUICK_ENTRYPOINT_OFFSET(8, pInvokeDirectTrampolineWithAccessCheck);
834 return NextInvokeInsnSP<8>(cu, info, trampoline, state, target_method, 0);
835 } else {
836 ThreadOffset<4> trampoline = QUICK_ENTRYPOINT_OFFSET(4, pInvokeDirectTrampolineWithAccessCheck);
837 return NextInvokeInsnSP<4>(cu, info, trampoline, state, target_method, 0);
838 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700839}
840
841static int NextSuperCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
842 const MethodReference& target_method,
Vladimir Markof096aad2014-01-23 15:51:58 +0000843 uint32_t unused, uintptr_t unused2,
844 uintptr_t unused3, InvokeType unused4) {
buzbee33ae5582014-06-12 14:56:32 -0700845 if (cu->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700846 ThreadOffset<8> trampoline = QUICK_ENTRYPOINT_OFFSET(8, pInvokeSuperTrampolineWithAccessCheck);
847 return NextInvokeInsnSP<8>(cu, info, trampoline, state, target_method, 0);
848 } else {
849 ThreadOffset<4> trampoline = QUICK_ENTRYPOINT_OFFSET(4, pInvokeSuperTrampolineWithAccessCheck);
850 return NextInvokeInsnSP<4>(cu, info, trampoline, state, target_method, 0);
851 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700852}
853
854static int NextVCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
855 const MethodReference& target_method,
Vladimir Markof096aad2014-01-23 15:51:58 +0000856 uint32_t unused, uintptr_t unused2,
857 uintptr_t unused3, InvokeType unused4) {
buzbee33ae5582014-06-12 14:56:32 -0700858 if (cu->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700859 ThreadOffset<8> trampoline = QUICK_ENTRYPOINT_OFFSET(8, pInvokeVirtualTrampolineWithAccessCheck);
860 return NextInvokeInsnSP<8>(cu, info, trampoline, state, target_method, 0);
861 } else {
862 ThreadOffset<4> trampoline = QUICK_ENTRYPOINT_OFFSET(4, pInvokeVirtualTrampolineWithAccessCheck);
863 return NextInvokeInsnSP<4>(cu, info, trampoline, state, target_method, 0);
864 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700865}
866
867static int NextInterfaceCallInsnWithAccessCheck(CompilationUnit* cu,
868 CallInfo* info, int state,
869 const MethodReference& target_method,
Vladimir Markof096aad2014-01-23 15:51:58 +0000870 uint32_t unused, uintptr_t unused2,
871 uintptr_t unused3, InvokeType unused4) {
buzbee33ae5582014-06-12 14:56:32 -0700872 if (cu->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -0700873 ThreadOffset<8> trampoline = QUICK_ENTRYPOINT_OFFSET(8, pInvokeInterfaceTrampolineWithAccessCheck);
874 return NextInvokeInsnSP<8>(cu, info, trampoline, state, target_method, 0);
875 } else {
876 ThreadOffset<4> trampoline = QUICK_ENTRYPOINT_OFFSET(4, pInvokeInterfaceTrampolineWithAccessCheck);
877 return NextInvokeInsnSP<4>(cu, info, trampoline, state, target_method, 0);
878 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700879}
880
// Loads up to three argument words into the fixed argument registers
// kArg1..kArg3, interleaving calls to |next_call_insn| so the invoke
// sequence advances between loads.  |skip_this| skips the first slot
// (the receiver was loaded earlier by the Next*CallInsn state machine).
// Returns the updated invoke-sequence state.
int Mir2Lir::LoadArgRegs(CallInfo* info, int call_state,
                         NextCallInsn next_call_insn,
                         const MethodReference& target_method,
                         uint32_t vtable_idx, uintptr_t direct_code,
                         uintptr_t direct_method, InvokeType type, bool skip_this) {
  // Three argument registers available: kArg1, kArg2, kArg3.
  int last_arg_reg = 3 - 1;
  int arg_regs[3] = {TargetReg(kArg1).GetReg(), TargetReg(kArg2).GetReg(), TargetReg(kArg3).GetReg()};

  int next_reg = 0;
  int next_arg = 0;
  if (skip_this) {
    next_reg++;
    next_arg++;
  }
  for (; (next_reg <= last_arg_reg) && (next_arg < info->num_arg_words); next_reg++) {
    RegLocation rl_arg = info->args[next_arg++];
    rl_arg = UpdateRawLoc(rl_arg);
    if (rl_arg.wide && (next_reg <= last_arg_reg - 1)) {
      // Wide value with two registers remaining: load as a 64-bit pair and
      // consume both a second register slot and a second argument word.
      RegStorage r_tmp(RegStorage::k64BitPair, arg_regs[next_reg], arg_regs[next_reg + 1]);
      LoadValueDirectWideFixed(rl_arg, r_tmp);
      next_reg++;
      next_arg++;
    } else {
      if (rl_arg.wide) {
        // Only one register left: load just the low word here; the high word
        // is handled elsewhere.  Clear is_const so it isn't rematerialized.
        rl_arg = NarrowRegLoc(rl_arg);
        rl_arg.is_const = false;
      }
      LoadValueDirectFixed(rl_arg, RegStorage::Solo32(arg_regs[next_reg]));
    }
    // Advance the invoke sequence between argument loads.
    call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                direct_code, direct_method, type);
  }
  return call_state;
}
915
/*
 * Load up to 5 arguments, the first three of which will be in
 * kArg1 .. kArg3.  On entry kArg0 contains the current method pointer,
 * and as part of the load sequence, it must be replaced with
 * the target method pointer.  Note, this may also be called
 * for "range" variants if the number of arguments is 5 or fewer.
 *
 * Returns the final invoke-sequence state.  When |pcrLabel| is non-null it
 * receives the null-check LIR for the receiver (or nullptr when implicit
 * null checks are in use).
 */
int Mir2Lir::GenDalvikArgsNoRange(CallInfo* info,
                                  int call_state, LIR** pcrLabel, NextCallInsn next_call_insn,
                                  const MethodReference& target_method,
                                  uint32_t vtable_idx, uintptr_t direct_code,
                                  uintptr_t direct_method, InvokeType type, bool skip_this) {
  RegLocation rl_arg;

  /* If no arguments, just return */
  if (info->num_arg_words == 0)
    return call_state;

  call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                              direct_code, direct_method, type);

  DCHECK_LE(info->num_arg_words, 5);
  if (info->num_arg_words > 3) {
    // Arguments beyond the first three go to the outs area on the stack.
    int32_t next_use = 3;
    // Detect special case of wide arg spanning arg3/arg4
    RegLocation rl_use0 = info->args[0];
    RegLocation rl_use1 = info->args[1];
    RegLocation rl_use2 = info->args[2];
    if (((!rl_use0.wide && !rl_use1.wide) || rl_use0.wide) && rl_use2.wide) {
      RegStorage reg;
      // Wide spans, we need the 2nd half of uses[2].
      rl_arg = UpdateLocWide(rl_use2);
      if (rl_arg.location == kLocPhysReg) {
        if (rl_arg.reg.IsPair()) {
          reg = rl_arg.reg.GetHigh();
        } else {
          // Wide value lives in a single register; find its high-single view.
          RegisterInfo* info = GetRegInfo(rl_arg.reg);
          info = info->FindMatchingView(RegisterInfo::kHighSingleStorageMask);
          if (info == nullptr) {
            // NOTE: For hard float convention we won't split arguments across reg/mem.
            UNIMPLEMENTED(FATAL) << "Needs hard float api.";
          }
          reg = info->GetReg();
        }
      } else {
        // kArg2 & rArg3 can safely be used here
        reg = TargetReg(kArg3);
        {
          ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
          // Load the high word of the wide value from its frame home.
          Load32Disp(TargetReg(kSp), SRegOffset(rl_arg.s_reg_low) + 4, reg);
        }
        call_state = next_call_insn(cu_, info, call_state, target_method,
                                    vtable_idx, direct_code, direct_method, type);
      }
      {
        ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
        // Store the spanning high word into the outs area (+1 skips Method*).
        Store32Disp(TargetReg(kSp), (next_use + 1) * 4, reg);
      }
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
      next_use++;
    }
    // Loop through the rest
    while (next_use < info->num_arg_words) {
      RegStorage arg_reg;
      rl_arg = info->args[next_use];
      rl_arg = UpdateRawLoc(rl_arg);
      if (rl_arg.location == kLocPhysReg) {
        arg_reg = rl_arg.reg;
      } else {
        // Not in a register yet: stage it through kArg2(/kArg3 for wide).
        arg_reg = rl_arg.wide ? RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3)) :
            TargetReg(kArg2);
        if (rl_arg.wide) {
          LoadValueDirectWideFixed(rl_arg, arg_reg);
        } else {
          LoadValueDirectFixed(rl_arg, arg_reg);
        }
        call_state = next_call_insn(cu_, info, call_state, target_method,
                                    vtable_idx, direct_code, direct_method, type);
      }
      // Outs slot offset: +1 word past the Method* at [kSp + 0].
      int outs_offset = (next_use + 1) * 4;
      {
        ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
        if (rl_arg.wide) {
          StoreBaseDisp(TargetReg(kSp), outs_offset, arg_reg, k64, kNotVolatile);
          next_use += 2;
        } else {
          Store32Disp(TargetReg(kSp), outs_offset, arg_reg);
          next_use++;
        }
      }
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
    }
  }

  // The first three argument words go into kArg1..kArg3.
  call_state = LoadArgRegs(info, call_state, next_call_insn,
                           target_method, vtable_idx, direct_code, direct_method,
                           type, skip_this);

  if (pcrLabel) {
    if (cu_->compiler_driver->GetCompilerOptions().GetExplicitNullChecks()) {
      *pcrLabel = GenExplicitNullCheck(TargetReg(kArg1), info->opt_flags);
    } else {
      *pcrLabel = nullptr;
      // In lieu of generating a check for kArg1 being null, we need to
      // perform a load when doing implicit checks.
      RegStorage tmp = AllocTemp();
      Load32Disp(TargetReg(kArg1), 0, tmp);
      MarkPossibleNullPointerException(info->opt_flags);
      FreeTemp(tmp);
    }
  }
  return call_state;
}
1031
1032/*
1033 * May have 0+ arguments (also used for jumbo). Note that
1034 * source virtual registers may be in physical registers, so may
1035 * need to be flushed to home location before copying. This
1036 * applies to arg3 and above (see below).
1037 *
1038 * Two general strategies:
1039 * If < 20 arguments
1040 * Pass args 3-18 using vldm/vstm block copy
1041 * Pass arg0, arg1 & arg2 in kArg1-kArg3
1042 * If 20+ arguments
1043 * Pass args arg19+ using memcpy block copy
1044 * Pass arg0, arg1 & arg2 in kArg1-kArg3
1045 *
1046 */
1047int Mir2Lir::GenDalvikArgsRange(CallInfo* info, int call_state,
1048 LIR** pcrLabel, NextCallInsn next_call_insn,
1049 const MethodReference& target_method,
1050 uint32_t vtable_idx, uintptr_t direct_code, uintptr_t direct_method,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001051 InvokeType type, bool skip_this) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001052 // If we can treat it as non-range (Jumbo ops will use range form)
1053 if (info->num_arg_words <= 5)
1054 return GenDalvikArgsNoRange(info, call_state, pcrLabel,
1055 next_call_insn, target_method, vtable_idx,
1056 direct_code, direct_method, type, skip_this);
1057 /*
1058 * First load the non-register arguments. Both forms expect all
1059 * of the source arguments to be in their home frame location, so
1060 * scan the s_reg names and flush any that have been promoted to
1061 * frame backing storage.
1062 */
1063 // Scan the rest of the args - if in phys_reg flush to memory
1064 for (int next_arg = 0; next_arg < info->num_arg_words;) {
1065 RegLocation loc = info->args[next_arg];
1066 if (loc.wide) {
1067 loc = UpdateLocWide(loc);
1068 if ((next_arg >= 2) && (loc.location == kLocPhysReg)) {
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001069 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Andreas Gampe3c12c512014-06-24 18:46:29 +00001070 StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k64, kNotVolatile);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001071 }
1072 next_arg += 2;
1073 } else {
1074 loc = UpdateLoc(loc);
1075 if ((next_arg >= 3) && (loc.location == kLocPhysReg)) {
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001076 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
buzbee695d13a2014-04-19 13:32:20 -07001077 Store32Disp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001078 }
1079 next_arg++;
1080 }
1081 }
1082
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001083 // Logic below assumes that Method pointer is at offset zero from SP.
1084 DCHECK_EQ(VRegOffset(static_cast<int>(kVRegMethodPtrBaseReg)), 0);
1085
1086 // The first 3 arguments are passed via registers.
1087 // TODO: For 64-bit, instead of hardcoding 4 for Method* size, we should either
1088 // get size of uintptr_t or size of object reference according to model being used.
1089 int outs_offset = 4 /* Method* */ + (3 * sizeof(uint32_t));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001090 int start_offset = SRegOffset(info->args[3].s_reg_low);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001091 int regs_left_to_pass_via_stack = info->num_arg_words - 3;
1092 DCHECK_GT(regs_left_to_pass_via_stack, 0);
1093
1094 if (cu_->instruction_set == kThumb2 && regs_left_to_pass_via_stack <= 16) {
1095 // Use vldm/vstm pair using kArg3 as a temp
1096 call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
1097 direct_code, direct_method, type);
1098 OpRegRegImm(kOpAdd, TargetReg(kArg3), TargetReg(kSp), start_offset);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001099 LIR* ld = nullptr;
1100 {
1101 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
1102 ld = OpVldm(TargetReg(kArg3), regs_left_to_pass_via_stack);
1103 }
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001104 // TUNING: loosen barrier
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001105 ld->u.m.def_mask = &kEncodeAll;
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001106 call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
1107 direct_code, direct_method, type);
1108 OpRegRegImm(kOpAdd, TargetReg(kArg3), TargetReg(kSp), 4 /* Method* */ + (3 * 4));
1109 call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
1110 direct_code, direct_method, type);
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001111 LIR* st = nullptr;
1112 {
1113 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
1114 st = OpVstm(TargetReg(kArg3), regs_left_to_pass_via_stack);
1115 }
1116 st->u.m.def_mask = &kEncodeAll;
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001117 call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
1118 direct_code, direct_method, type);
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001119 } else if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001120 int current_src_offset = start_offset;
1121 int current_dest_offset = outs_offset;
1122
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001123 // Only davik regs are accessed in this loop; no next_call_insn() calls.
1124 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001125 while (regs_left_to_pass_via_stack > 0) {
1126 // This is based on the knowledge that the stack itself is 16-byte aligned.
1127 bool src_is_16b_aligned = (current_src_offset & 0xF) == 0;
1128 bool dest_is_16b_aligned = (current_dest_offset & 0xF) == 0;
1129 size_t bytes_to_move;
1130
1131 /*
1132 * The amount to move defaults to 32-bit. If there are 4 registers left to move, then do a
1133 * a 128-bit move because we won't get the chance to try to aligned. If there are more than
1134 * 4 registers left to move, consider doing a 128-bit only if either src or dest are aligned.
1135 * We do this because we could potentially do a smaller move to align.
1136 */
1137 if (regs_left_to_pass_via_stack == 4 ||
1138 (regs_left_to_pass_via_stack > 4 && (src_is_16b_aligned || dest_is_16b_aligned))) {
1139 // Moving 128-bits via xmm register.
1140 bytes_to_move = sizeof(uint32_t) * 4;
1141
1142 // Allocate a free xmm temp. Since we are working through the calling sequence,
Mark Mendelle87f9b52014-04-30 14:13:18 -04001143 // we expect to have an xmm temporary available. AllocTempDouble will abort if
1144 // there are no free registers.
buzbee2700f7e2014-03-07 09:46:20 -08001145 RegStorage temp = AllocTempDouble();
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001146
1147 LIR* ld1 = nullptr;
1148 LIR* ld2 = nullptr;
1149 LIR* st1 = nullptr;
1150 LIR* st2 = nullptr;
1151
1152 /*
1153 * The logic is similar for both loads and stores. If we have 16-byte alignment,
1154 * do an aligned move. If we have 8-byte alignment, then do the move in two
1155 * parts. This approach prevents possible cache line splits. Finally, fall back
1156 * to doing an unaligned move. In most cases we likely won't split the cache
1157 * line but we cannot prove it and thus take a conservative approach.
1158 */
1159 bool src_is_8b_aligned = (current_src_offset & 0x7) == 0;
1160 bool dest_is_8b_aligned = (current_dest_offset & 0x7) == 0;
1161
1162 if (src_is_16b_aligned) {
1163 ld1 = OpMovRegMem(temp, TargetReg(kSp), current_src_offset, kMovA128FP);
1164 } else if (src_is_8b_aligned) {
1165 ld1 = OpMovRegMem(temp, TargetReg(kSp), current_src_offset, kMovLo128FP);
buzbee2700f7e2014-03-07 09:46:20 -08001166 ld2 = OpMovRegMem(temp, TargetReg(kSp), current_src_offset + (bytes_to_move >> 1),
1167 kMovHi128FP);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001168 } else {
1169 ld1 = OpMovRegMem(temp, TargetReg(kSp), current_src_offset, kMovU128FP);
1170 }
1171
1172 if (dest_is_16b_aligned) {
1173 st1 = OpMovMemReg(TargetReg(kSp), current_dest_offset, temp, kMovA128FP);
1174 } else if (dest_is_8b_aligned) {
1175 st1 = OpMovMemReg(TargetReg(kSp), current_dest_offset, temp, kMovLo128FP);
buzbee2700f7e2014-03-07 09:46:20 -08001176 st2 = OpMovMemReg(TargetReg(kSp), current_dest_offset + (bytes_to_move >> 1),
1177 temp, kMovHi128FP);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001178 } else {
1179 st1 = OpMovMemReg(TargetReg(kSp), current_dest_offset, temp, kMovU128FP);
1180 }
1181
1182 // TODO If we could keep track of aliasing information for memory accesses that are wider
1183 // than 64-bit, we wouldn't need to set up a barrier.
1184 if (ld1 != nullptr) {
1185 if (ld2 != nullptr) {
1186 // For 64-bit load we can actually set up the aliasing information.
1187 AnnotateDalvikRegAccess(ld1, current_src_offset >> 2, true, true);
1188 AnnotateDalvikRegAccess(ld2, (current_src_offset + (bytes_to_move >> 1)) >> 2, true, true);
1189 } else {
1190 // Set barrier for 128-bit load.
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001191 ld1->u.m.def_mask = &kEncodeAll;
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001192 }
1193 }
1194 if (st1 != nullptr) {
1195 if (st2 != nullptr) {
1196 // For 64-bit store we can actually set up the aliasing information.
1197 AnnotateDalvikRegAccess(st1, current_dest_offset >> 2, false, true);
1198 AnnotateDalvikRegAccess(st2, (current_dest_offset + (bytes_to_move >> 1)) >> 2, false, true);
1199 } else {
1200 // Set barrier for 128-bit store.
Vladimir Marko8dea81c2014-06-06 14:50:36 +01001201 st1->u.m.def_mask = &kEncodeAll;
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001202 }
1203 }
1204
1205 // Free the temporary used for the data movement.
buzbee091cc402014-03-31 10:14:40 -07001206 FreeTemp(temp);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001207 } else {
1208 // Moving 32-bits via general purpose register.
1209 bytes_to_move = sizeof(uint32_t);
1210
1211 // Instead of allocating a new temp, simply reuse one of the registers being used
1212 // for argument passing.
buzbee2700f7e2014-03-07 09:46:20 -08001213 RegStorage temp = TargetReg(kArg3);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001214
1215 // Now load the argument VR and store to the outs.
buzbee695d13a2014-04-19 13:32:20 -07001216 Load32Disp(TargetReg(kSp), current_src_offset, temp);
1217 Store32Disp(TargetReg(kSp), current_dest_offset, temp);
Razvan A Lupusoru2c498d12014-01-29 16:02:57 -08001218 }
1219
1220 current_src_offset += bytes_to_move;
1221 current_dest_offset += bytes_to_move;
1222 regs_left_to_pass_via_stack -= (bytes_to_move >> 2);
1223 }
1224 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001225 // Generate memcpy
1226 OpRegRegImm(kOpAdd, TargetReg(kArg0), TargetReg(kSp), outs_offset);
1227 OpRegRegImm(kOpAdd, TargetReg(kArg1), TargetReg(kSp), start_offset);
buzbee33ae5582014-06-12 14:56:32 -07001228 if (cu_->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -07001229 CallRuntimeHelperRegRegImm(QUICK_ENTRYPOINT_OFFSET(8, pMemcpy), TargetReg(kArg0),
1230 TargetReg(kArg1), (info->num_arg_words - 3) * 4, false);
1231 } else {
1232 CallRuntimeHelperRegRegImm(QUICK_ENTRYPOINT_OFFSET(4, pMemcpy), TargetReg(kArg0),
1233 TargetReg(kArg1), (info->num_arg_words - 3) * 4, false);
1234 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001235 }
1236
1237 call_state = LoadArgRegs(info, call_state, next_call_insn,
1238 target_method, vtable_idx, direct_code, direct_method,
1239 type, skip_this);
1240
1241 call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
1242 direct_code, direct_method, type);
1243 if (pcrLabel) {
Andreas Gampe5655e842014-06-17 16:36:07 -07001244 if (cu_->compiler_driver->GetCompilerOptions().GetExplicitNullChecks()) {
Dave Allisonf9439142014-03-27 15:10:22 -07001245 *pcrLabel = GenExplicitNullCheck(TargetReg(kArg1), info->opt_flags);
1246 } else {
1247 *pcrLabel = nullptr;
1248 // In lieu of generating a check for kArg1 being null, we need to
1249 // perform a load when doing implicit checks.
1250 RegStorage tmp = AllocTemp();
buzbee695d13a2014-04-19 13:32:20 -07001251 Load32Disp(TargetReg(kArg1), 0, tmp);
Dave Allisonf9439142014-03-27 15:10:22 -07001252 MarkPossibleNullPointerException(info->opt_flags);
1253 FreeTemp(tmp);
1254 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001255 }
1256 return call_state;
1257}
1258
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001259RegLocation Mir2Lir::InlineTarget(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001260 RegLocation res;
1261 if (info->result.location == kLocInvalid) {
buzbeea0cd2d72014-06-01 09:33:49 -07001262 res = GetReturn(LocToRegClass(info->result));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001263 } else {
1264 res = info->result;
1265 }
1266 return res;
1267}
1268
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001269RegLocation Mir2Lir::InlineTargetWide(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001270 RegLocation res;
1271 if (info->result.location == kLocInvalid) {
buzbeea0cd2d72014-06-01 09:33:49 -07001272 res = GetReturnWide(kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001273 } else {
1274 res = info->result;
1275 }
1276 return res;
1277}
1278
// Generates the fast path for an inlined String.charAt(I): null check, an
// optional bounds check that branches to an out-of-line slow path, then an
// unsigned halfword load from the String's backing char array.
// Returns true if code was generated, false to punt to the normal call.
bool Mir2Lir::GenInlinedCharAt(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  // Location of reference to data array
  int value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count
  int count_offset = mirror::String::CountOffset().Int32Value();
  // Starting offset within data array
  int offset_offset = mirror::String::OffsetOffset().Int32Value();
  // Start of char data with array_
  int data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Int32Value();

  RegLocation rl_obj = info->args[0];   // The String receiver.
  RegLocation rl_idx = info->args[1];   // The character index.
  rl_obj = LoadValue(rl_obj, kRefReg);
  // X86 wants to avoid putting a constant index into a register.
  if (!((cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64)&& rl_idx.is_const)) {
    rl_idx = LoadValue(rl_idx, kCoreReg);
  }
  RegStorage reg_max;
  GenNullCheck(rl_obj.reg, info->opt_flags);
  bool range_check = (!(info->opt_flags & MIR_IGNORE_RANGE_CHECK));
  LIR* range_check_branch = nullptr;
  RegStorage reg_off;   // Will hold offset_ + index (index into the char array).
  RegStorage reg_ptr;   // Will hold the reference to the backing char array.
  if (cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64) {
    reg_off = AllocTemp();
    reg_ptr = AllocTempRef();
    if (range_check) {
      reg_max = AllocTemp();
      Load32Disp(rl_obj.reg, count_offset, reg_max);
      MarkPossibleNullPointerException(info->opt_flags);
    }
    Load32Disp(rl_obj.reg, offset_offset, reg_off);
    MarkPossibleNullPointerException(info->opt_flags);
    Load32Disp(rl_obj.reg, value_offset, reg_ptr);
    if (range_check) {
      // Set up a slow path to allow retry in case of bounds violation.
      OpRegReg(kOpCmp, rl_idx.reg, reg_max);
      FreeTemp(reg_max);
      range_check_branch = OpCondBranch(kCondUge, nullptr);
    }
    OpRegImm(kOpAdd, reg_ptr, data_offset);
  } else {
    if (range_check) {
      // On x86, we can compare to memory directly.
      // Set up a launch pad to allow retry in case of bounds violation.
      if (rl_idx.is_const) {
        range_check_branch = OpCmpMemImmBranch(
            kCondUlt, RegStorage::InvalidReg(), rl_obj.reg, count_offset,
            mir_graph_->ConstantValue(rl_idx.orig_sreg), nullptr);
      } else {
        OpRegMem(kOpCmp, rl_idx.reg, rl_obj.reg, count_offset);
        range_check_branch = OpCondBranch(kCondUge, nullptr);
      }
    }
    reg_off = AllocTemp();
    reg_ptr = AllocTempRef();
    Load32Disp(rl_obj.reg, offset_offset, reg_off);
    LoadRefDisp(rl_obj.reg, value_offset, reg_ptr, kNotVolatile);
  }
  // Fold the requested index into the array offset.
  if (rl_idx.is_const) {
    OpRegImm(kOpAdd, reg_off, mir_graph_->ConstantValue(rl_idx.orig_sreg));
  } else {
    OpRegReg(kOpAdd, reg_off, rl_idx.reg);
  }
  FreeTemp(rl_obj.reg);
  if (rl_idx.location == kLocPhysReg) {
    FreeTemp(rl_idx.reg);
  }
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  // Chars are 16 bits wide; scale the index by 2 (shift of 1).
  if (cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64) {
    LoadBaseIndexed(reg_ptr, reg_off, rl_result.reg, 1, kUnsignedHalf);
  } else {
    // On x86 data_offset was not pre-added to reg_ptr; apply it as a displacement.
    LoadBaseIndexedDisp(reg_ptr, reg_off, 1, data_offset, rl_result.reg, kUnsignedHalf);
  }
  FreeTemp(reg_off);
  FreeTemp(reg_ptr);
  StoreValue(rl_dest, rl_result);
  if (range_check) {
    DCHECK(range_check_branch != nullptr);
    info->opt_flags |= MIR_IGNORE_NULL_CHECK;  // Record that we've already null checked.
    AddIntrinsicSlowPath(info, range_check_branch);
  }
  return true;
}
1368
1369// Generates an inlined String.is_empty or String.length.
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001370bool Mir2Lir::GenInlinedStringIsEmptyOrLength(CallInfo* info, bool is_empty) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001371 if (cu_->instruction_set == kMips) {
1372 // TODO - add Mips implementation
1373 return false;
1374 }
1375 // dst = src.length();
1376 RegLocation rl_obj = info->args[0];
buzbeea0cd2d72014-06-01 09:33:49 -07001377 rl_obj = LoadValue(rl_obj, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001378 RegLocation rl_dest = InlineTarget(info);
1379 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001380 GenNullCheck(rl_obj.reg, info->opt_flags);
buzbee695d13a2014-04-19 13:32:20 -07001381 Load32Disp(rl_obj.reg, mirror::String::CountOffset().Int32Value(), rl_result.reg);
Dave Allisonb373e092014-02-20 16:06:36 -08001382 MarkPossibleNullPointerException(info->opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001383 if (is_empty) {
1384 // dst = (dst == 0);
1385 if (cu_->instruction_set == kThumb2) {
buzbee2700f7e2014-03-07 09:46:20 -08001386 RegStorage t_reg = AllocTemp();
1387 OpRegReg(kOpNeg, t_reg, rl_result.reg);
1388 OpRegRegReg(kOpAdc, rl_result.reg, rl_result.reg, t_reg);
Serban Constantinescu169489b2014-06-11 16:43:35 +01001389 } else if (cu_->instruction_set == kArm64) {
1390 OpRegImm(kOpSub, rl_result.reg, 1);
1391 OpRegRegImm(kOpLsr, rl_result.reg, rl_result.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001392 } else {
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001393 DCHECK(cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64);
buzbee2700f7e2014-03-07 09:46:20 -08001394 OpRegImm(kOpSub, rl_result.reg, 1);
1395 OpRegImm(kOpLsr, rl_result.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001396 }
1397 }
1398 StoreValue(rl_dest, rl_result);
1399 return true;
1400}
1401
// Generates inlined code for byte-reversal intrinsics (Integer/Long/Short
// reverseBytes).  'size' selects k64, k32, or kSignedHalf.  Returns true if
// code was generated, false to fall back to the normal call.
bool Mir2Lir::GenInlinedReverseBytes(CallInfo* info, OpSize size) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src_i = info->args[0];
  RegLocation rl_dest = (size == k64) ? InlineTargetWide(info) : InlineTarget(info);  // result reg
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (size == k64) {
    RegLocation rl_i = LoadValueWide(rl_src_i, kCoreReg);
    if (cu_->instruction_set == kArm64) {
      // 64-bit registers: a single REV handles the whole value.
      OpRegReg(kOpRev, rl_result.reg, rl_i.reg);
      StoreValueWide(rl_dest, rl_result);
      return true;
    }
    // 32-bit register pairs: byte-reverse each half and swap them
    // (result.lo = rev(i.hi), result.hi = rev(i.lo)).
    RegStorage r_i_low = rl_i.reg.GetLow();
    if (rl_i.reg.GetLowReg() == rl_result.reg.GetLowReg()) {
      // First REV shall clobber rl_result.reg.GetReg(), save the value in a temp for the second REV.
      r_i_low = AllocTemp();
      OpRegCopy(r_i_low, rl_i.reg);
    }
    OpRegReg(kOpRev, rl_result.reg.GetLow(), rl_i.reg.GetHigh());
    OpRegReg(kOpRev, rl_result.reg.GetHigh(), r_i_low);
    if (rl_i.reg.GetLowReg() == rl_result.reg.GetLowReg()) {
      FreeTemp(r_i_low);
    }
    StoreValueWide(rl_dest, rl_result);
  } else {
    // 32-bit reverse uses REV; 16-bit uses REVSH (reverse + sign-extend).
    DCHECK(size == k32 || size == kSignedHalf);
    OpKind op = (size == k32) ? kOpRev : kOpRevsh;
    RegLocation rl_i = LoadValue(rl_src_i, kCoreReg);
    OpRegReg(op, rl_result.reg, rl_i.reg);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}
1438
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001439bool Mir2Lir::GenInlinedAbsInt(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001440 if (cu_->instruction_set == kMips) {
1441 // TODO - add Mips implementation
1442 return false;
1443 }
1444 RegLocation rl_src = info->args[0];
1445 rl_src = LoadValue(rl_src, kCoreReg);
1446 RegLocation rl_dest = InlineTarget(info);
1447 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001448 RegStorage sign_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001449 // abs(x) = y<=x>>31, (x+y)^y.
buzbee2700f7e2014-03-07 09:46:20 -08001450 OpRegRegImm(kOpAsr, sign_reg, rl_src.reg, 31);
1451 OpRegRegReg(kOpAdd, rl_result.reg, rl_src.reg, sign_reg);
1452 OpRegReg(kOpXor, rl_result.reg, sign_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001453 StoreValue(rl_dest, rl_result);
1454 return true;
1455}
1456
// Generates inlined code for Math.abs(long) using the branch-free identity
// abs(x) = (x + y) ^ y where y = x >> 63 (computed here pairwise over two
// 32-bit halves with add/adc).  Returns true if code was generated.
bool Mir2Lir::GenInlinedAbsLong(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src = info->args[0];
  rl_src = LoadValueWide(rl_src, kCoreReg);
  RegLocation rl_dest = InlineTargetWide(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  // If on x86 or if we would clobber a register needed later, just copy the source first.
  if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64 || rl_result.reg.GetLowReg() == rl_src.reg.GetHighReg()) {
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    // Only free the source pair when it is fully disjoint from the result pair;
    // otherwise freeing would release registers still holding live halves.
    if (rl_result.reg.GetLowReg() != rl_src.reg.GetLowReg() &&
        rl_result.reg.GetLowReg() != rl_src.reg.GetHighReg() &&
        rl_result.reg.GetHighReg() != rl_src.reg.GetLowReg() &&
        rl_result.reg.GetHighReg() != rl_src.reg.GetHighReg()) {
      // Reuse source registers to avoid running out of temps.
      FreeTemp(rl_src.reg);
    }
    rl_src = rl_result;
  }

  // abs(x) = y<=x>>31, (x+y)^y.
  RegStorage sign_reg = AllocTemp();
  // Sign mask comes from the high half; add/adc propagates the carry across halves.
  OpRegRegImm(kOpAsr, sign_reg, rl_src.reg.GetHigh(), 31);
  OpRegRegReg(kOpAdd, rl_result.reg.GetLow(), rl_src.reg.GetLow(), sign_reg);
  OpRegRegReg(kOpAdc, rl_result.reg.GetHigh(), rl_src.reg.GetHigh(), sign_reg);
  OpRegReg(kOpXor, rl_result.reg.GetLow(), sign_reg);
  OpRegReg(kOpXor, rl_result.reg.GetHigh(), sign_reg);
  FreeTemp(sign_reg);
  StoreValueWide(rl_dest, rl_result);
  return true;
}
1491
Yixin Shoudbb17e32014-02-07 05:09:30 -08001492bool Mir2Lir::GenInlinedAbsFloat(CallInfo* info) {
1493 if (cu_->instruction_set == kMips) {
1494 // TODO - add Mips implementation
1495 return false;
1496 }
1497 RegLocation rl_src = info->args[0];
1498 rl_src = LoadValue(rl_src, kCoreReg);
1499 RegLocation rl_dest = InlineTarget(info);
1500 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001501 OpRegRegImm(kOpAnd, rl_result.reg, rl_src.reg, 0x7fffffff);
Yixin Shoudbb17e32014-02-07 05:09:30 -08001502 StoreValue(rl_dest, rl_result);
1503 return true;
1504}
1505
// Generates inlined code for Math.abs(double) by clearing the sign bit in the
// high half of the 64-bit representation.  Returns true if code was generated,
// false (e.g. on Mips/Arm64) to fall back to the normal call.
bool Mir2Lir::GenInlinedAbsDouble(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src = info->args[0];
  rl_src = LoadValueWide(rl_src, kCoreReg);
  RegLocation rl_dest = InlineTargetWide(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);

  if (cu_->instruction_set == kArm64) {
    // TODO - could encode the 64-bit mask, or use UBFX otherwise:
    // OpRegRegImm(kOpAnd, rl_result.reg, 0x7fffffffffffffff);
    return false;
  } else {
    // Copy the full value, then mask off the IEEE-754 sign bit in the high word.
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    OpRegImm(kOpAnd, rl_result.reg.GetHigh(), 0x7fffffff);
  }
  StoreValueWide(rl_dest, rl_result);
  return true;
}
1527
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001528bool Mir2Lir::GenInlinedFloatCvt(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001529 if (cu_->instruction_set == kMips) {
1530 // TODO - add Mips implementation
1531 return false;
1532 }
1533 RegLocation rl_src = info->args[0];
1534 RegLocation rl_dest = InlineTarget(info);
1535 StoreValue(rl_dest, rl_src);
1536 return true;
1537}
1538
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001539bool Mir2Lir::GenInlinedDoubleCvt(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001540 if (cu_->instruction_set == kMips) {
1541 // TODO - add Mips implementation
1542 return false;
1543 }
1544 RegLocation rl_src = info->args[0];
1545 RegLocation rl_dest = InlineTargetWide(info);
1546 StoreValueWide(rl_dest, rl_src);
1547 return true;
1548}
1549
1550/*
Vladimir Marko3bc86152014-03-13 14:11:28 +00001551 * Fast String.indexOf(I) & (II). Tests for simple case of char <= 0xFFFF,
Brian Carlstrom7940e442013-07-12 13:46:57 -07001552 * otherwise bails to standard library code.
1553 */
// Generates the fast path for inlined String.indexOf(I) / indexOf(II) by
// calling the pIndexOf runtime helper with fixed argument registers.
// 'zero_based' selects the (I) flavor (search start forced to 0).  Code
// points above 0xFFFF are routed to the out-of-line slow path (or rejected
// outright when the char argument is a too-large constant).
bool Mir2Lir::GenInlinedIndexOf(CallInfo* info, bool zero_based) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_obj = info->args[0];
  RegLocation rl_char = info->args[1];
  if (rl_char.is_const && (mir_graph_->ConstantValue(rl_char) & ~0xFFFF) != 0) {
    // Code point beyond 0xFFFF. Punt to the real String.indexOf().
    return false;
  }

  ClobberCallerSave();
  LockCallTemps();  // Using fixed registers
  RegStorage reg_ptr = TargetReg(kArg0);    // String receiver.
  RegStorage reg_char = TargetReg(kArg1);   // Char to search for.
  RegStorage reg_start = TargetReg(kArg2);  // Start index.

  LoadValueDirectFixed(rl_obj, reg_ptr);
  LoadValueDirectFixed(rl_char, reg_char);
  if (zero_based) {
    LoadConstant(reg_start, 0);
  } else {
    RegLocation rl_start = info->args[2];     // 3rd arg only present in III flavor of IndexOf.
    LoadValueDirectFixed(rl_start, reg_start);
  }
  // Pick the 64- or 32-bit entrypoint table as appropriate.
  RegStorage r_tgt = cu_->target64 ?
      LoadHelper(QUICK_ENTRYPOINT_OFFSET(8, pIndexOf)) :
      LoadHelper(QUICK_ENTRYPOINT_OFFSET(4, pIndexOf));
  GenExplicitNullCheck(reg_ptr, info->opt_flags);
  // Non-constant char: guard against code points above 0xFFFF at runtime.
  LIR* high_code_point_branch =
      rl_char.is_const ? nullptr : OpCmpImmBranch(kCondGt, reg_char, 0xFFFF, nullptr);
  // NOTE: not a safepoint
  OpReg(kOpBlx, r_tgt);
  if (!rl_char.is_const) {
    // Add the slow path for code points beyond 0xFFFF.
    DCHECK(high_code_point_branch != nullptr);
    LIR* resume_tgt = NewLIR0(kPseudoTargetLabel);
    info->opt_flags |= MIR_IGNORE_NULL_CHECK;  // Record that we've null checked.
    AddIntrinsicSlowPath(info, high_code_point_branch, resume_tgt);
  } else {
    DCHECK_EQ(mir_graph_->ConstantValue(rl_char) & ~0xFFFF, 0);
    DCHECK(high_code_point_branch == nullptr);
  }
  RegLocation rl_return = GetReturn(kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  StoreValue(rl_dest, rl_return);
  return true;
}
1603
1604/* Fast string.compareTo(Ljava/lang/string;)I. */
// Generates the fast path for inlined String.compareTo(String) by calling
// the pStringCompareTo runtime helper with fixed argument registers.  The
// receiver is explicitly null-checked; a null comparison argument branches
// to the out-of-line intrinsic slow path.  Returns true if code was
// generated, false to fall back to the normal call.
bool Mir2Lir::GenInlinedStringCompareTo(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  ClobberCallerSave();
  LockCallTemps();  // Using fixed registers
  RegStorage reg_this = TargetReg(kArg0);  // Receiver string.
  RegStorage reg_cmp = TargetReg(kArg1);   // String compared against.

  RegLocation rl_this = info->args[0];
  RegLocation rl_cmp = info->args[1];
  LoadValueDirectFixed(rl_this, reg_this);
  LoadValueDirectFixed(rl_cmp, reg_cmp);
  // Non-x86 targets load the helper address into a register up front;
  // x86 branches through thread-local memory below instead.
  RegStorage r_tgt;
  if (cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64) {
    if (cu_->target64) {
      r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(8, pStringCompareTo));
    } else {
      r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(4, pStringCompareTo));
    }
  } else {
    r_tgt = RegStorage::InvalidReg();
  }
  GenExplicitNullCheck(reg_this, info->opt_flags);
  info->opt_flags |= MIR_IGNORE_NULL_CHECK;  // Record that we've null checked.
  // TUNING: check if rl_cmp.s_reg_low is already null checked
  LIR* cmp_null_check_branch = OpCmpImmBranch(kCondEq, reg_cmp, 0, nullptr);
  AddIntrinsicSlowPath(info, cmp_null_check_branch);
  // NOTE: not a safepoint
  if (cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64) {
    OpReg(kOpBlx, r_tgt);
  } else {
    if (cu_->target64) {
      OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(8, pStringCompareTo));
    } else {
      OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(4, pStringCompareTo));
    }
  }
  RegLocation rl_return = GetReturn(kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  StoreValue(rl_dest, rl_return);
  return true;
}
1649
1650bool Mir2Lir::GenInlinedCurrentThread(CallInfo* info) {
1651 RegLocation rl_dest = InlineTarget(info);
1652 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
Andreas Gampe2f244e92014-05-08 03:35:25 -07001653
1654 switch (cu_->instruction_set) {
1655 case kArm:
1656 // Fall-through.
1657 case kThumb2:
1658 // Fall-through.
1659 case kMips:
1660 Load32Disp(TargetReg(kSelf), Thread::PeerOffset<4>().Int32Value(), rl_result.reg);
1661 break;
1662
1663 case kArm64:
1664 Load32Disp(TargetReg(kSelf), Thread::PeerOffset<8>().Int32Value(), rl_result.reg);
1665 break;
1666
1667 case kX86:
1668 reinterpret_cast<X86Mir2Lir*>(this)->OpRegThreadMem(kOpMov, rl_result.reg,
1669 Thread::PeerOffset<4>());
1670 break;
1671
1672 case kX86_64:
1673 reinterpret_cast<X86Mir2Lir*>(this)->OpRegThreadMem(kOpMov, rl_result.reg,
1674 Thread::PeerOffset<8>());
1675 break;
1676
1677 default:
1678 LOG(FATAL) << "Unexpected isa " << cu_->instruction_set;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001679 }
1680 StoreValue(rl_dest, rl_result);
1681 return true;
1682}
1683
1684bool Mir2Lir::GenInlinedUnsafeGet(CallInfo* info,
1685 bool is_long, bool is_volatile) {
1686 if (cu_->instruction_set == kMips) {
1687 // TODO - add Mips implementation
1688 return false;
1689 }
1690 // Unused - RegLocation rl_src_unsafe = info->args[0];
1691 RegLocation rl_src_obj = info->args[1]; // Object
1692 RegLocation rl_src_offset = info->args[2]; // long low
buzbee2700f7e2014-03-07 09:46:20 -08001693 rl_src_offset = NarrowRegLoc(rl_src_offset); // ignore high half in info->args[3]
Mark Mendell55d0eac2014-02-06 11:02:52 -08001694 RegLocation rl_dest = is_long ? InlineTargetWide(info) : InlineTarget(info); // result reg
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001695
buzbeea0cd2d72014-06-01 09:33:49 -07001696 RegLocation rl_object = LoadValue(rl_src_obj, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001697 RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);
1698 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1699 if (is_long) {
Dmitry Petrochenko9bf549d2014-05-12 11:14:46 +07001700 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001701 LoadBaseIndexedDisp(rl_object.reg, rl_offset.reg, 0, 0, rl_result.reg, k64);
Mathieu Chartier7c95cef2014-04-02 17:09:17 -07001702 } else {
1703 RegStorage rl_temp_offset = AllocTemp();
1704 OpRegRegReg(kOpAdd, rl_temp_offset, rl_object.reg, rl_offset.reg);
Andreas Gampe3c12c512014-06-24 18:46:29 +00001705 LoadBaseDisp(rl_temp_offset, 0, rl_result.reg, k64, kNotVolatile);
buzbee091cc402014-03-31 10:14:40 -07001706 FreeTemp(rl_temp_offset);
Mathieu Chartier7c95cef2014-04-02 17:09:17 -07001707 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001708 } else {
buzbee695d13a2014-04-19 13:32:20 -07001709 LoadBaseIndexed(rl_object.reg, rl_offset.reg, rl_result.reg, 0, k32);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001710 }
1711
1712 if (is_volatile) {
1713 // Without context sensitive analysis, we must issue the most conservative barriers.
1714 // In this case, either a load or store may follow so we issue both barriers.
1715 GenMemBarrier(kLoadLoad);
1716 GenMemBarrier(kLoadStore);
1717 }
1718
1719 if (is_long) {
1720 StoreValueWide(rl_dest, rl_result);
1721 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001722 StoreValue(rl_dest, rl_result);
1723 }
1724 return true;
1725}
1726
1727bool Mir2Lir::GenInlinedUnsafePut(CallInfo* info, bool is_long,
1728 bool is_object, bool is_volatile, bool is_ordered) {
1729 if (cu_->instruction_set == kMips) {
1730 // TODO - add Mips implementation
1731 return false;
1732 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001733 // Unused - RegLocation rl_src_unsafe = info->args[0];
1734 RegLocation rl_src_obj = info->args[1]; // Object
1735 RegLocation rl_src_offset = info->args[2]; // long low
buzbee2700f7e2014-03-07 09:46:20 -08001736 rl_src_offset = NarrowRegLoc(rl_src_offset); // ignore high half in info->args[3]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001737 RegLocation rl_src_value = info->args[4]; // value to store
1738 if (is_volatile || is_ordered) {
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001739 // There might have been a store before this volatile one so insert StoreStore barrier.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001740 GenMemBarrier(kStoreStore);
1741 }
buzbeea0cd2d72014-06-01 09:33:49 -07001742 RegLocation rl_object = LoadValue(rl_src_obj, kRefReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001743 RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);
1744 RegLocation rl_value;
1745 if (is_long) {
1746 rl_value = LoadValueWide(rl_src_value, kCoreReg);
Dmitry Petrochenko9bf549d2014-05-12 11:14:46 +07001747 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Vladimir Marko3bf7c602014-05-07 14:55:43 +01001748 StoreBaseIndexedDisp(rl_object.reg, rl_offset.reg, 0, 0, rl_value.reg, k64);
Mathieu Chartier7c95cef2014-04-02 17:09:17 -07001749 } else {
1750 RegStorage rl_temp_offset = AllocTemp();
1751 OpRegRegReg(kOpAdd, rl_temp_offset, rl_object.reg, rl_offset.reg);
Andreas Gampe3c12c512014-06-24 18:46:29 +00001752 StoreBaseDisp(rl_temp_offset, 0, rl_value.reg, k64, kNotVolatile);
buzbee091cc402014-03-31 10:14:40 -07001753 FreeTemp(rl_temp_offset);
Mathieu Chartier7c95cef2014-04-02 17:09:17 -07001754 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001755 } else {
buzbeea0cd2d72014-06-01 09:33:49 -07001756 rl_value = LoadValue(rl_src_value);
buzbee695d13a2014-04-19 13:32:20 -07001757 StoreBaseIndexed(rl_object.reg, rl_offset.reg, rl_value.reg, 0, k32);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001758 }
Mark Mendelldf8ee2e2014-01-27 16:37:47 -08001759
1760 // Free up the temp early, to ensure x86 doesn't run out of temporaries in MarkGCCard.
buzbee091cc402014-03-31 10:14:40 -07001761 FreeTemp(rl_offset.reg);
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001762
Brian Carlstrom7940e442013-07-12 13:46:57 -07001763 if (is_volatile) {
Razvan A Lupusoru99ad7232014-02-25 17:41:08 -08001764 // A load might follow the volatile store so insert a StoreLoad barrier.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001765 GenMemBarrier(kStoreLoad);
1766 }
1767 if (is_object) {
buzbee2700f7e2014-03-07 09:46:20 -08001768 MarkGCCard(rl_value.reg, rl_object.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001769 }
1770 return true;
1771}
1772
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001773void Mir2Lir::GenInvoke(CallInfo* info) {
Vladimir Marko9820b7c2014-01-02 16:40:37 +00001774 if ((info->opt_flags & MIR_INLINED) != 0) {
1775 // Already inlined but we may still need the null check.
1776 if (info->type != kStatic &&
1777 ((cu_->disable_opt & (1 << kNullCheckElimination)) != 0 ||
1778 (info->opt_flags & MIR_IGNORE_NULL_CHECK) == 0)) {
buzbeea0cd2d72014-06-01 09:33:49 -07001779 RegLocation rl_obj = LoadValue(info->args[0], kRefReg);
Mingyao Yange643a172014-04-08 11:02:52 -07001780 GenNullCheck(rl_obj.reg);
Vladimir Marko9820b7c2014-01-02 16:40:37 +00001781 }
1782 return;
1783 }
Vladimir Marko3bc86152014-03-13 14:11:28 +00001784 DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
Dmitry Petrochenko4c800432014-05-08 12:20:24 +07001785 // TODO: Enable instrinsics for x86_64
1786 // Temporary disable intrinsics for x86_64. We will enable them later step by step.
buzbee33ae5582014-06-12 14:56:32 -07001787 // Temporary disable intrinsics for Arm64. We will enable them later step by step.
1788 if ((cu_->instruction_set != kX86_64) && (cu_->instruction_set != kArm64)) {
Dmitry Petrochenko4c800432014-05-08 12:20:24 +07001789 if (cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
1790 ->GenIntrinsic(this, info)) {
1791 return;
1792 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001793 }
Vladimir Marko3bc86152014-03-13 14:11:28 +00001794 GenInvokeNoInline(info);
1795}
1796
Andreas Gampe2f244e92014-05-08 03:35:25 -07001797template <size_t pointer_size>
1798static LIR* GenInvokeNoInlineCall(Mir2Lir* mir_to_lir, InvokeType type) {
1799 ThreadOffset<pointer_size> trampoline(-1);
1800 switch (type) {
1801 case kInterface:
1802 trampoline = QUICK_ENTRYPOINT_OFFSET(pointer_size, pInvokeInterfaceTrampolineWithAccessCheck);
1803 break;
1804 case kDirect:
1805 trampoline = QUICK_ENTRYPOINT_OFFSET(pointer_size, pInvokeDirectTrampolineWithAccessCheck);
1806 break;
1807 case kStatic:
1808 trampoline = QUICK_ENTRYPOINT_OFFSET(pointer_size, pInvokeStaticTrampolineWithAccessCheck);
1809 break;
1810 case kSuper:
1811 trampoline = QUICK_ENTRYPOINT_OFFSET(pointer_size, pInvokeSuperTrampolineWithAccessCheck);
1812 break;
1813 case kVirtual:
1814 trampoline = QUICK_ENTRYPOINT_OFFSET(pointer_size, pInvokeVirtualTrampolineWithAccessCheck);
1815 break;
1816 default:
1817 LOG(FATAL) << "Unexpected invoke type";
1818 }
1819 return mir_to_lir->OpThreadMem(kOpBlx, trampoline);
1820}
1821
Vladimir Marko3bc86152014-03-13 14:11:28 +00001822void Mir2Lir::GenInvokeNoInline(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001823 int call_state = 0;
1824 LIR* null_ck;
1825 LIR** p_null_ck = NULL;
1826 NextCallInsn next_call_insn;
1827 FlushAllRegs(); /* Everything to home location */
1828 // Explicit register usage
1829 LockCallTemps();
1830
Vladimir Markof096aad2014-01-23 15:51:58 +00001831 const MirMethodLoweringInfo& method_info = mir_graph_->GetMethodLoweringInfo(info->mir);
1832 cu_->compiler_driver->ProcessedInvoke(method_info.GetInvokeType(), method_info.StatsFlags());
Mark Mendelle87f9b52014-04-30 14:13:18 -04001833 BeginInvoke(info);
Vladimir Markof096aad2014-01-23 15:51:58 +00001834 InvokeType original_type = static_cast<InvokeType>(method_info.GetInvokeType());
1835 info->type = static_cast<InvokeType>(method_info.GetSharpType());
1836 bool fast_path = method_info.FastPath();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001837 bool skip_this;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001838 if (info->type == kInterface) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001839 next_call_insn = fast_path ? NextInterfaceCallInsn : NextInterfaceCallInsnWithAccessCheck;
Jeff Hao88474b42013-10-23 16:24:40 -07001840 skip_this = fast_path;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001841 } else if (info->type == kDirect) {
1842 if (fast_path) {
1843 p_null_ck = &null_ck;
1844 }
1845 next_call_insn = fast_path ? NextSDCallInsn : NextDirectCallInsnSP;
1846 skip_this = false;
1847 } else if (info->type == kStatic) {
1848 next_call_insn = fast_path ? NextSDCallInsn : NextStaticCallInsnSP;
1849 skip_this = false;
1850 } else if (info->type == kSuper) {
1851 DCHECK(!fast_path); // Fast path is a direct call.
1852 next_call_insn = NextSuperCallInsnSP;
1853 skip_this = false;
1854 } else {
1855 DCHECK_EQ(info->type, kVirtual);
1856 next_call_insn = fast_path ? NextVCallInsn : NextVCallInsnSP;
1857 skip_this = fast_path;
1858 }
Vladimir Markof096aad2014-01-23 15:51:58 +00001859 MethodReference target_method = method_info.GetTargetMethod();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001860 if (!info->is_range) {
1861 call_state = GenDalvikArgsNoRange(info, call_state, p_null_ck,
Vladimir Markof096aad2014-01-23 15:51:58 +00001862 next_call_insn, target_method, method_info.VTableIndex(),
1863 method_info.DirectCode(), method_info.DirectMethod(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001864 original_type, skip_this);
1865 } else {
1866 call_state = GenDalvikArgsRange(info, call_state, p_null_ck,
Vladimir Markof096aad2014-01-23 15:51:58 +00001867 next_call_insn, target_method, method_info.VTableIndex(),
1868 method_info.DirectCode(), method_info.DirectMethod(),
1869 original_type, skip_this);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001870 }
1871 // Finish up any of the call sequence not interleaved in arg loading
1872 while (call_state >= 0) {
Vladimir Markof096aad2014-01-23 15:51:58 +00001873 call_state = next_call_insn(cu_, info, call_state, target_method, method_info.VTableIndex(),
1874 method_info.DirectCode(), method_info.DirectMethod(), original_type);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001875 }
1876 LIR* call_inst;
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001877 if (cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001878 call_inst = OpReg(kOpBlx, TargetReg(kInvokeTgt));
1879 } else {
Jeff Hao88474b42013-10-23 16:24:40 -07001880 if (fast_path) {
Vladimir Markof096aad2014-01-23 15:51:58 +00001881 if (method_info.DirectCode() == static_cast<uintptr_t>(-1)) {
Mark Mendell55d0eac2014-02-06 11:02:52 -08001882 // We can have the linker fixup a call relative.
1883 call_inst =
Jeff Hao49161ce2014-03-12 11:05:25 -07001884 reinterpret_cast<X86Mir2Lir*>(this)->CallWithLinkerFixup(target_method, info->type);
Mark Mendell55d0eac2014-02-06 11:02:52 -08001885 } else {
1886 call_inst = OpMem(kOpBlx, TargetReg(kArg0),
1887 mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1888 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001889 } else {
Andreas Gampe2f244e92014-05-08 03:35:25 -07001890 // TODO: Extract?
buzbee33ae5582014-06-12 14:56:32 -07001891 if (cu_->target64) {
Andreas Gampe2f244e92014-05-08 03:35:25 -07001892 call_inst = GenInvokeNoInlineCall<8>(this, info->type);
1893 } else {
Andreas Gampe3ec5da22014-05-12 18:43:28 -07001894 call_inst = GenInvokeNoInlineCall<4>(this, info->type);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001895 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001896 }
1897 }
Mark Mendelle87f9b52014-04-30 14:13:18 -04001898 EndInvoke(info);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001899 MarkSafepointPC(call_inst);
1900
Vladimir Marko31c2aac2013-12-09 16:31:19 +00001901 ClobberCallerSave();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001902 if (info->result.location != kLocInvalid) {
1903 // We have a following MOVE_RESULT - do it now.
1904 if (info->result.wide) {
buzbeea0cd2d72014-06-01 09:33:49 -07001905 RegLocation ret_loc = GetReturnWide(LocToRegClass(info->result));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001906 StoreValueWide(info->result, ret_loc);
1907 } else {
buzbeea0cd2d72014-06-01 09:33:49 -07001908 RegLocation ret_loc = GetReturn(LocToRegClass(info->result));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001909 StoreValue(info->result, ret_loc);
1910 }
1911 }
1912}
1913
1914} // namespace art