/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_ir.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "invoke_type.h"
#include "mirror/array.h"
#include "mirror/string.h"
#include "mir_to_lir-inl.h"
#include "x86/codegen_x86.h"

namespace art {

/*
 * This source file contains "gen" codegen routines that should
 * be applicable to most targets.  Only mid-level support utilities
 * and "op" calls may be used here.
 */

/*
 * To save scheduling time, helper calls are broken into two parts: generation of
 * the helper target address, and the actual call to the helper.  Because x86
 * has a memory call operation, part 1 is a NOP for x86.  For other targets,
 * load arguments between the two parts.
 */
int Mir2Lir::CallHelperSetup(ThreadOffset helper_offset) {
  return (cu_->instruction_set == kX86) ? 0 : LoadHelper(helper_offset);
}

/* NOTE: if r_tgt is a temp, it will be freed following use */
LIR* Mir2Lir::CallHelper(int r_tgt, ThreadOffset helper_offset, bool safepoint_pc) {
  LIR* call_inst;
  if (cu_->instruction_set == kX86) {
    call_inst = OpThreadMem(kOpBlx, helper_offset);
  } else {
    call_inst = OpReg(kOpBlx, r_tgt);
    FreeTemp(r_tgt);
  }
  if (safepoint_pc) {
    MarkSafepointPC(call_inst);
  }
  return call_inst;
}

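/*
 * The CallRuntimeHelper* variants below marshal their operands into the
 * fixed argument registers, clobber the callee-save state and then invoke
 * the target helper via CallHelper().
 */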
void Mir2Lir::CallRuntimeHelperImm(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperReg(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  OpRegCopy(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegLocation(ThreadOffset helper_offset, RegLocation arg0,
                                           bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  if (arg0.wide == 0) {
    LoadValueDirectFixed(arg0, TargetReg(kArg0));
  } else {
    LoadValueDirectWideFixed(arg0, TargetReg(kArg0), TargetReg(kArg1));
  }
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmImm(ThreadOffset helper_offset, int arg0, int arg1,
                                      bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadConstant(TargetReg(kArg0), arg0);
  LoadConstant(TargetReg(kArg1), arg1);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmRegLocation(ThreadOffset helper_offset, int arg0,
                                              RegLocation arg1, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  if (arg1.wide == 0) {
    LoadValueDirectFixed(arg1, TargetReg(kArg1));
  } else {
    LoadValueDirectWideFixed(arg1, TargetReg(kArg1), TargetReg(kArg2));
  }
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegLocationImm(ThreadOffset helper_offset, RegLocation arg0,
                                              int arg1, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadValueDirectFixed(arg0, TargetReg(kArg0));
  LoadConstant(TargetReg(kArg1), arg1);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmReg(ThreadOffset helper_offset, int arg0, int arg1,
                                      bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  OpRegCopy(TargetReg(kArg1), arg1);
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegImm(ThreadOffset helper_offset, int arg0, int arg1,
                                      bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  OpRegCopy(TargetReg(kArg0), arg0);
  LoadConstant(TargetReg(kArg1), arg1);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmMethod(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadCurrMethodDirect(TargetReg(kArg1));
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegLocationRegLocation(ThreadOffset helper_offset, RegLocation arg0,
                                                      RegLocation arg1, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  if (arg0.wide == 0) {
    LoadValueDirectFixed(arg0, arg0.fp ? TargetReg(kFArg0) : TargetReg(kArg0));
    if (arg1.wide == 0) {
      if (cu_->instruction_set == kMips) {
        LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg1));
      } else {
        LoadValueDirectFixed(arg1, TargetReg(kArg1));
      }
    } else {
      if (cu_->instruction_set == kMips) {
        LoadValueDirectWideFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg1),
                                 arg1.fp ? TargetReg(kFArg3) : TargetReg(kArg2));
      } else {
        LoadValueDirectWideFixed(arg1, TargetReg(kArg1), TargetReg(kArg2));
      }
    }
  } else {
    LoadValueDirectWideFixed(arg0, arg0.fp ? TargetReg(kFArg0) : TargetReg(kArg0),
                             arg0.fp ? TargetReg(kFArg1) : TargetReg(kArg1));
    if (arg1.wide == 0) {
      LoadValueDirectFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg2));
    } else {
      LoadValueDirectWideFixed(arg1, arg1.fp ? TargetReg(kFArg2) : TargetReg(kArg2),
                               arg1.fp ? TargetReg(kFArg3) : TargetReg(kArg3));
    }
  }
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegReg(ThreadOffset helper_offset, int arg0, int arg1,
                                      bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  DCHECK_NE(TargetReg(kArg0), arg1);  // check copy into arg0 won't clobber arg1
  OpRegCopy(TargetReg(kArg0), arg0);
  OpRegCopy(TargetReg(kArg1), arg1);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegRegImm(ThreadOffset helper_offset, int arg0, int arg1,
                                         int arg2, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  DCHECK_NE(TargetReg(kArg0), arg1);  // check copy into arg0 won't clobber arg1
  OpRegCopy(TargetReg(kArg0), arg0);
  OpRegCopy(TargetReg(kArg1), arg1);
  LoadConstant(TargetReg(kArg2), arg2);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmMethodRegLocation(ThreadOffset helper_offset,
                                                    int arg0, RegLocation arg2, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadValueDirectFixed(arg2, TargetReg(kArg2));
  LoadCurrMethodDirect(TargetReg(kArg1));
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmMethodImm(ThreadOffset helper_offset, int arg0,
                                            int arg2, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  LoadCurrMethodDirect(TargetReg(kArg1));
  LoadConstant(TargetReg(kArg2), arg2);
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset helper_offset,
                                                         int arg0, RegLocation arg1,
                                                         RegLocation arg2, bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  DCHECK_EQ(arg1.wide, 0U);
  LoadValueDirectFixed(arg1, TargetReg(kArg1));
  if (arg2.wide == 0) {
    LoadValueDirectFixed(arg2, TargetReg(kArg2));
  } else {
    LoadValueDirectWideFixed(arg2, TargetReg(kArg2), TargetReg(kArg3));
  }
  LoadConstant(TargetReg(kArg0), arg0);
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

void Mir2Lir::CallRuntimeHelperRegLocationRegLocationRegLocation(ThreadOffset helper_offset,
                                                                 RegLocation arg0, RegLocation arg1,
                                                                 RegLocation arg2,
                                                                 bool safepoint_pc) {
  int r_tgt = CallHelperSetup(helper_offset);
  DCHECK_EQ(arg0.wide, 0U);
  LoadValueDirectFixed(arg0, TargetReg(kArg0));
  DCHECK_EQ(arg1.wide, 0U);
  LoadValueDirectFixed(arg1, TargetReg(kArg1));
  DCHECK_EQ(arg2.wide, 0U);
  LoadValueDirectFixed(arg2, TargetReg(kArg2));
  ClobberCalleeSave();
  CallHelper(r_tgt, helper_offset, safepoint_pc);
}

/*
 * If there are any ins passed in registers that have not been promoted
 * to a callee-save register, flush them to the frame.  Perform initial
 * assignment of promoted arguments.
 *
 * ArgLocs is an array of location records describing the incoming arguments
 * with one location record per word of argument.
 */
void Mir2Lir::FlushIns(RegLocation* ArgLocs, RegLocation rl_method) {
  /*
   * Dummy up a RegLocation for the incoming Method*
   * It will attempt to keep kArg0 live (or copy it to home location
   * if promoted).
   */
  RegLocation rl_src = rl_method;
  rl_src.location = kLocPhysReg;
  rl_src.low_reg = TargetReg(kArg0);
  rl_src.home = false;
  MarkLive(rl_src.low_reg, rl_src.s_reg_low);
  StoreValue(rl_method, rl_src);
  // If Method* has been promoted, explicitly flush
  if (rl_method.location == kLocPhysReg) {
    StoreWordDisp(TargetReg(kSp), 0, TargetReg(kArg0));
  }

  if (cu_->num_ins == 0)
    return;
  const int num_arg_regs = 3;
  static SpecialTargetRegister arg_regs[] = {kArg1, kArg2, kArg3};
  int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
  /*
   * Copy incoming arguments to their proper home locations.
   * NOTE: an older version of dx had an issue in which
   * it would reuse static method argument registers.
   * This could result in the same Dalvik virtual register
   * being promoted to both core and fp regs.  To account for this,
   * we only copy to the corresponding promoted physical register
   * if it matches the type of the SSA name for the incoming
   * argument.  It is also possible that long and double arguments
   * end up half-promoted.  In those cases, we must flush the promoted
   * half to memory as well.
   */
  for (int i = 0; i < cu_->num_ins; i++) {
    PromotionMap* v_map = &promotion_map_[start_vreg + i];
    if (i < num_arg_regs) {
      // If arriving in register
      bool need_flush = true;
      RegLocation* t_loc = &ArgLocs[i];
      if ((v_map->core_location == kLocPhysReg) && !t_loc->fp) {
        OpRegCopy(v_map->core_reg, TargetReg(arg_regs[i]));
        need_flush = false;
      } else if ((v_map->fp_location == kLocPhysReg) && t_loc->fp) {
        OpRegCopy(v_map->FpReg, TargetReg(arg_regs[i]));
        need_flush = false;
      } else {
        need_flush = true;
      }

      // For wide args, force flush if not fully promoted
      if (t_loc->wide) {
        PromotionMap* p_map = v_map + (t_loc->high_word ? -1 : +1);
        // Is only half promoted?
        need_flush |= (p_map->core_location != v_map->core_location) ||
            (p_map->fp_location != v_map->fp_location);
        if ((cu_->instruction_set == kThumb2) && t_loc->fp && !need_flush) {
          /*
           * In Arm, a double is represented as a pair of consecutive single float
           * registers starting at an even number.  It's possible that both Dalvik vRegs
           * representing the incoming double were independently promoted as singles - but
           * not in a form usable as a double.  If so, we need to flush - even though the
           * incoming arg appears fully in register.  At this point in the code, both
           * halves of the double are promoted.  Make sure they are in a usable form.
           */
          int lowreg_index = start_vreg + i + (t_loc->high_word ? -1 : 0);
          int low_reg = promotion_map_[lowreg_index].FpReg;
          int high_reg = promotion_map_[lowreg_index + 1].FpReg;
          if (((low_reg & 0x1) != 0) || (high_reg != (low_reg + 1))) {
            need_flush = true;
          }
        }
      }
      if (need_flush) {
        StoreBaseDisp(TargetReg(kSp), SRegOffset(start_vreg + i),
                      TargetReg(arg_regs[i]), kWord);
      }
    } else {
      // If arriving in frame & promoted
      if (v_map->core_location == kLocPhysReg) {
        LoadWordDisp(TargetReg(kSp), SRegOffset(start_vreg + i),
                     v_map->core_reg);
      }
      if (v_map->fp_location == kLocPhysReg) {
        LoadWordDisp(TargetReg(kSp), SRegOffset(start_vreg + i),
                     v_map->FpReg);
      }
    }
  }
}

/*
 * Bit of a hack here - in the absence of a real scheduling pass,
 * emit the next instruction in static & direct invoke sequences.
 */
static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info,
                          int state, const MethodReference& target_method,
                          uint32_t unused,
                          uintptr_t direct_code, uintptr_t direct_method,
                          InvokeType type) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  if (cu->instruction_set != kThumb2) {
    // Disable sharpening
    direct_code = 0;
    direct_method = 0;
  }
  if (direct_code != 0 && direct_method != 0) {
    switch (state) {
    case 0:  // Get the current Method* [sets kArg0]
      if (direct_code != static_cast<unsigned int>(-1)) {
        cg->LoadConstant(cg->TargetReg(kInvokeTgt), direct_code);
      } else {
        CHECK_EQ(cu->dex_file, target_method.dex_file);
        LIR* data_target = cg->ScanLiteralPool(cg->code_literal_list_,
                                               target_method.dex_method_index, 0);
        if (data_target == NULL) {
          data_target = cg->AddWordData(&cg->code_literal_list_, target_method.dex_method_index);
          data_target->operands[1] = type;
        }
        LIR* load_pc_rel = cg->OpPcRelLoad(cg->TargetReg(kInvokeTgt), data_target);
        cg->AppendLIR(load_pc_rel);
        DCHECK_EQ(cu->instruction_set, kThumb2) << reinterpret_cast<void*>(data_target);
      }
      if (direct_method != static_cast<unsigned int>(-1)) {
        cg->LoadConstant(cg->TargetReg(kArg0), direct_method);
      } else {
        CHECK_EQ(cu->dex_file, target_method.dex_file);
        LIR* data_target = cg->ScanLiteralPool(cg->method_literal_list_,
                                               target_method.dex_method_index, 0);
        if (data_target == NULL) {
          data_target = cg->AddWordData(&cg->method_literal_list_, target_method.dex_method_index);
          data_target->operands[1] = type;
        }
        LIR* load_pc_rel = cg->OpPcRelLoad(cg->TargetReg(kArg0), data_target);
        cg->AppendLIR(load_pc_rel);
        DCHECK_EQ(cu->instruction_set, kThumb2) << reinterpret_cast<void*>(data_target);
      }
      break;
    default:
      return -1;
    }
  } else {
    switch (state) {
    case 0:  // Get the current Method* [sets kArg0]
      // TUNING: we can save a reg copy if Method* has been promoted.
      cg->LoadCurrMethodDirect(cg->TargetReg(kArg0));
      break;
    case 1:  // Get method->dex_cache_resolved_methods_
      cg->LoadWordDisp(cg->TargetReg(kArg0),
                       mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(),
                       cg->TargetReg(kArg0));
      // Set up direct code if known.
      if (direct_code != 0) {
        if (direct_code != static_cast<unsigned int>(-1)) {
          cg->LoadConstant(cg->TargetReg(kInvokeTgt), direct_code);
        } else {
          CHECK_EQ(cu->dex_file, target_method.dex_file);
          LIR* data_target = cg->ScanLiteralPool(cg->code_literal_list_,
                                                 target_method.dex_method_index, 0);
          if (data_target == NULL) {
            data_target = cg->AddWordData(&cg->code_literal_list_, target_method.dex_method_index);
            data_target->operands[1] = type;
          }
          LIR* load_pc_rel = cg->OpPcRelLoad(cg->TargetReg(kInvokeTgt), data_target);
          cg->AppendLIR(load_pc_rel);
          DCHECK_EQ(cu->instruction_set, kThumb2) << reinterpret_cast<void*>(data_target);
        }
      }
      break;
    case 2:  // Grab target method*
      CHECK_EQ(cu->dex_file, target_method.dex_file);
      cg->LoadWordDisp(cg->TargetReg(kArg0),
                       mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                       (target_method.dex_method_index * 4),
                       cg->TargetReg(kArg0));
      break;
    case 3:  // Grab the code from the method*
      if (cu->instruction_set != kX86) {
        if (direct_code == 0) {
          cg->LoadWordDisp(cg->TargetReg(kArg0),
                           mirror::ArtMethod::GetEntryPointFromCompiledCodeOffset().Int32Value(),
                           cg->TargetReg(kInvokeTgt));
        }
        break;
      }
      // Intentional fallthrough for x86
    default:
      return -1;
    }
  }
  return state + 1;
}

/*
 * Bit of a hack here - in the absence of a real scheduling pass,
 * emit the next instruction in a virtual invoke sequence.
 * We can use kLr as a temp prior to target address loading
 * Note also that we'll load the first argument ("this") into
 * kArg1 here rather than the standard LoadArgRegs.
 */
static int NextVCallInsn(CompilationUnit* cu, CallInfo* info,
                         int state, const MethodReference& target_method,
                         uint32_t method_idx, uintptr_t unused, uintptr_t unused2,
                         InvokeType unused3) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  /*
   * This is the fast path in which the target virtual method is
   * fully resolved at compile time.
   */
  switch (state) {
  case 0: {  // Get "this" [set kArg1]
    RegLocation rl_arg = info->args[0];
    cg->LoadValueDirectFixed(rl_arg, cg->TargetReg(kArg1));
    break;
  }
  case 1:  // Is "this" null? [use kArg1]
    cg->GenNullCheck(info->args[0].s_reg_low, cg->TargetReg(kArg1), info->opt_flags);
    // get this->klass_ [use kArg1, set kInvokeTgt]
    cg->LoadWordDisp(cg->TargetReg(kArg1), mirror::Object::ClassOffset().Int32Value(),
                     cg->TargetReg(kInvokeTgt));
    break;
  case 2:  // Get this->klass_->vtable [use kInvokeTgt, set kInvokeTgt]
    cg->LoadWordDisp(cg->TargetReg(kInvokeTgt), mirror::Class::VTableOffset().Int32Value(),
                     cg->TargetReg(kInvokeTgt));
    break;
  case 3:  // Get target method [use kInvokeTgt, set kArg0]
    cg->LoadWordDisp(cg->TargetReg(kInvokeTgt), (method_idx * 4) +
                     mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value(),
                     cg->TargetReg(kArg0));
    break;
  case 4:  // Get the compiled code address [uses kArg0, sets kInvokeTgt]
    if (cu->instruction_set != kX86) {
      cg->LoadWordDisp(cg->TargetReg(kArg0),
                       mirror::ArtMethod::GetEntryPointFromCompiledCodeOffset().Int32Value(),
                       cg->TargetReg(kInvokeTgt));
      break;
    }
    // Intentional fallthrough for X86
  default:
    return -1;
  }
  return state + 1;
}

/*
 * All invoke-interface calls bounce off of art_quick_invoke_interface_trampoline,
 * which will locate the target and continue on via a tail call.
 */
static int NextInterfaceCallInsn(CompilationUnit* cu, CallInfo* info, int state,
                                 const MethodReference& target_method,
                                 uint32_t unused, uintptr_t unused2,
                                 uintptr_t direct_method, InvokeType unused4) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  if (cu->instruction_set != kThumb2) {
    // Disable sharpening
    direct_method = 0;
  }
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampoline);

  if (direct_method != 0) {
    switch (state) {
    case 0:  // Load the trampoline target [sets kInvokeTgt].
      if (cu->instruction_set != kX86) {
        cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(),
                         cg->TargetReg(kInvokeTgt));
      }
      // Get the interface Method* [sets kArg0]
      if (direct_method != static_cast<unsigned int>(-1)) {
        cg->LoadConstant(cg->TargetReg(kArg0), direct_method);
      } else {
        CHECK_EQ(cu->dex_file, target_method.dex_file);
        LIR* data_target = cg->ScanLiteralPool(cg->method_literal_list_,
                                               target_method.dex_method_index, 0);
        if (data_target == NULL) {
          data_target = cg->AddWordData(&cg->method_literal_list_,
                                        target_method.dex_method_index);
          data_target->operands[1] = kInterface;
        }
        LIR* load_pc_rel = cg->OpPcRelLoad(cg->TargetReg(kArg0), data_target);
        cg->AppendLIR(load_pc_rel);
        DCHECK_EQ(cu->instruction_set, kThumb2) << reinterpret_cast<void*>(data_target);
      }
      break;
    default:
      return -1;
    }
  } else {
    switch (state) {
    case 0:
      // Get the current Method* [sets kArg0] - TUNING: remove copy of method if it is promoted.
      cg->LoadCurrMethodDirect(cg->TargetReg(kArg0));
      // Load the trampoline target [sets kInvokeTgt].
      if (cu->instruction_set != kX86) {
        cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(),
                         cg->TargetReg(kInvokeTgt));
      }
      break;
    case 1:  // Get method->dex_cache_resolved_methods_ [set/use kArg0]
      cg->LoadWordDisp(cg->TargetReg(kArg0),
                       mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(),
                       cg->TargetReg(kArg0));
      break;
    case 2:  // Grab target method* [set/use kArg0]
      CHECK_EQ(cu->dex_file, target_method.dex_file);
      cg->LoadWordDisp(cg->TargetReg(kArg0),
                       mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                       (target_method.dex_method_index * 4),
                       cg->TargetReg(kArg0));
      break;
    default:
      return -1;
    }
  }
  return state + 1;
}

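/*
 * Slow-path invoke: the target method is not fully resolved at compile time,
 * so load the method index into kArg0 and let the given trampoline locate
 * and invoke the callee.
 */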
static int NextInvokeInsnSP(CompilationUnit* cu, CallInfo* info, ThreadOffset trampoline,
                            int state, const MethodReference& target_method,
                            uint32_t method_idx) {
  Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
  /*
   * This handles the case in which the base method is not fully
   * resolved at compile time; we bail to a runtime helper.
   */
  if (state == 0) {
    if (cu->instruction_set != kX86) {
      // Load trampoline target
      cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(), cg->TargetReg(kInvokeTgt));
    }
    // Load kArg0 with method index
    CHECK_EQ(cu->dex_file, target_method.dex_file);
    cg->LoadConstant(cg->TargetReg(kArg0), target_method.dex_method_index);
    return 1;
  }
  return -1;
}

static int NextStaticCallInsnSP(CompilationUnit* cu, CallInfo* info,
                                int state,
                                const MethodReference& target_method,
                                uint32_t method_idx,
                                uintptr_t unused, uintptr_t unused2,
                                InvokeType unused3) {
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeStaticTrampolineWithAccessCheck);
  return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
}

static int NextDirectCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
                                const MethodReference& target_method,
                                uint32_t method_idx, uintptr_t unused,
                                uintptr_t unused2, InvokeType unused3) {
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeDirectTrampolineWithAccessCheck);
  return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
}

static int NextSuperCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
                               const MethodReference& target_method,
                               uint32_t method_idx, uintptr_t unused,
                               uintptr_t unused2, InvokeType unused3) {
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeSuperTrampolineWithAccessCheck);
  return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
}

static int NextVCallInsnSP(CompilationUnit* cu, CallInfo* info, int state,
                           const MethodReference& target_method,
                           uint32_t method_idx, uintptr_t unused,
                           uintptr_t unused2, InvokeType unused3) {
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeVirtualTrampolineWithAccessCheck);
  return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
}

static int NextInterfaceCallInsnWithAccessCheck(CompilationUnit* cu,
                                                CallInfo* info, int state,
                                                const MethodReference& target_method,
                                                uint32_t unused,
                                                uintptr_t unused2, uintptr_t unused3,
                                                InvokeType unused4) {
  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampolineWithAccessCheck);
  return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
}

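/*
 * Load the leading arguments into the fixed argument registers (kArg1..kArg3),
 * emitting the next instruction of the invoke sequence between loads.
 * Wide arguments occupy a register pair.
 */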
int Mir2Lir::LoadArgRegs(CallInfo* info, int call_state,
                         NextCallInsn next_call_insn,
                         const MethodReference& target_method,
                         uint32_t vtable_idx, uintptr_t direct_code,
                         uintptr_t direct_method, InvokeType type, bool skip_this) {
  int last_arg_reg = TargetReg(kArg3);
  int next_reg = TargetReg(kArg1);
  int next_arg = 0;
  if (skip_this) {
    next_reg++;
    next_arg++;
  }
  for (; (next_reg <= last_arg_reg) && (next_arg < info->num_arg_words); next_reg++) {
    RegLocation rl_arg = info->args[next_arg++];
    rl_arg = UpdateRawLoc(rl_arg);
    if (rl_arg.wide && (next_reg <= TargetReg(kArg2))) {
      LoadValueDirectWideFixed(rl_arg, next_reg, next_reg + 1);
      next_reg++;
      next_arg++;
    } else {
      if (rl_arg.wide) {
        rl_arg.wide = false;
        rl_arg.is_const = false;
      }
      LoadValueDirectFixed(rl_arg, next_reg);
    }
    call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                direct_code, direct_method, type);
  }
  return call_state;
}

/*
 * Load up to 5 arguments, the first three of which will be in
 * kArg1 .. kArg3.  On entry kArg0 contains the current method pointer,
 * and as part of the load sequence, it must be replaced with
 * the target method pointer.  Note, this may also be called
 * for "range" variants if the number of arguments is 5 or fewer.
 */
int Mir2Lir::GenDalvikArgsNoRange(CallInfo* info,
                                  int call_state, LIR** pcrLabel, NextCallInsn next_call_insn,
                                  const MethodReference& target_method,
                                  uint32_t vtable_idx, uintptr_t direct_code,
                                  uintptr_t direct_method, InvokeType type, bool skip_this) {
  RegLocation rl_arg;

  /* If no arguments, just return */
  if (info->num_arg_words == 0)
    return call_state;

  call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                              direct_code, direct_method, type);

  DCHECK_LE(info->num_arg_words, 5);
  if (info->num_arg_words > 3) {
    int32_t next_use = 3;
    // Detect special case of wide arg spanning arg3/arg4
    RegLocation rl_use0 = info->args[0];
    RegLocation rl_use1 = info->args[1];
    RegLocation rl_use2 = info->args[2];
    if (((!rl_use0.wide && !rl_use1.wide) || rl_use0.wide) &&
        rl_use2.wide) {
      int reg = -1;
      // Wide spans, we need the 2nd half of uses[2].
      rl_arg = UpdateLocWide(rl_use2);
      if (rl_arg.location == kLocPhysReg) {
        reg = rl_arg.high_reg;
      } else {
        // kArg2 & rArg3 can safely be used here
        reg = TargetReg(kArg3);
        LoadWordDisp(TargetReg(kSp), SRegOffset(rl_arg.s_reg_low) + 4, reg);
        call_state = next_call_insn(cu_, info, call_state, target_method,
                                    vtable_idx, direct_code, direct_method, type);
      }
      StoreBaseDisp(TargetReg(kSp), (next_use + 1) * 4, reg, kWord);
      StoreBaseDisp(TargetReg(kSp), 16 /* (3+1)*4 */, reg, kWord);
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
      next_use++;
    }
    // Loop through the rest
    while (next_use < info->num_arg_words) {
      int low_reg;
      int high_reg = -1;
      rl_arg = info->args[next_use];
      rl_arg = UpdateRawLoc(rl_arg);
      if (rl_arg.location == kLocPhysReg) {
        low_reg = rl_arg.low_reg;
        high_reg = rl_arg.high_reg;
      } else {
        low_reg = TargetReg(kArg2);
        if (rl_arg.wide) {
          high_reg = TargetReg(kArg3);
          LoadValueDirectWideFixed(rl_arg, low_reg, high_reg);
        } else {
          LoadValueDirectFixed(rl_arg, low_reg);
        }
        call_state = next_call_insn(cu_, info, call_state, target_method,
                                    vtable_idx, direct_code, direct_method, type);
      }
      int outs_offset = (next_use + 1) * 4;
      if (rl_arg.wide) {
        StoreBaseDispWide(TargetReg(kSp), outs_offset, low_reg, high_reg);
        next_use += 2;
      } else {
        StoreWordDisp(TargetReg(kSp), outs_offset, low_reg);
        next_use++;
      }
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
    }
  }

  call_state = LoadArgRegs(info, call_state, next_call_insn,
                           target_method, vtable_idx, direct_code, direct_method,
                           type, skip_this);

  if (pcrLabel) {
    *pcrLabel = GenNullCheck(info->args[0].s_reg_low, TargetReg(kArg1), info->opt_flags);
  }
  return call_state;
}

/*
 * May have 0+ arguments (also used for jumbo).  Note that
 * source virtual registers may be in physical registers, so may
 * need to be flushed to home location before copying.  This
 * applies to arg3 and above (see below).
 *
 * Two general strategies:
 *    If < 20 arguments
 *       Pass args 3-18 using vldm/vstm block copy
 *       Pass arg0, arg1 & arg2 in kArg1-kArg3
 *    If 20+ arguments
 *       Pass args arg19+ using memcpy block copy
 *       Pass arg0, arg1 & arg2 in kArg1-kArg3
 *
 */
int Mir2Lir::GenDalvikArgsRange(CallInfo* info, int call_state,
                                LIR** pcrLabel, NextCallInsn next_call_insn,
                                const MethodReference& target_method,
                                uint32_t vtable_idx, uintptr_t direct_code, uintptr_t direct_method,
                                InvokeType type, bool skip_this) {
  // If we can treat it as non-range (Jumbo ops will use range form)
  if (info->num_arg_words <= 5)
    return GenDalvikArgsNoRange(info, call_state, pcrLabel,
                                next_call_insn, target_method, vtable_idx,
                                direct_code, direct_method, type, skip_this);
  /*
   * First load the non-register arguments.  Both forms expect all
   * of the source arguments to be in their home frame location, so
   * scan the s_reg names and flush any that have been promoted to
   * frame backing storage.
   */
  // Scan the rest of the args - if in phys_reg flush to memory
  for (int next_arg = 0; next_arg < info->num_arg_words;) {
    RegLocation loc = info->args[next_arg];
    if (loc.wide) {
      loc = UpdateLocWide(loc);
      if ((next_arg >= 2) && (loc.location == kLocPhysReg)) {
        StoreBaseDispWide(TargetReg(kSp), SRegOffset(loc.s_reg_low),
                          loc.low_reg, loc.high_reg);
      }
      next_arg += 2;
    } else {
      loc = UpdateLoc(loc);
      if ((next_arg >= 3) && (loc.location == kLocPhysReg)) {
        StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low),
                      loc.low_reg, kWord);
      }
      next_arg++;
    }
  }

  int start_offset = SRegOffset(info->args[3].s_reg_low);
  int outs_offset = 4 /* Method* */ + (3 * 4);
  if (cu_->instruction_set != kThumb2) {
    // Generate memcpy
    OpRegRegImm(kOpAdd, TargetReg(kArg0), TargetReg(kSp), outs_offset);
    OpRegRegImm(kOpAdd, TargetReg(kArg1), TargetReg(kSp), start_offset);
    CallRuntimeHelperRegRegImm(QUICK_ENTRYPOINT_OFFSET(pMemcpy), TargetReg(kArg0),
                               TargetReg(kArg1), (info->num_arg_words - 3) * 4, false);
  } else {
    if (info->num_arg_words >= 20) {
      // Generate memcpy
      OpRegRegImm(kOpAdd, TargetReg(kArg0), TargetReg(kSp), outs_offset);
      OpRegRegImm(kOpAdd, TargetReg(kArg1), TargetReg(kSp), start_offset);
      CallRuntimeHelperRegRegImm(QUICK_ENTRYPOINT_OFFSET(pMemcpy), TargetReg(kArg0),
                                 TargetReg(kArg1), (info->num_arg_words - 3) * 4, false);
    } else {
      // Use vldm/vstm pair using kArg3 as a temp
      int regs_left = std::min(info->num_arg_words - 3, 16);
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
      OpRegRegImm(kOpAdd, TargetReg(kArg3), TargetReg(kSp), start_offset);
      LIR* ld = OpVldm(TargetReg(kArg3), regs_left);
      // TUNING: loosen barrier
      ld->u.m.def_mask = ENCODE_ALL;
      SetMemRefType(ld, true /* is_load */, kDalvikReg);
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
      OpRegRegImm(kOpAdd, TargetReg(kArg3), TargetReg(kSp), 4 /* Method* */ + (3 * 4));
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
      LIR* st = OpVstm(TargetReg(kArg3), regs_left);
      SetMemRefType(st, false /* is_load */, kDalvikReg);
      st->u.m.def_mask = ENCODE_ALL;
      call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                                  direct_code, direct_method, type);
    }
  }

  call_state = LoadArgRegs(info, call_state, next_call_insn,
                           target_method, vtable_idx, direct_code, direct_method,
                           type, skip_this);

  call_state = next_call_insn(cu_, info, call_state, target_method, vtable_idx,
                              direct_code, direct_method, type);
  if (pcrLabel) {
    *pcrLabel = GenNullCheck(info->args[0].s_reg_low, TargetReg(kArg1), info->opt_flags);
  }
  return call_state;
}

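// Pick the destination for an inlined intrinsic: the call's own result
// location if it has one, otherwise the standard return location.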
RegLocation Mir2Lir::InlineTarget(CallInfo* info) {
  RegLocation res;
  if (info->result.location == kLocInvalid) {
    res = GetReturn(false);
  } else {
    res = info->result;
  }
  return res;
}

RegLocation Mir2Lir::InlineTargetWide(CallInfo* info) {
  RegLocation res;
  if (info->result.location == kLocInvalid) {
    res = GetReturnWide(false);
  } else {
    res = info->result;
  }
  return res;
}

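// Inlined String.charAt(): null-check the receiver, optionally range-check the
// index against the count field, then load the 16-bit char from the backing
// array.  A bounds failure branches to an intrinsic-retry launch pad.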
bool Mir2Lir::GenInlinedCharAt(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  // Location of reference to data array
  int value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count
  int count_offset = mirror::String::CountOffset().Int32Value();
  // Starting offset within data array
  int offset_offset = mirror::String::OffsetOffset().Int32Value();
  // Start of char data within array_
  int data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Int32Value();

  RegLocation rl_obj = info->args[0];
  RegLocation rl_idx = info->args[1];
  rl_obj = LoadValue(rl_obj, kCoreReg);
  rl_idx = LoadValue(rl_idx, kCoreReg);
  int reg_max;
  GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, info->opt_flags);
  bool range_check = (!(info->opt_flags & MIR_IGNORE_RANGE_CHECK));
  LIR* launch_pad = NULL;
  int reg_off = INVALID_REG;
  int reg_ptr = INVALID_REG;
  if (cu_->instruction_set != kX86) {
    reg_off = AllocTemp();
    reg_ptr = AllocTemp();
    if (range_check) {
      reg_max = AllocTemp();
      LoadWordDisp(rl_obj.low_reg, count_offset, reg_max);
    }
    LoadWordDisp(rl_obj.low_reg, offset_offset, reg_off);
    LoadWordDisp(rl_obj.low_reg, value_offset, reg_ptr);
    if (range_check) {
      // Set up a launch pad to allow retry in case of bounds violation
      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
      intrinsic_launchpads_.Insert(launch_pad);
      OpRegReg(kOpCmp, rl_idx.low_reg, reg_max);
      FreeTemp(reg_max);
      OpCondBranch(kCondCs, launch_pad);
    }
  } else {
    if (range_check) {
      reg_max = AllocTemp();
      LoadWordDisp(rl_obj.low_reg, count_offset, reg_max);
      // Set up a launch pad to allow retry in case of bounds violation
      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
      intrinsic_launchpads_.Insert(launch_pad);
      OpRegReg(kOpCmp, rl_idx.low_reg, reg_max);
      FreeTemp(reg_max);
      OpCondBranch(kCondCc, launch_pad);
    }
    reg_off = AllocTemp();
    reg_ptr = AllocTemp();
    LoadWordDisp(rl_obj.low_reg, offset_offset, reg_off);
    LoadWordDisp(rl_obj.low_reg, value_offset, reg_ptr);
  }
  OpRegImm(kOpAdd, reg_ptr, data_offset);
  OpRegReg(kOpAdd, reg_off, rl_idx.low_reg);
  FreeTemp(rl_obj.low_reg);
  FreeTemp(rl_idx.low_reg);
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  LoadBaseIndexed(reg_ptr, reg_off, rl_result.low_reg, 1, kUnsignedHalf);
  FreeTemp(reg_off);
  FreeTemp(reg_ptr);
  StoreValue(rl_dest, rl_result);
  if (range_check) {
    launch_pad->operands[2] = 0;  // no resumption
  }
  // Record that we've already inlined & null checked
  info->opt_flags |= (MIR_INLINED | MIR_IGNORE_NULL_CHECK);
  return true;
}

// Generates an inlined String.isEmpty or String.length.
bool Mir2Lir::GenInlinedStringIsEmptyOrLength(CallInfo* info, bool is_empty) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  // dst = src.length();
  RegLocation rl_obj = info->args[0];
  rl_obj = LoadValue(rl_obj, kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, info->opt_flags);
  LoadWordDisp(rl_obj.low_reg, mirror::String::CountOffset().Int32Value(), rl_result.low_reg);
  if (is_empty) {
    // dst = (dst == 0);
    if (cu_->instruction_set == kThumb2) {
      int t_reg = AllocTemp();
      OpRegReg(kOpNeg, t_reg, rl_result.low_reg);
      OpRegRegReg(kOpAdc, rl_result.low_reg, rl_result.low_reg, t_reg);
    } else {
      DCHECK_EQ(cu_->instruction_set, kX86);
      OpRegImm(kOpSub, rl_result.low_reg, 1);
      OpRegImm(kOpLsr, rl_result.low_reg, 31);
    }
  }
  StoreValue(rl_dest, rl_result);
  return true;
}

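// Branch-free absolute value: sign = x >> 31 (arithmetic), abs = (x + sign) ^ sign.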
bool Mir2Lir::GenInlinedAbsInt(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src = info->args[0];
  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  int sign_reg = AllocTemp();
  // abs(x) = y<=x>>31, (x+y)^y.
  OpRegRegImm(kOpAsr, sign_reg, rl_src.low_reg, 31);
  OpRegRegReg(kOpAdd, rl_result.low_reg, rl_src.low_reg, sign_reg);
  OpRegReg(kOpXor, rl_result.low_reg, sign_reg);
  StoreValue(rl_dest, rl_result);
  return true;
}

bool Mir2Lir::GenInlinedAbsLong(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  if (cu_->instruction_set == kThumb2) {
    RegLocation rl_src = info->args[0];
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_dest = InlineTargetWide(info);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    int sign_reg = AllocTemp();
    // abs(x) = y<=x>>31, (x+y)^y.
    OpRegRegImm(kOpAsr, sign_reg, rl_src.high_reg, 31);
    OpRegRegReg(kOpAdd, rl_result.low_reg, rl_src.low_reg, sign_reg);
    OpRegRegReg(kOpAdc, rl_result.high_reg, rl_src.high_reg, sign_reg);
    OpRegReg(kOpXor, rl_result.low_reg, sign_reg);
    OpRegReg(kOpXor, rl_result.high_reg, sign_reg);
    StoreValueWide(rl_dest, rl_result);
    return true;
  } else {
    DCHECK_EQ(cu_->instruction_set, kX86);
    // Reuse source registers to avoid running out of temps
    RegLocation rl_src = info->args[0];
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_dest = InlineTargetWide(info);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegCopyWide(rl_result.low_reg, rl_result.high_reg, rl_src.low_reg, rl_src.high_reg);
    FreeTemp(rl_src.low_reg);
    FreeTemp(rl_src.high_reg);
    int sign_reg = AllocTemp();
    // abs(x) = y<=x>>31, (x+y)^y.
    OpRegRegImm(kOpAsr, sign_reg, rl_result.high_reg, 31);
    OpRegReg(kOpAdd, rl_result.low_reg, sign_reg);
    OpRegReg(kOpAdc, rl_result.high_reg, sign_reg);
    OpRegReg(kOpXor, rl_result.low_reg, sign_reg);
    OpRegReg(kOpXor, rl_result.high_reg, sign_reg);
    StoreValueWide(rl_dest, rl_result);
    return true;
  }
}

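// Float.floatToRawIntBits/intBitsToFloat and the Double equivalents preserve
// the bit pattern, so the inlined form is just a (wide) value move.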
bool Mir2Lir::GenInlinedFloatCvt(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src = info->args[0];
  RegLocation rl_dest = InlineTarget(info);
  StoreValue(rl_dest, rl_src);
  return true;
}

bool Mir2Lir::GenInlinedDoubleCvt(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  RegLocation rl_src = info->args[0];
  RegLocation rl_dest = InlineTargetWide(info);
  StoreValueWide(rl_dest, rl_src);
  return true;
}

/*
 * Fast String.indexOf(I) & (II).  Tests for simple case of char <= 0xffff,
 * otherwise bails to standard library code.
 */
bool Mir2Lir::GenInlinedIndexOf(CallInfo* info, bool zero_based) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  ClobberCalleeSave();
  LockCallTemps();  // Using fixed registers
  int reg_ptr = TargetReg(kArg0);
  int reg_char = TargetReg(kArg1);
  int reg_start = TargetReg(kArg2);

  RegLocation rl_obj = info->args[0];
  RegLocation rl_char = info->args[1];
  RegLocation rl_start = info->args[2];
  LoadValueDirectFixed(rl_obj, reg_ptr);
  LoadValueDirectFixed(rl_char, reg_char);
  if (zero_based) {
    LoadConstant(reg_start, 0);
  } else {
    LoadValueDirectFixed(rl_start, reg_start);
  }
  int r_tgt = (cu_->instruction_set != kX86) ? LoadHelper(QUICK_ENTRYPOINT_OFFSET(pIndexOf)) : 0;
  GenNullCheck(rl_obj.s_reg_low, reg_ptr, info->opt_flags);
  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
  intrinsic_launchpads_.Insert(launch_pad);
  OpCmpImmBranch(kCondGt, reg_char, 0xFFFF, launch_pad);
  // NOTE: not a safepoint
  if (cu_->instruction_set != kX86) {
    OpReg(kOpBlx, r_tgt);
  } else {
    OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(pIndexOf));
  }
  LIR* resume_tgt = NewLIR0(kPseudoTargetLabel);
  launch_pad->operands[2] = reinterpret_cast<uintptr_t>(resume_tgt);
  // Record that we've already inlined & null checked
  info->opt_flags |= (MIR_INLINED | MIR_IGNORE_NULL_CHECK);
  RegLocation rl_return = GetReturn(false);
  RegLocation rl_dest = InlineTarget(info);
  StoreValue(rl_dest, rl_return);
  return true;
}

/* Fast String.compareTo(Ljava/lang/String;)I. */
bool Mir2Lir::GenInlinedStringCompareTo(CallInfo* info) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  ClobberCalleeSave();
  LockCallTemps();  // Using fixed registers
  int reg_this = TargetReg(kArg0);
  int reg_cmp = TargetReg(kArg1);

  RegLocation rl_this = info->args[0];
  RegLocation rl_cmp = info->args[1];
  LoadValueDirectFixed(rl_this, reg_this);
  LoadValueDirectFixed(rl_cmp, reg_cmp);
  int r_tgt = (cu_->instruction_set != kX86) ?
      LoadHelper(QUICK_ENTRYPOINT_OFFSET(pStringCompareTo)) : 0;
  GenNullCheck(rl_this.s_reg_low, reg_this, info->opt_flags);
  // TUNING: check if rl_cmp.s_reg_low is already null checked
  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
  intrinsic_launchpads_.Insert(launch_pad);
  OpCmpImmBranch(kCondEq, reg_cmp, 0, launch_pad);
  // NOTE: not a safepoint
  if (cu_->instruction_set != kX86) {
    OpReg(kOpBlx, r_tgt);
  } else {
    OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(pStringCompareTo));
  }
  launch_pad->operands[2] = 0;  // No return possible
  // Record that we've already inlined & null checked
  info->opt_flags |= (MIR_INLINED | MIR_IGNORE_NULL_CHECK);
  RegLocation rl_return = GetReturn(false);
  RegLocation rl_dest = InlineTarget(info);
  StoreValue(rl_dest, rl_return);
  return true;
}

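// Inlined Thread.currentThread(): load the managed thread peer from the
// Thread object, via the thread register or, on x86, thread-local memory.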
bool Mir2Lir::GenInlinedCurrentThread(CallInfo* info) {
  RegLocation rl_dest = InlineTarget(info);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  ThreadOffset offset = Thread::PeerOffset();
  if (cu_->instruction_set == kThumb2 || cu_->instruction_set == kMips) {
    LoadWordDisp(TargetReg(kSelf), offset.Int32Value(), rl_result.low_reg);
  } else {
    CHECK(cu_->instruction_set == kX86);
    reinterpret_cast<X86Mir2Lir*>(this)->OpRegThreadMem(kOpMov, rl_result.low_reg, offset);
  }
  StoreValue(rl_dest, rl_result);
  return true;
}

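// Inlined sun.misc.Unsafe.get*(): load the value at object + offset, with a
// memory barrier for the volatile variants.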
bool Mir2Lir::GenInlinedUnsafeGet(CallInfo* info,
                                  bool is_long, bool is_volatile) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  // Unused - RegLocation rl_src_unsafe = info->args[0];
  RegLocation rl_src_obj = info->args[1];  // Object
  RegLocation rl_src_offset = info->args[2];  // long low
  rl_src_offset.wide = 0;  // ignore high half in info->args[3]
  RegLocation rl_dest = InlineTarget(info);  // result reg
  if (is_volatile) {
    GenMemBarrier(kLoadLoad);
  }
  RegLocation rl_object = LoadValue(rl_src_obj, kCoreReg);
  RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (is_long) {
    OpRegReg(kOpAdd, rl_object.low_reg, rl_offset.low_reg);
    LoadBaseDispWide(rl_object.low_reg, 0, rl_result.low_reg, rl_result.high_reg, INVALID_SREG);
    StoreValueWide(rl_dest, rl_result);
  } else {
    LoadBaseIndexed(rl_object.low_reg, rl_offset.low_reg, rl_result.low_reg, 0, kWord);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}

bool Mir2Lir::GenInlinedUnsafePut(CallInfo* info, bool is_long,
                                  bool is_object, bool is_volatile, bool is_ordered) {
  if (cu_->instruction_set == kMips) {
    // TODO - add Mips implementation
    return false;
  }
  if (cu_->instruction_set == kX86 && is_object) {
    // TODO: fix X86, it exhausts registers for card marking.
    return false;
  }
  // Unused - RegLocation rl_src_unsafe = info->args[0];
  RegLocation rl_src_obj = info->args[1];  // Object
  RegLocation rl_src_offset = info->args[2];  // long low
  rl_src_offset.wide = 0;  // ignore high half in info->args[3]
  RegLocation rl_src_value = info->args[4];  // value to store
  if (is_volatile || is_ordered) {
    GenMemBarrier(kStoreStore);
  }
  RegLocation rl_object = LoadValue(rl_src_obj, kCoreReg);
  RegLocation rl_offset = LoadValue(rl_src_offset, kCoreReg);
  RegLocation rl_value;
  if (is_long) {
    rl_value = LoadValueWide(rl_src_value, kCoreReg);
    OpRegReg(kOpAdd, rl_object.low_reg, rl_offset.low_reg);
    StoreBaseDispWide(rl_object.low_reg, 0, rl_value.low_reg, rl_value.high_reg);
  } else {
    rl_value = LoadValue(rl_src_value, kCoreReg);
    StoreBaseIndexed(rl_object.low_reg, rl_offset.low_reg, rl_value.low_reg, 0, kWord);
  }
  if (is_volatile) {
    GenMemBarrier(kStoreLoad);
  }
  if (is_object) {
    MarkGCCard(rl_value.low_reg, rl_object.low_reg);
  }
  return true;
}

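// Dispatch recognized library calls to their inlined implementations.  Matching
// is done on the declaring class descriptor and the pretty-printed method
// signature; returns true if code for the intrinsic was generated.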
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001224bool Mir2Lir::GenIntrinsic(CallInfo* info) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001225 if (info->opt_flags & MIR_INLINED) {
1226 return false;
1227 }
1228 /*
1229 * TODO: move these to a target-specific structured constant array
1230 * and use a generic match function. The list of intrinsics may be
1231 * slightly different depending on target.
1232 * TODO: Fold this into a matching function that runs during
1233 * basic block building. This should be part of the action for
1234 * small method inlining and recognition of the special object init
1235 * method. By doing this during basic block construction, we can also
1236 * take advantage of/generate new useful dataflow info.
1237 */
Ian Rogersfc0e94b2013-09-23 23:51:32 -07001238 const DexFile::MethodId& target_mid = cu_->dex_file->GetMethodId(info->index);
1239 const DexFile::TypeId& declaring_type = cu_->dex_file->GetTypeId(target_mid.class_idx_);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001240 StringPiece tgt_methods_declaring_class(
Ian Rogersfc0e94b2013-09-23 23:51:32 -07001241 cu_->dex_file->StringDataAsStringPieceByIdx(declaring_type.descriptor_idx_));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001242 if (tgt_methods_declaring_class.starts_with("Ljava/lang/Double;")) {
1243 std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
1244 if (tgt_method == "long java.lang.Double.doubleToRawLongBits(double)") {
1245 return GenInlinedDoubleCvt(info);
1246 }
1247 if (tgt_method == "double java.lang.Double.longBitsToDouble(long)") {
1248 return GenInlinedDoubleCvt(info);
1249 }
1250 } else if (tgt_methods_declaring_class.starts_with("Ljava/lang/Float;")) {
1251 std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
1252 if (tgt_method == "int java.lang.Float.float_to_raw_int_bits(float)") {
1253 return GenInlinedFloatCvt(info);
1254 }
1255 if (tgt_method == "float java.lang.Float.intBitsToFloat(int)") {
1256 return GenInlinedFloatCvt(info);
1257 }
  } else if (tgt_methods_declaring_class.starts_with("Ljava/lang/Math;") ||
             tgt_methods_declaring_class.starts_with("Ljava/lang/StrictMath;")) {
    std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
    if (tgt_method == "int java.lang.Math.abs(int)" ||
        tgt_method == "int java.lang.StrictMath.abs(int)") {
      return GenInlinedAbsInt(info);
    }
    if (tgt_method == "long java.lang.Math.abs(long)" ||
        tgt_method == "long java.lang.StrictMath.abs(long)") {
      return GenInlinedAbsLong(info);
    }
    if (tgt_method == "int java.lang.Math.max(int, int)" ||
        tgt_method == "int java.lang.StrictMath.max(int, int)") {
      return GenInlinedMinMaxInt(info, false /* is_min */);
    }
    if (tgt_method == "int java.lang.Math.min(int, int)" ||
        tgt_method == "int java.lang.StrictMath.min(int, int)") {
      return GenInlinedMinMaxInt(info, true /* is_min */);
    }
    if (tgt_method == "double java.lang.Math.sqrt(double)" ||
        tgt_method == "double java.lang.StrictMath.sqrt(double)") {
      return GenInlinedSqrt(info);
    }
  } else if (tgt_methods_declaring_class.starts_with("Ljava/lang/String;")) {
    std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
    if (tgt_method == "char java.lang.String.charAt(int)") {
      return GenInlinedCharAt(info);
    }
    if (tgt_method == "int java.lang.String.compareTo(java.lang.String)") {
      return GenInlinedStringCompareTo(info);
    }
    if (tgt_method == "boolean java.lang.String.isEmpty()") {
      return GenInlinedStringIsEmptyOrLength(info, true /* is_empty */);
    }
    if (tgt_method == "int java.lang.String.indexOf(int, int)") {
      return GenInlinedIndexOf(info, false /* base 0 */);
    }
    if (tgt_method == "int java.lang.String.indexOf(int)") {
      return GenInlinedIndexOf(info, true /* base 0 */);
    }
    if (tgt_method == "int java.lang.String.length()") {
      return GenInlinedStringIsEmptyOrLength(info, false /* is_empty */);
    }
  } else if (tgt_methods_declaring_class.starts_with("Ljava/lang/Thread;")) {
    std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
    if (tgt_method == "java.lang.Thread java.lang.Thread.currentThread()") {
      return GenInlinedCurrentThread(info);
    }
  } else if (tgt_methods_declaring_class.starts_with("Lsun/misc/Unsafe;")) {
    std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
    if (tgt_method == "boolean sun.misc.Unsafe.compareAndSwapInt(java.lang.Object, long, int, int)") {
      return GenInlinedCas32(info, false);
    }
    if (tgt_method == "boolean sun.misc.Unsafe.compareAndSwapObject(java.lang.Object, long, java.lang.Object, java.lang.Object)") {
      return GenInlinedCas32(info, true);
    }
    if (tgt_method == "int sun.misc.Unsafe.getInt(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, false /* is_long */, false /* is_volatile */);
    }
    if (tgt_method == "int sun.misc.Unsafe.getIntVolatile(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, false /* is_long */, true /* is_volatile */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putInt(java.lang.Object, long, int)") {
      return GenInlinedUnsafePut(info, false /* is_long */, false /* is_object */,
                                 false /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putIntVolatile(java.lang.Object, long, int)") {
      return GenInlinedUnsafePut(info, false /* is_long */, false /* is_object */,
                                 true /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putOrderedInt(java.lang.Object, long, int)") {
      return GenInlinedUnsafePut(info, false /* is_long */, false /* is_object */,
                                 false /* is_volatile */, true /* is_ordered */);
    }
    if (tgt_method == "long sun.misc.Unsafe.getLong(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, true /* is_long */, false /* is_volatile */);
    }
    if (tgt_method == "long sun.misc.Unsafe.getLongVolatile(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, true /* is_long */, true /* is_volatile */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putLong(java.lang.Object, long, long)") {
      return GenInlinedUnsafePut(info, true /* is_long */, false /* is_object */,
                                 false /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putLongVolatile(java.lang.Object, long, long)") {
      return GenInlinedUnsafePut(info, true /* is_long */, false /* is_object */,
                                 true /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putOrderedLong(java.lang.Object, long, long)") {
      return GenInlinedUnsafePut(info, true /* is_long */, false /* is_object */,
                                 false /* is_volatile */, true /* is_ordered */);
    }
    if (tgt_method == "java.lang.Object sun.misc.Unsafe.getObject(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, false /* is_long */, false /* is_volatile */);
    }
    if (tgt_method == "java.lang.Object sun.misc.Unsafe.getObjectVolatile(java.lang.Object, long)") {
      return GenInlinedUnsafeGet(info, false /* is_long */, true /* is_volatile */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putObject(java.lang.Object, long, java.lang.Object)") {
      return GenInlinedUnsafePut(info, false /* is_long */, true /* is_object */,
                                 false /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putObjectVolatile(java.lang.Object, long, java.lang.Object)") {
      return GenInlinedUnsafePut(info, false /* is_long */, true /* is_object */,
                                 true /* is_volatile */, false /* is_ordered */);
    }
    if (tgt_method == "void sun.misc.Unsafe.putOrderedObject(java.lang.Object, long, java.lang.Object)") {
      return GenInlinedUnsafePut(info, false /* is_long */, true /* is_object */,
                                 false /* is_volatile */, true /* is_ordered */);
    }
  }
  return false;
}
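
// The TODOs in GenIntrinsic above ask for a target-specific structured constant
// array plus a generic match function in place of the string-compare chain. The
// following is only a hypothetical sketch of what that table-driven matcher could
// look like; none of these names exist elsewhere in the codebase.
namespace {

enum SketchIntrinsicKind {
  kSketchNone = 0,
  kSketchDoubleCvt,
  kSketchFloatCvt,
  kSketchAbsInt,
  // ... one value per GenInlined* helper ...
};

struct SketchIntrinsicEntry {
  const char* declaring_class;  // e.g. "Ljava/lang/Double;"
  const char* pretty_method;    // as produced by PrettyMethod()
  SketchIntrinsicKind kind;
};

// Per-target tables would differ only in which rows they contain.
const SketchIntrinsicEntry kSketchIntrinsics[] = {
  {"Ljava/lang/Double;", "long java.lang.Double.doubleToRawLongBits(double)", kSketchDoubleCvt},
  {"Ljava/lang/Double;", "double java.lang.Double.longBitsToDouble(long)", kSketchDoubleCvt},
  {"Ljava/lang/Float;", "int java.lang.Float.floatToRawIntBits(float)", kSketchFloatCvt},
  {"Ljava/lang/Math;", "int java.lang.Math.abs(int)", kSketchAbsInt},
};

bool SketchEqual(const char* a, const char* b) {
  while (*a != '\0' && *a == *b) {
    ++a;
    ++b;
  }
  return *a == *b;
}

// Generic matcher: a linear scan is shown for clarity; a real implementation could
// keep the table sorted and binary-search it, or run during basic block building.
SketchIntrinsicKind SketchMatchIntrinsic(const char* declaring_class, const char* pretty_method) {
  const int count = static_cast<int>(sizeof(kSketchIntrinsics) / sizeof(kSketchIntrinsics[0]));
  for (int i = 0; i < count; ++i) {
    if (SketchEqual(kSketchIntrinsics[i].declaring_class, declaring_class) &&
        SketchEqual(kSketchIntrinsics[i].pretty_method, pretty_method)) {
      return kSketchIntrinsics[i].kind;
    }
  }
  return kSketchNone;
}

}  // namespace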

void Mir2Lir::GenInvoke(CallInfo* info) {
  if (GenIntrinsic(info)) {
    return;
  }
  InvokeType original_type = info->type;  // avoiding mutation by ComputeInvokeInfo
  int call_state = 0;
  LIR* null_ck;
  LIR** p_null_ck = NULL;
  NextCallInsn next_call_insn;
  FlushAllRegs();  /* Everything to home location */
  // Explicit register usage
  LockCallTemps();

  DexCompilationUnit* cUnit = mir_graph_->GetCurrentDexCompilationUnit();
  MethodReference target_method(cUnit->GetDexFile(), info->index);
  int vtable_idx;
  uintptr_t direct_code;
  uintptr_t direct_method;
  bool skip_this;
  bool fast_path =
      cu_->compiler_driver->ComputeInvokeInfo(mir_graph_->GetCurrentDexCompilationUnit(),
                                              current_dalvik_offset_,
                                              true, true,
                                              &info->type, &target_method,
                                              &vtable_idx,
                                              &direct_code, &direct_method) && !SLOW_INVOKE_PATH;
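  // ComputeInvokeInfo tries to resolve the callee and sharpen the invoke type at compile
  // time; when it fails (or SLOW_INVOKE_PATH forces the slow path) the access-checking
  // call sequences selected below are used instead.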
  if (info->type == kInterface) {
    if (fast_path) {
      p_null_ck = &null_ck;
    }
    next_call_insn = fast_path ? NextInterfaceCallInsn : NextInterfaceCallInsnWithAccessCheck;
    skip_this = false;
  } else if (info->type == kDirect) {
    if (fast_path) {
      p_null_ck = &null_ck;
    }
    next_call_insn = fast_path ? NextSDCallInsn : NextDirectCallInsnSP;
    skip_this = false;
  } else if (info->type == kStatic) {
    next_call_insn = fast_path ? NextSDCallInsn : NextStaticCallInsnSP;
    skip_this = false;
  } else if (info->type == kSuper) {
    DCHECK(!fast_path);  // Fast path is a direct call.
    next_call_insn = NextSuperCallInsnSP;
    skip_this = false;
  } else {
    DCHECK_EQ(info->type, kVirtual);
    next_call_insn = fast_path ? NextVCallInsn : NextVCallInsnSP;
    skip_this = fast_path;
  }
  if (!info->is_range) {
    call_state = GenDalvikArgsNoRange(info, call_state, p_null_ck,
                                      next_call_insn, target_method,
                                      vtable_idx, direct_code, direct_method,
                                      original_type, skip_this);
  } else {
    call_state = GenDalvikArgsRange(info, call_state, p_null_ck,
                                    next_call_insn, target_method, vtable_idx,
                                    direct_code, direct_method, original_type,
                                    skip_this);
  }
  // Finish up any of the call sequence not interleaved in arg loading
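  // Each NextCallInsn step emits the next piece of the call sequence and returns the new
  // state; GenDalvikArgs{NoRange,Range} already advanced call_state while loading the
  // arguments, and a negative state ends the sequence.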
  while (call_state >= 0) {
    call_state = next_call_insn(cu_, info, call_state, target_method,
                                vtable_idx, direct_code, direct_method,
                                original_type);
  }
  LIR* call_inst;
  if (cu_->instruction_set != kX86) {
    call_inst = OpReg(kOpBlx, TargetReg(kInvokeTgt));
  } else {
    if (fast_path && info->type != kInterface) {
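      // kArg0 holds the resolved ArtMethod*; call directly through its compiled-code
      // entry point rather than through a trampoline.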
      call_inst = OpMem(kOpBlx, TargetReg(kArg0),
                        mirror::ArtMethod::GetEntryPointFromCompiledCodeOffset().Int32Value());
    } else {
      ThreadOffset trampoline(-1);
      switch (info->type) {
        case kInterface:
          trampoline = fast_path ? QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampoline)
                                 : QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampolineWithAccessCheck);
          break;
        case kDirect:
          trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeDirectTrampolineWithAccessCheck);
          break;
        case kStatic:
          trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeStaticTrampolineWithAccessCheck);
          break;
        case kSuper:
          trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeSuperTrampolineWithAccessCheck);
          break;
        case kVirtual:
          trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeVirtualTrampolineWithAccessCheck);
          break;
        default:
          LOG(FATAL) << "Unexpected invoke type";
      }
      call_inst = OpThreadMem(kOpBlx, trampoline);
    }
  }
  MarkSafepointPC(call_inst);

  ClobberCalleeSave();
  if (info->result.location != kLocInvalid) {
    // We have a following MOVE_RESULT - do it now.
    if (info->result.wide) {
      RegLocation ret_loc = GetReturnWide(info->result.fp);
      StoreValueWide(info->result, ret_loc);
    } else {
      RegLocation ret_loc = GetReturn(info->result.fp);
      StoreValue(info->result, ret_loc);
    }
  }
}

}  // namespace art