/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions, while a
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

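// Bit mask of the C2 condition flag (bit 10) of the x87 FPU status word. The fprem-based
// float/double remainder code later in this file loops while C2 is still set, i.e. while
// the partial remainder computation is not yet complete.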
static constexpr int kC2ConditionMask = 0x400;

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

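// General shape of the slow paths below: the fast path branches to the slow path's entry
// label, live registers are saved when the runtime call can return (or the exception can
// be caught), a Quick runtime entrypoint is invoked, and control either jumps back to the
// exit label or never returns (fatal paths such as null/bounds/div-zero checks).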
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

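// Slow path for integer/long division and remainder when the divisor is -1. The x86-64
// idivl/idivq instruction raises a #DE fault for MIN_INT / -1 (and MIN_LONG / -1), so the
// main code branches here instead: the quotient is the negated dividend and the remainder
// is 0.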
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(Register reg, Primitive::Type type, bool is_div)
      : cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
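        // Note: on x86-64 a 32-bit register write implicitly zero-extends into the upper
        // 32 bits, so xorl also clears the full 64-bit register for the long remainder.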
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
            Immediate(instruction_->GetStringIndex()));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

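// Slow path for HInstanceOf and HCheckCast type checks. When `is_fatal` is true the check
// can only end by throwing, so live registers are not saved/restored and control never
// returns to the fast path; otherwise the InstanceOf result comes back in RAX and is moved
// to the output location before resuming.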
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : instruction_(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  HInstruction* const instruction_;
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  deoptimize,
                                  deoptimize->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
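// The object to mark is passed in the first runtime-call argument register; the entrypoint
// returns the (possibly forwarded) reference in RAX, which is then moved back to the
// expected output location before resuming the fast path.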
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : instruction_(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : instruction_(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  HInstruction* const instruction_;
  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : instruction_(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

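// Maps a HIR comparison condition to the x86-64 condition code used for integer (and
// pointer) comparisons; kCondB/BE/A/AE are the unsigned variants.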
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
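// Note: these mappings assume the flags were produced by ucomiss/ucomisd, which set
// ZF/CF/PF as in an unsigned comparison; the unordered (NaN) case sets PF and has to be
// handled separately by the caller.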
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movl(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind the label at the end of the "movl" insn.
      __ Bind(&pc_relative_dex_cache_patches_.back().label);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache]
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
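  // Runtime entrypoints live at fixed offsets in the Thread object, which x86-64 addresses
  // through the GS segment register, hence the gs()-prefixed absolute call below.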
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000966 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +0100967 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +0100968}
969
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000970static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +0000971// Use a fake return address register to mimic Quick.
972static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -0400973CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000974 const X86_64InstructionSetFeatures& isa_features,
975 const CompilerOptions& compiler_options,
976 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +0000977 : CodeGenerator(graph,
978 kNumberOfCpuRegisters,
979 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000980 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +0000981 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
982 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +0000983 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +0000984 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
985 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100986 compiler_options,
987 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +0100988 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100989 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000990 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -0400991 move_resolver_(graph->GetArena(), this),
Mark Mendellf55c3e02015-03-26 21:07:46 -0400992 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +0000993 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +0100994 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
995 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000996 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -0400997 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000998 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
999}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001000
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001001InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1002 CodeGeneratorX86_64* codegen)
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001003 : HGraphVisitor(graph),
1004 assembler_(codegen->GetAssembler()),
1005 codegen_(codegen) {}
1006
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001007Location CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type) const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001008 switch (type) {
1009 case Primitive::kPrimLong:
1010 case Primitive::kPrimByte:
1011 case Primitive::kPrimBoolean:
1012 case Primitive::kPrimChar:
1013 case Primitive::kPrimShort:
1014 case Primitive::kPrimInt:
1015 case Primitive::kPrimNot: {
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001016 size_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001017 return Location::RegisterLocation(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001018 }
1019
1020 case Primitive::kPrimFloat:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001021 case Primitive::kPrimDouble: {
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001022 size_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfFloatRegisters);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001023 return Location::FpuRegisterLocation(reg);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001024 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001025
1026 case Primitive::kPrimVoid:
1027 LOG(FATAL) << "Unreachable type " << type;
1028 }
1029
Roland Levillain0d5a2812015-11-13 10:07:31 +00001030 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001031}
1032
Nicolas Geoffray98893962015-01-21 12:32:32 +00001033void CodeGeneratorX86_64::SetupBlockedRegisters(bool is_baseline) const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001034 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001035 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001036
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001037 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001038 blocked_core_registers_[TMP] = true;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001039
Nicolas Geoffray98893962015-01-21 12:32:32 +00001040 if (is_baseline) {
1041 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1042 blocked_core_registers_[kCoreCalleeSaves[i]] = true;
1043 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001044 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1045 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1046 }
Nicolas Geoffray98893962015-01-21 12:32:32 +00001047 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001048}
1049
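// Helpers mapping architectural registers to their DWARF register numbers, used when
// emitting CFI (call frame information) for the prologue and epilogue below.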
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001050static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001051 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001052}
David Srbecky9d8606d2015-04-12 09:35:32 +01001053
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001054static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001055 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001056}
1057
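// Note: descriptive summary added for readability. Prologue layout: an implicit stack
// overflow check (a read at RSP minus the reserved bytes) unless the method is a leaf
// whose frame needs no check; then, for non-empty frames, pushes of the live callee-saved
// core registers, a single RSP adjustment for the remainder of the frame, movsd spills of
// the live callee-saved XMM registers, and finally the store of the incoming ArtMethod*
// at kCurrentMethodStackOffset.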
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001058void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001059 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001060 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001061 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001062 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001063 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001064
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001065 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001066 __ testq(CpuRegister(RAX), Address(
1067 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001068 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001069 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001070
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001071 if (HasEmptyFrame()) {
1072 return;
1073 }
1074
Nicolas Geoffray98893962015-01-21 12:32:32 +00001075 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001076 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001077 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001078 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001079 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1080 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001081 }
1082 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001083
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001084 int adjust = GetFrameSize() - GetCoreSpillSize();
1085 __ subq(CpuRegister(RSP), Immediate(adjust));
1086 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001087 uint32_t xmm_spill_location = GetFpuSpillStart();
1088 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001089
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001090 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1091 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001092 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1093 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1094 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001095 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001096 }
1097
Mathieu Chartiere401d142015-04-22 13:56:20 -07001098 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001099 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001100}
1101
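// Note: descriptive summary added for readability. Epilogue: undoes the prologue in
// reverse order (reload XMM spills, release the frame, pop core callee-saves, ret). The
// CFI state is remembered before and restored after the return so that unwind information
// stays valid for any code emitted after this exit.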
1102void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001103 __ cfi().RememberState();
1104 if (!HasEmptyFrame()) {
1105 uint32_t xmm_spill_location = GetFpuSpillStart();
1106 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1107 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1108 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1109 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1110 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1111 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1112 }
1113 }
1114
1115 int adjust = GetFrameSize() - GetCoreSpillSize();
1116 __ addq(CpuRegister(RSP), Immediate(adjust));
1117 __ cfi().AdjustCFAOffset(-adjust);
1118
1119 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1120 Register reg = kCoreCalleeSaves[i];
1121 if (allocated_registers_.ContainsCoreRegister(reg)) {
1122 __ popq(CpuRegister(reg));
1123 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1124 __ cfi().Restore(DWARFReg(reg));
1125 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001126 }
1127 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001128 __ ret();
1129 __ cfi().RestoreState();
1130 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001131}
1132
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001133void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1134 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001135}
1136
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001137Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
1138 switch (load->GetType()) {
1139 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001140 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001142
1143 case Primitive::kPrimInt:
1144 case Primitive::kPrimNot:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001145 case Primitive::kPrimFloat:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001146 return Location::StackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001147
1148 case Primitive::kPrimBoolean:
1149 case Primitive::kPrimByte:
1150 case Primitive::kPrimChar:
1151 case Primitive::kPrimShort:
1152 case Primitive::kPrimVoid:
1153 LOG(FATAL) << "Unexpected type " << load->GetType();
Andreas Gampe65b798e2015-04-06 09:35:22 -07001154 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001155 }
1156
1157 LOG(FATAL) << "Unreachable";
Andreas Gampe65b798e2015-04-06 09:35:22 -07001158 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001159}
1160
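// Note: descriptive summary added for readability. Moves a value between two arbitrary
// locations (core register, XMM register, stack slot, or constant source). Memory-to-memory
// moves go through the reserved TMP register; 64-bit constants destined for the stack use
// Store64BitValueToStack.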
1161void CodeGeneratorX86_64::Move(Location destination, Location source) {
1162 if (source.Equals(destination)) {
1163 return;
1164 }
1165 if (destination.IsRegister()) {
1166 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001167 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001169 __ movd(destination.AsRegister<CpuRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001170 } else if (source.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001171 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001172 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001173 } else {
1174 DCHECK(source.IsDoubleStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001175 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001176 Address(CpuRegister(RSP), source.GetStackIndex()));
1177 }
1178 } else if (destination.IsFpuRegister()) {
1179 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001180 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001181 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001182 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001183 } else if (source.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001184 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 Address(CpuRegister(RSP), source.GetStackIndex()));
1186 } else {
1187 DCHECK(source.IsDoubleStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001188 __ movsd(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001189 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001190 }
1191 } else if (destination.IsStackSlot()) {
1192 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001193 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001194 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001195 } else if (source.IsFpuRegister()) {
1196 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001197 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001198 } else if (source.IsConstant()) {
1199 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001200 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001201 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001202 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001203 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001204 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1205 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001206 }
1207 } else {
1208 DCHECK(destination.IsDoubleStackSlot());
1209 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001210 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001211 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001212 } else if (source.IsFpuRegister()) {
1213 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001214 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001215 } else if (source.IsConstant()) {
1216 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001217 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001218 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001219 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001220 } else {
1221 DCHECK(constant->IsLongConstant());
1222 value = constant->AsLongConstant()->GetValue();
1223 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001224 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001225 } else {
1226 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001227 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1228 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001229 }
1230 }
1231}
1232
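// Note: descriptive summary added for readability. Moves the value produced by
// `instruction` into `location`. The current method, constants, HLoadLocal and HTemporary
// are handled specially; any other instruction is expected to already have its result in
// its output location.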
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001233void CodeGeneratorX86_64::Move(HInstruction* instruction,
1234 Location location,
1235 HInstruction* move_for) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001236 LocationSummary* locations = instruction->GetLocations();
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001237 if (instruction->IsCurrentMethod()) {
Mathieu Chartiere3b034a2015-05-31 14:29:23 -07001238 Move(location, Location::DoubleStackSlot(kCurrentMethodStackOffset));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001239 } else if (locations != nullptr && locations->Out().Equals(location)) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001240 return;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001241 } else if (locations != nullptr && locations->Out().IsConstant()) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001242 HConstant* const_to_move = locations->Out().GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001243 if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
1244 Immediate imm(GetInt32ValueOf(const_to_move));
Calin Juravlea21f5982014-11-13 15:53:04 +00001245 if (location.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001246 __ movl(location.AsRegister<CpuRegister>(), imm);
Calin Juravlea21f5982014-11-13 15:53:04 +00001247 } else if (location.IsStackSlot()) {
1248 __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
1249 } else {
1250 DCHECK(location.IsConstant());
1251 DCHECK_EQ(location.GetConstant(), const_to_move);
1252 }
1253 } else if (const_to_move->IsLongConstant()) {
1254 int64_t value = const_to_move->AsLongConstant()->GetValue();
1255 if (location.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04001256 Load64BitValue(location.AsRegister<CpuRegister>(), value);
Calin Juravlea21f5982014-11-13 15:53:04 +00001257 } else if (location.IsDoubleStackSlot()) {
Mark Mendellcfa410b2015-05-25 16:02:44 -04001258 Store64BitValueToStack(location, value);
Calin Juravlea21f5982014-11-13 15:53:04 +00001259 } else {
1260 DCHECK(location.IsConstant());
1261 DCHECK_EQ(location.GetConstant(), const_to_move);
1262 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001263 }
Roland Levillain476df552014-10-09 17:51:36 +01001264 } else if (instruction->IsLoadLocal()) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001265 switch (instruction->GetType()) {
1266 case Primitive::kPrimBoolean:
1267 case Primitive::kPrimByte:
1268 case Primitive::kPrimChar:
1269 case Primitive::kPrimShort:
1270 case Primitive::kPrimInt:
1271 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001272 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001273 Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
1274 break;
1275
1276 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001277 case Primitive::kPrimDouble:
Roland Levillain199f3362014-11-27 17:15:16 +00001278 Move(location,
1279 Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001280 break;
1281
1282 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001283 LOG(FATAL) << "Unexpected local type " << instruction->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001284 }
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +00001285 } else if (instruction->IsTemporary()) {
1286 Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
1287 Move(location, temp_location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001288 } else {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001289 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001290 switch (instruction->GetType()) {
1291 case Primitive::kPrimBoolean:
1292 case Primitive::kPrimByte:
1293 case Primitive::kPrimChar:
1294 case Primitive::kPrimShort:
1295 case Primitive::kPrimInt:
1296 case Primitive::kPrimNot:
1297 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001298 case Primitive::kPrimFloat:
1299 case Primitive::kPrimDouble:
Calin Juravlea21f5982014-11-13 15:53:04 +00001300 Move(location, locations->Out());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001301 break;
1302
1303 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001304 LOG(FATAL) << "Unexpected type " << instruction->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001305 }
1306 }
1307}
1308
Calin Juravle175dc732015-08-25 15:42:32 +01001309void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1310 DCHECK(location.IsRegister());
1311 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1312}
1313
Calin Juravlee460d1d2015-09-29 04:52:17 +01001314void CodeGeneratorX86_64::MoveLocation(
1315 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1316 Move(dst, src);
1317}
1318
1319void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1320 if (location.IsRegister()) {
1321 locations->AddTemp(location);
1322 } else {
1323 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1324 }
1325}
1326
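// Note: descriptive summary added for readability. Common code for HGoto and HTryBoundary.
// A loop back edge carrying a suspend check emits it here, and a pending suspend check
// from the entry block is also honored. The final jump is omitted when the successor is
// the next block in the linear order.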
David Brazdilfc6a86a2015-06-26 10:33:45 +00001327void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001328 DCHECK(!successor->IsExitBlock());
1329
1330 HBasicBlock* block = got->GetBlock();
1331 HInstruction* previous = got->GetPrevious();
1332
1333 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001334 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001335 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1336 return;
1337 }
1338
1339 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1340 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1341 }
1342 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001343 __ jmp(codegen_->GetLabelOf(successor));
1344 }
1345}
1346
David Brazdilfc6a86a2015-06-26 10:33:45 +00001347void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1348 got->SetLocations(nullptr);
1349}
1350
1351void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1352 HandleGoto(got, got->GetSuccessor());
1353}
1354
1355void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1356 try_boundary->SetLocations(nullptr);
1357}
1358
1359void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1360 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1361 if (!successor->IsExitBlock()) {
1362 HandleGoto(try_boundary, successor);
1363 }
1364}
1365
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001366void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1367 exit->SetLocations(nullptr);
1368}
1369
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001370void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001371}
1372
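// Note: descriptive summary added for readability. Emits the jumps for a floating-point
// condition. ucomiss/ucomisd report an unordered result (a NaN operand) through the parity
// flag, so the NaN outcome is dispatched first according to the condition's NaN bias, and
// only then is the ordinary condition code tested.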
Mark Mendellc4701932015-04-10 13:18:51 -04001373void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
1374 Label* true_label,
1375 Label* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001376 if (cond->IsFPConditionTrueIfNaN()) {
1377 __ j(kUnordered, true_label);
1378 } else if (cond->IsFPConditionFalseIfNaN()) {
1379 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001380 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001381 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001382}
1383
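// Note: descriptive summary added for readability. Branch form of a comparison that
// instruction simplification folded into the HCondition: the long or floating-point
// compare is emitted here together with the jumps, so the condition never has to be
// materialized into a register.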
David Brazdil0debae72015-11-12 18:37:00 +00001384void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1385 Label* true_target_in,
1386 Label* false_target_in) {
1387  // The generated branching requires both targets to be explicit. If either of the
1388  // targets is nullptr (i.e. a fallthrough), use and bind `fallthrough_target` instead.
1389 Label fallthrough_target;
1390 Label* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1391 Label* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1392
Mark Mendellc4701932015-04-10 13:18:51 -04001393 LocationSummary* locations = condition->GetLocations();
1394 Location left = locations->InAt(0);
1395 Location right = locations->InAt(1);
1396
Mark Mendellc4701932015-04-10 13:18:51 -04001397 Primitive::Type type = condition->InputAt(0)->GetType();
1398 switch (type) {
1399 case Primitive::kPrimLong: {
1400 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1401 if (right.IsConstant()) {
1402 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
1403 if (IsInt<32>(value)) {
1404 if (value == 0) {
1405 __ testq(left_reg, left_reg);
1406 } else {
1407 __ cmpq(left_reg, Immediate(static_cast<int32_t>(value)));
1408 }
1409 } else {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001410 // Value won't fit in a 32-bit integer.
Mark Mendellc4701932015-04-10 13:18:51 -04001411 __ cmpq(left_reg, codegen_->LiteralInt64Address(value));
1412 }
1413 } else if (right.IsDoubleStackSlot()) {
1414 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1415 } else {
1416 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1417 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001418 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Mark Mendellc4701932015-04-10 13:18:51 -04001419 break;
1420 }
1421 case Primitive::kPrimFloat: {
1422 if (right.IsFpuRegister()) {
1423 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1424 } else if (right.IsConstant()) {
1425 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1426 codegen_->LiteralFloatAddress(
1427 right.GetConstant()->AsFloatConstant()->GetValue()));
1428 } else {
1429 DCHECK(right.IsStackSlot());
1430 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1431 Address(CpuRegister(RSP), right.GetStackIndex()));
1432 }
1433 GenerateFPJumps(condition, true_target, false_target);
1434 break;
1435 }
1436 case Primitive::kPrimDouble: {
1437 if (right.IsFpuRegister()) {
1438 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1439 } else if (right.IsConstant()) {
1440 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1441 codegen_->LiteralDoubleAddress(
1442 right.GetConstant()->AsDoubleConstant()->GetValue()));
1443 } else {
1444 DCHECK(right.IsDoubleStackSlot());
1445 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1446 Address(CpuRegister(RSP), right.GetStackIndex()));
1447 }
1448 GenerateFPJumps(condition, true_target, false_target);
1449 break;
1450 }
1451 default:
1452 LOG(FATAL) << "Unexpected condition type " << type;
1453 }
1454
David Brazdil0debae72015-11-12 18:37:00 +00001455 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001456 __ jmp(false_target);
1457 }
David Brazdil0debae72015-11-12 18:37:00 +00001458
1459 if (fallthrough_target.IsLinked()) {
1460 __ Bind(&fallthrough_target);
1461 }
Mark Mendellc4701932015-04-10 13:18:51 -04001462}
1463
David Brazdil0debae72015-11-12 18:37:00 +00001464static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1465  // Moves may affect the eflags register (a move of zero uses xorl), so the EFLAGS
1466  // are only valid if they were set by the instruction immediately preceding `branch`.
1467  // We also cannot reuse the eflags of materialized floating-point conditions, whose branching is more complex.
1468 return cond->IsCondition() &&
1469 cond->GetNext() == branch &&
1470 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1471}
1472
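// Note: descriptive summary added for readability. Shared by HIf and HDeoptimize. Constant
// conditions collapse to an unconditional jump (or to nothing); boolean inputs and
// materialized conditions are tested against zero, reusing the eflags when
// AreEflagsSetFrom allows it; non-materialized conditions emit their comparison inline.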
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001473void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001474 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001475 Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00001476 Label* false_target) {
1477 HInstruction* cond = instruction->InputAt(condition_input_index);
1478
1479 if (true_target == nullptr && false_target == nullptr) {
1480 // Nothing to do. The code always falls through.
1481 return;
1482 } else if (cond->IsIntConstant()) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001483 // Constant condition, statically compared against 1.
David Brazdil0debae72015-11-12 18:37:00 +00001484 if (cond->AsIntConstant()->IsOne()) {
1485 if (true_target != nullptr) {
1486 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001487 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001488 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001489 DCHECK(cond->AsIntConstant()->IsZero());
1490 if (false_target != nullptr) {
1491 __ jmp(false_target);
1492 }
1493 }
1494 return;
1495 }
1496
1497 // The following code generates these patterns:
1498 // (1) true_target == nullptr && false_target != nullptr
1499 // - opposite condition true => branch to false_target
1500 // (2) true_target != nullptr && false_target == nullptr
1501 // - condition true => branch to true_target
1502 // (3) true_target != nullptr && false_target != nullptr
1503 // - condition true => branch to true_target
1504 // - branch to false_target
1505 if (IsBooleanValueOrMaterializedCondition(cond)) {
1506 if (AreEflagsSetFrom(cond, instruction)) {
1507 if (true_target == nullptr) {
1508 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1509 } else {
1510 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1511 }
1512 } else {
1513 // Materialized condition, compare against 0.
1514 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1515 if (lhs.IsRegister()) {
1516 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1517 } else {
1518 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1519 }
1520 if (true_target == nullptr) {
1521 __ j(kEqual, false_target);
1522 } else {
1523 __ j(kNotEqual, true_target);
1524 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001525 }
1526 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001527 // Condition has not been materialized, use its inputs as the
1528 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001529 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001530
David Brazdil0debae72015-11-12 18:37:00 +00001531 // If this is a long or FP comparison that has been folded into
1532 // the HCondition, generate the comparison directly.
1533 Primitive::Type type = condition->InputAt(0)->GetType();
1534 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1535 GenerateCompareTestAndBranch(condition, true_target, false_target);
1536 return;
1537 }
1538
1539 Location lhs = condition->GetLocations()->InAt(0);
1540 Location rhs = condition->GetLocations()->InAt(1);
1541 if (rhs.IsRegister()) {
1542 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1543 } else if (rhs.IsConstant()) {
1544 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
1545 if (constant == 0) {
1546 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001547 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001548 __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant));
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001549 }
1550 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001551 __ cmpl(lhs.AsRegister<CpuRegister>(),
1552 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1553 }
1554 if (true_target == nullptr) {
1555 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1556 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001557 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001558 }
Dave Allison20dfc792014-06-16 20:44:29 -07001559 }
David Brazdil0debae72015-11-12 18:37:00 +00001560
1561 // If neither branch falls through (case 3), the conditional branch to `true_target`
1562 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1563 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001564 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001565 }
1566}
1567
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001568void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001569 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1570 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001571 locations->SetInAt(0, Location::Any());
1572 }
1573}
1574
1575void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001576 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1577 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1578 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1579 nullptr : codegen_->GetLabelOf(true_successor);
1580 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1581 nullptr : codegen_->GetLabelOf(false_successor);
1582 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001583}
1584
1585void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1586 LocationSummary* locations = new (GetGraph()->GetArena())
1587 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001588 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001589 locations->SetInAt(0, Location::Any());
1590 }
1591}
1592
1593void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07001594 SlowPathCode* slow_path = new (GetGraph()->GetArena())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001595 DeoptimizationSlowPathX86_64(deoptimize);
1596 codegen_->AddSlowPath(slow_path);
David Brazdil0debae72015-11-12 18:37:00 +00001597 GenerateTestAndBranch(deoptimize,
1598 /* condition_input_index */ 0,
1599 slow_path->GetEntryLabel(),
1600 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001601}
1602
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001603void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
1604 local->SetLocations(nullptr);
1605}
1606
1607void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
1608 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1609}
1610
1611void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
1612 local->SetLocations(nullptr);
1613}
1614
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001615void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001616 // Nothing to do, this is driven by the code generator.
1617}
1618
1619void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001620 LocationSummary* locations =
1621 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001622 switch (store->InputAt(1)->GetType()) {
1623 case Primitive::kPrimBoolean:
1624 case Primitive::kPrimByte:
1625 case Primitive::kPrimChar:
1626 case Primitive::kPrimShort:
1627 case Primitive::kPrimInt:
1628 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001629 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001630 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1631 break;
1632
1633 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001634 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001635 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1636 break;
1637
1638 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001639 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001640 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001641}
1642
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001643void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001644}
1645
Roland Levillain0d37cd02015-05-27 16:39:19 +01001646void LocationsBuilderX86_64::VisitCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001647 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001648 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001649 // Handle the long/FP comparisons made in instruction simplification.
1650 switch (cond->InputAt(0)->GetType()) {
1651 case Primitive::kPrimLong:
1652 locations->SetInAt(0, Location::RequiresRegister());
1653 locations->SetInAt(1, Location::Any());
1654 break;
1655 case Primitive::kPrimFloat:
1656 case Primitive::kPrimDouble:
1657 locations->SetInAt(0, Location::RequiresFpuRegister());
1658 locations->SetInAt(1, Location::Any());
1659 break;
1660 default:
1661 locations->SetInAt(0, Location::RequiresRegister());
1662 locations->SetInAt(1, Location::Any());
1663 break;
1664 }
Roland Levillain0d37cd02015-05-27 16:39:19 +01001665 if (cond->NeedsMaterialization()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001666 locations->SetOut(Location::RequiresRegister());
1667 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001668}
1669
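// Note: descriptive summary added for readability. Materializes a condition into its
// output register. Integer and long cases clear the register (setcc only writes the low
// byte), emit cmp/test, then setcc; floating-point cases go through GenerateFPJumps and
// explicit 0/1 moves so that the condition's NaN bias is respected.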
Roland Levillain0d37cd02015-05-27 16:39:19 +01001670void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* cond) {
Mark Mendellc4701932015-04-10 13:18:51 -04001671 if (!cond->NeedsMaterialization()) {
1672 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001673 }
Mark Mendellc4701932015-04-10 13:18:51 -04001674
1675 LocationSummary* locations = cond->GetLocations();
1676 Location lhs = locations->InAt(0);
1677 Location rhs = locations->InAt(1);
1678 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
1679 Label true_label, false_label;
1680
1681 switch (cond->InputAt(0)->GetType()) {
1682 default:
1683 // Integer case.
1684
1685 // Clear output register: setcc only sets the low byte.
1686 __ xorl(reg, reg);
1687
1688 if (rhs.IsRegister()) {
1689 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1690 } else if (rhs.IsConstant()) {
1691 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
1692 if (constant == 0) {
1693 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1694 } else {
1695 __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant));
1696 }
1697 } else {
1698 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1699 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001700 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001701 return;
1702 case Primitive::kPrimLong:
1703 // Clear output register: setcc only sets the low byte.
1704 __ xorl(reg, reg);
1705
1706 if (rhs.IsRegister()) {
1707 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1708 } else if (rhs.IsConstant()) {
1709 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
1710 if (IsInt<32>(value)) {
1711 if (value == 0) {
1712 __ testq(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1713 } else {
1714 __ cmpq(lhs.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
1715 }
1716 } else {
1717 // Value won't fit in an int.
1718 __ cmpq(lhs.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
1719 }
1720 } else {
1721 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1722 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001723 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001724 return;
1725 case Primitive::kPrimFloat: {
1726 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1727 if (rhs.IsConstant()) {
1728 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1729 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1730 } else if (rhs.IsStackSlot()) {
1731 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1732 } else {
1733 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1734 }
1735 GenerateFPJumps(cond, &true_label, &false_label);
1736 break;
1737 }
1738 case Primitive::kPrimDouble: {
1739 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1740 if (rhs.IsConstant()) {
1741 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1742 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1743 } else if (rhs.IsDoubleStackSlot()) {
1744 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1745 } else {
1746 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1747 }
1748 GenerateFPJumps(cond, &true_label, &false_label);
1749 break;
1750 }
1751 }
1752
1753 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001754 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001755
Roland Levillain4fa13f62015-07-06 18:11:54 +01001756 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001757 __ Bind(&false_label);
1758 __ xorl(reg, reg);
1759 __ jmp(&done_label);
1760
Roland Levillain4fa13f62015-07-06 18:11:54 +01001761 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001762 __ Bind(&true_label);
1763 __ movl(reg, Immediate(1));
1764 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001765}
1766
1767void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
1768 VisitCondition(comp);
1769}
1770
1771void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
1772 VisitCondition(comp);
1773}
1774
1775void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
1776 VisitCondition(comp);
1777}
1778
1779void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
1780 VisitCondition(comp);
1781}
1782
1783void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
1784 VisitCondition(comp);
1785}
1786
1787void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
1788 VisitCondition(comp);
1789}
1790
1791void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1792 VisitCondition(comp);
1793}
1794
1795void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1796 VisitCondition(comp);
1797}
1798
1799void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
1800 VisitCondition(comp);
1801}
1802
1803void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
1804 VisitCondition(comp);
1805}
1806
1807void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1808 VisitCondition(comp);
1809}
1810
1811void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1812 VisitCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001813}
1814
Aart Bike9f37602015-10-09 11:15:55 -07001815void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
1816 VisitCondition(comp);
1817}
1818
1819void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
1820 VisitCondition(comp);
1821}
1822
1823void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
1824 VisitCondition(comp);
1825}
1826
1827void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
1828 VisitCondition(comp);
1829}
1830
1831void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
1832 VisitCondition(comp);
1833}
1834
1835void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
1836 VisitCondition(comp);
1837}
1838
1839void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
1840 VisitCondition(comp);
1841}
1842
1843void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
1844 VisitCondition(comp);
1845}
1846
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001847void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001848 LocationSummary* locations =
1849 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001850 switch (compare->InputAt(0)->GetType()) {
1851 case Primitive::kPrimLong: {
1852 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001853 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001854 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1855 break;
1856 }
1857 case Primitive::kPrimFloat:
1858 case Primitive::kPrimDouble: {
1859 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001860 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001861 locations->SetOut(Location::RequiresRegister());
1862 break;
1863 }
1864 default:
1865 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1866 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001867}
1868
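// Note: descriptive summary added for readability. HCompare produces -1, 0 or 1; for
// floating-point inputs the gt-bias flag decides whether an unordered (NaN) comparison
// counts as greater or as less. A rough sketch of the emitted sequence for the long case
// with both operands in registers:
//   cmpq left, right
//   movl out, 0
//   je   done
//   jl   less        ; kBelow instead of kLess for float/double (ucomis{s,d} sets CF)
//   movl out, 1
//   jmp  done
// less:
//   movl out, -1
// done: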
1869void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001870 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001871 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001872 Location left = locations->InAt(0);
1873 Location right = locations->InAt(1);
1874
Mark Mendell0c9497d2015-08-21 09:30:05 -04001875 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001876 Primitive::Type type = compare->InputAt(0)->GetType();
1877 switch (type) {
1878 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001879 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1880 if (right.IsConstant()) {
1881 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell40741f32015-04-20 22:10:34 -04001882 if (IsInt<32>(value)) {
1883 if (value == 0) {
1884 __ testq(left_reg, left_reg);
1885 } else {
1886 __ cmpq(left_reg, Immediate(static_cast<int32_t>(value)));
1887 }
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001888 } else {
Mark Mendell40741f32015-04-20 22:10:34 -04001889 // Value won't fit in an int.
1890 __ cmpq(left_reg, codegen_->LiteralInt64Address(value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001891 }
Mark Mendell40741f32015-04-20 22:10:34 -04001892 } else if (right.IsDoubleStackSlot()) {
1893 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001894 } else {
1895 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1896 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001897 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001898 }
1899 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001900 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1901 if (right.IsConstant()) {
1902 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1903 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1904 } else if (right.IsStackSlot()) {
1905 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1906 } else {
1907 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1908 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001909 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
1910 break;
1911 }
1912 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001913 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1914 if (right.IsConstant()) {
1915 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1916 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1917 } else if (right.IsDoubleStackSlot()) {
1918 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1919 } else {
1920 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1921 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001922 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
1923 break;
1924 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001925 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001926 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001927 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001928 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001929 __ j(kEqual, &done);
Calin Juravleddb7df22014-11-25 20:56:51 +00001930 __ j(type == Primitive::kPrimLong ? kLess : kBelow, &less); // ucomis{s,d} sets CF (kBelow)
Calin Juravlefd861242014-11-25 20:56:51 +00001931
Calin Juravle91debbc2014-11-26 19:01:09 +00001932 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001933 __ movl(out, Immediate(1));
1934 __ jmp(&done);
1935
1936 __ Bind(&less);
1937 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001938
1939 __ Bind(&done);
1940}
1941
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001942void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001943 LocationSummary* locations =
1944 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001945 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001946}
1947
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001948void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001949 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001950}
1951
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001952void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1953 LocationSummary* locations =
1954 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1955 locations->SetOut(Location::ConstantLocation(constant));
1956}
1957
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001958void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001959 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001960}
1961
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001962void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001963 LocationSummary* locations =
1964 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001965 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001966}
1967
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001968void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001969 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001970}
1971
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001972void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1973 LocationSummary* locations =
1974 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1975 locations->SetOut(Location::ConstantLocation(constant));
1976}
1977
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001978void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001979 // Will be generated at use site.
1980}
1981
1982void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1983 LocationSummary* locations =
1984 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1985 locations->SetOut(Location::ConstantLocation(constant));
1986}
1987
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001988void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
1989 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001990 // Will be generated at use site.
1991}
1992
Calin Juravle27df7582015-04-17 19:12:31 +01001993void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1994 memory_barrier->SetLocations(nullptr);
1995}
1996
1997void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001998 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01001999}
2000
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002001void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2002 ret->SetLocations(nullptr);
2003}
2004
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002005void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002006 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002007}
2008
2009void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002010 LocationSummary* locations =
2011 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002012 switch (ret->InputAt(0)->GetType()) {
2013 case Primitive::kPrimBoolean:
2014 case Primitive::kPrimByte:
2015 case Primitive::kPrimChar:
2016 case Primitive::kPrimShort:
2017 case Primitive::kPrimInt:
2018 case Primitive::kPrimNot:
2019 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002020 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002021 break;
2022
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002023 case Primitive::kPrimFloat:
2024 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002025 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002026 break;
2027
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002028 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002029 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002030 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002031}
2032
2033void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2034 if (kIsDebugBuild) {
2035 switch (ret->InputAt(0)->GetType()) {
2036 case Primitive::kPrimBoolean:
2037 case Primitive::kPrimByte:
2038 case Primitive::kPrimChar:
2039 case Primitive::kPrimShort:
2040 case Primitive::kPrimInt:
2041 case Primitive::kPrimNot:
2042 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002043 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002044 break;
2045
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002046 case Primitive::kPrimFloat:
2047 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002048 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002049 XMM0);
2050 break;
2051
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002052 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002053 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002054 }
2055 }
2056 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002057}
2058
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002059Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2060 switch (type) {
2061 case Primitive::kPrimBoolean:
2062 case Primitive::kPrimByte:
2063 case Primitive::kPrimChar:
2064 case Primitive::kPrimShort:
2065 case Primitive::kPrimInt:
2066 case Primitive::kPrimNot:
2067 case Primitive::kPrimLong:
2068 return Location::RegisterLocation(RAX);
2069
2070 case Primitive::kPrimVoid:
2071 return Location::NoLocation();
2072
2073 case Primitive::kPrimDouble:
2074 case Primitive::kPrimFloat:
2075 return Location::FpuRegisterLocation(XMM0);
2076 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002077
2078 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002079}
2080
2081Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2082 return Location::RegisterLocation(kMethodRegisterArgument);
2083}
2084
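// Note: descriptive summary added for readability. Walks the managed x86-64 calling
// convention for one more argument: gp_index_ and float_index_ count core and XMM register
// arguments separately, while stack_index_ advances by one slot for 32-bit values and by
// two slots for longs and doubles, so that overflow arguments land at the correct stack
// offset.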
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002085Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002086 switch (type) {
2087 case Primitive::kPrimBoolean:
2088 case Primitive::kPrimByte:
2089 case Primitive::kPrimChar:
2090 case Primitive::kPrimShort:
2091 case Primitive::kPrimInt:
2092 case Primitive::kPrimNot: {
2093 uint32_t index = gp_index_++;
2094 stack_index_++;
2095 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002096 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002097 } else {
2098 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2099 }
2100 }
2101
2102 case Primitive::kPrimLong: {
2103 uint32_t index = gp_index_;
2104 stack_index_ += 2;
2105 if (index < calling_convention.GetNumberOfRegisters()) {
2106 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002107 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002108 } else {
2109 gp_index_ += 2;
2110 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2111 }
2112 }
2113
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002114 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002115 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002116 stack_index_++;
2117 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002118 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002119 } else {
2120 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2121 }
2122 }
2123
2124 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002125 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002126 stack_index_ += 2;
2127 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002128 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002129 } else {
2130 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2131 }
2132 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002133
2134 case Primitive::kPrimVoid:
2135 LOG(FATAL) << "Unexpected parameter type " << type;
2136 break;
2137 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002138 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002139}
2140
Calin Juravle175dc732015-08-25 15:42:32 +01002141void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2142  // The trampoline uses the same calling convention as a regular dex call, except
2143  // that arg0/r0 is loaded with the method_idx rather than with the target
2144  // Method*.
2145 HandleInvoke(invoke);
2146}
2147
2148void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2149 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2150}
2151
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002152void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002153 // When we do not run baseline, explicit clinit checks triggered by static
2154 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2155 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002156
Mark Mendellfb8d2792015-03-31 22:16:59 -04002157 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002158 if (intrinsic.TryDispatch(invoke)) {
2159 return;
2160 }
2161
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002162 HandleInvoke(invoke);
2163}
2164
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002165static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2166 if (invoke->GetLocations()->Intrinsified()) {
2167 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2168 intrinsic.Dispatch(invoke);
2169 return true;
2170 }
2171 return false;
2172}
2173
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002174void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002175 // When we do not run baseline, explicit clinit checks triggered by static
2176 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2177 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002178
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002179 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2180 return;
2181 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002182
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002183 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002184 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002185 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002186 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002187}
2188
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002189void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002190 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002191 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002192}
2193
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002194void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002195 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002196 if (intrinsic.TryDispatch(invoke)) {
2197 return;
2198 }
2199
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002200 HandleInvoke(invoke);
2201}
2202
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002203void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002204 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2205 return;
2206 }
2207
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002208 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002209 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002210 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002211}
2212
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002213void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2214 HandleInvoke(invoke);
2215 // Add the hidden argument.
2216 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2217}
2218
2219void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2220 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002221 LocationSummary* locations = invoke->GetLocations();
2222 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2223 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002224 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2225 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002226 Location receiver = locations->InAt(0);
2227 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2228
Roland Levillain0d5a2812015-11-13 10:07:31 +00002229  // Set the hidden argument. It is safe to do this here, as RAX
 2230  // won't be modified thereafter, before the `call` instruction.
2231 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002232 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002233
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002234 if (receiver.IsStackSlot()) {
2235 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002236 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002237 __ movl(temp, Address(temp, class_offset));
2238 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002239 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002240 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002241 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002242 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002243  // Instead of simply (possibly) unpoisoning `temp` here, we should
 2244  // emit a read barrier for the previous class reference load.
 2245  // However, this is not required in practice, as this is an
 2246  // intermediate/temporary reference and because the current
 2247  // concurrent copying collector keeps the from-space memory
 2248  // intact/accessible until the end of the marking phase (future
 2249  // concurrent copying collectors may not preserve this property).
Roland Levillain4d027112015-07-01 15:41:14 +01002250 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002251 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002252 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002253 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002254 __ call(Address(temp,
2255 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002256
2257 DCHECK(!codegen_->IsLeafMethod());
2258 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2259}
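// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): at the C++ level, the code emitted above is a plain
// embedded-IMT lookup followed by an indirect call. The struct names and the
// table size below are hypothetical stand-ins, not the real ART mirror types or
// kImtSize, and the hidden RAX argument carrying the dex method index is not
// modelled here.
namespace {
struct SketchArtMethod { const void* quick_entry_point; };
struct SketchClass { SketchArtMethod* imt_[64]; };  // Table size is a sketch value.
struct SketchObject { SketchClass* klass_; };

inline const void* SketchInterfaceEntryPoint(SketchObject* receiver, uint32_t imt_index) {
  SketchClass* klass = receiver->klass_;                  // temp = receiver->klass_
  SketchArtMethod* method = klass->imt_[imt_index % 64];  // temp = temp->GetImtEntryAt(...)
  return method->quick_entry_point;                       // call temp->GetEntryPoint()
}
}  // namespace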
2260
Roland Levillain88cb1752014-10-20 16:36:47 +01002261void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2262 LocationSummary* locations =
2263 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2264 switch (neg->GetResultType()) {
2265 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002266 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002267 locations->SetInAt(0, Location::RequiresRegister());
2268 locations->SetOut(Location::SameAsFirstInput());
2269 break;
2270
Roland Levillain88cb1752014-10-20 16:36:47 +01002271 case Primitive::kPrimFloat:
2272 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002273 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002274 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002275 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002276 break;
2277
2278 default:
2279 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2280 }
2281}
2282
2283void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2284 LocationSummary* locations = neg->GetLocations();
2285 Location out = locations->Out();
2286 Location in = locations->InAt(0);
2287 switch (neg->GetResultType()) {
2288 case Primitive::kPrimInt:
2289 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002290 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002291 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002292 break;
2293
2294 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002295 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002296 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002297 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002298 break;
2299
Roland Levillain5368c212014-11-27 15:03:41 +00002300 case Primitive::kPrimFloat: {
2301 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002302 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002303 // Implement float negation with an exclusive or with value
2304 // 0x80000000 (mask for bit 31, representing the sign of a
2305 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002306 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002307 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002308 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002309 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002310
Roland Levillain5368c212014-11-27 15:03:41 +00002311 case Primitive::kPrimDouble: {
2312 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002313 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002314 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002315 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002316 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002317 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002318 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002319 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002320 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002321
2322 default:
2323 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2324 }
2325}
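// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): the xorps/xorpd sequences above negate by toggling
// only the IEEE-754 sign bit, which is also correct for NaN and signed zero.
// The float case, written with the same bit_cast utility this file already uses:
namespace {
inline float SketchNegateFloat(float value) {
  uint32_t bits = bit_cast<uint32_t, float>(value);
  bits ^= 0x80000000u;  // Toggle bit 31, the sign bit (the movss mask above).
  return bit_cast<float, uint32_t>(bits);
}
}  // namespace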
2326
Roland Levillaindff1f282014-11-05 14:15:05 +00002327void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2328 LocationSummary* locations =
2329 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2330 Primitive::Type result_type = conversion->GetResultType();
2331 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002332 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002333
David Brazdilb2bd1c52015-03-25 11:17:37 +00002334 // The Java language does not allow treating boolean as an integral type but
2335 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002336
Roland Levillaindff1f282014-11-05 14:15:05 +00002337 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002338 case Primitive::kPrimByte:
2339 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002340 case Primitive::kPrimBoolean:
2341 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002342 case Primitive::kPrimShort:
2343 case Primitive::kPrimInt:
2344 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002345 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002346 locations->SetInAt(0, Location::Any());
2347 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2348 break;
2349
2350 default:
2351 LOG(FATAL) << "Unexpected type conversion from " << input_type
2352 << " to " << result_type;
2353 }
2354 break;
2355
Roland Levillain01a8d712014-11-14 16:27:39 +00002356 case Primitive::kPrimShort:
2357 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002358 case Primitive::kPrimBoolean:
2359 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002360 case Primitive::kPrimByte:
2361 case Primitive::kPrimInt:
2362 case Primitive::kPrimChar:
2363 // Processing a Dex `int-to-short' instruction.
2364 locations->SetInAt(0, Location::Any());
2365 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2366 break;
2367
2368 default:
2369 LOG(FATAL) << "Unexpected type conversion from " << input_type
2370 << " to " << result_type;
2371 }
2372 break;
2373
Roland Levillain946e1432014-11-11 17:35:19 +00002374 case Primitive::kPrimInt:
2375 switch (input_type) {
2376 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002377 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002378 locations->SetInAt(0, Location::Any());
2379 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2380 break;
2381
2382 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002383 // Processing a Dex `float-to-int' instruction.
2384 locations->SetInAt(0, Location::RequiresFpuRegister());
2385 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002386 break;
2387
Roland Levillain946e1432014-11-11 17:35:19 +00002388 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002389 // Processing a Dex `double-to-int' instruction.
2390 locations->SetInAt(0, Location::RequiresFpuRegister());
2391 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002392 break;
2393
2394 default:
2395 LOG(FATAL) << "Unexpected type conversion from " << input_type
2396 << " to " << result_type;
2397 }
2398 break;
2399
Roland Levillaindff1f282014-11-05 14:15:05 +00002400 case Primitive::kPrimLong:
2401 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002402 case Primitive::kPrimBoolean:
2403 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002404 case Primitive::kPrimByte:
2405 case Primitive::kPrimShort:
2406 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002407 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002408 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002409 // TODO: We would benefit from a (to-be-implemented)
2410 // Location::RegisterOrStackSlot requirement for this input.
2411 locations->SetInAt(0, Location::RequiresRegister());
2412 locations->SetOut(Location::RequiresRegister());
2413 break;
2414
2415 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002416 // Processing a Dex `float-to-long' instruction.
2417 locations->SetInAt(0, Location::RequiresFpuRegister());
2418 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002419 break;
2420
Roland Levillaindff1f282014-11-05 14:15:05 +00002421 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002422 // Processing a Dex `double-to-long' instruction.
2423 locations->SetInAt(0, Location::RequiresFpuRegister());
2424 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002425 break;
2426
2427 default:
2428 LOG(FATAL) << "Unexpected type conversion from " << input_type
2429 << " to " << result_type;
2430 }
2431 break;
2432
Roland Levillain981e4542014-11-14 11:47:14 +00002433 case Primitive::kPrimChar:
2434 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002435 case Primitive::kPrimBoolean:
2436 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002437 case Primitive::kPrimByte:
2438 case Primitive::kPrimShort:
2439 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002440 // Processing a Dex `int-to-char' instruction.
2441 locations->SetInAt(0, Location::Any());
2442 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2443 break;
2444
2445 default:
2446 LOG(FATAL) << "Unexpected type conversion from " << input_type
2447 << " to " << result_type;
2448 }
2449 break;
2450
Roland Levillaindff1f282014-11-05 14:15:05 +00002451 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002452 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002453 case Primitive::kPrimBoolean:
2454 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002455 case Primitive::kPrimByte:
2456 case Primitive::kPrimShort:
2457 case Primitive::kPrimInt:
2458 case Primitive::kPrimChar:
2459 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002460 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002461 locations->SetOut(Location::RequiresFpuRegister());
2462 break;
2463
2464 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002465 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002466 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002467 locations->SetOut(Location::RequiresFpuRegister());
2468 break;
2469
Roland Levillaincff13742014-11-17 14:32:17 +00002470 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002471 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002472 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002473 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002474 break;
2475
2476 default:
2477 LOG(FATAL) << "Unexpected type conversion from " << input_type
2478 << " to " << result_type;
 2479      }
2480 break;
2481
Roland Levillaindff1f282014-11-05 14:15:05 +00002482 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002483 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002484 case Primitive::kPrimBoolean:
2485 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002486 case Primitive::kPrimByte:
2487 case Primitive::kPrimShort:
2488 case Primitive::kPrimInt:
2489 case Primitive::kPrimChar:
2490 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002491 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002492 locations->SetOut(Location::RequiresFpuRegister());
2493 break;
2494
2495 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002496 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002497 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002498 locations->SetOut(Location::RequiresFpuRegister());
2499 break;
2500
Roland Levillaincff13742014-11-17 14:32:17 +00002501 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002502 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002503 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002504 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002505 break;
2506
2507 default:
2508 LOG(FATAL) << "Unexpected type conversion from " << input_type
2509 << " to " << result_type;
2510 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002511 break;
2512
2513 default:
2514 LOG(FATAL) << "Unexpected type conversion from " << input_type
2515 << " to " << result_type;
2516 }
2517}
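// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): a compact restatement of the constraints chosen above
// for conversions that produce an int-sized (or narrower) result. Integral
// sources accept Location::Any() because movsx/movzx (or a folded constant) can
// consume a register, a stack slot or an immediate, while float/double sources
// must sit in an XMM register for the comiss/cvttss2si style sequences. This is
// a simplification of the full rule table above, not a replacement for it.
namespace {
inline bool SketchIntResultInputMayBeAny(Primitive::Type input_type) {
  return input_type != Primitive::kPrimFloat &&
         input_type != Primitive::kPrimDouble;
}
}  // namespace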
2518
2519void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2520 LocationSummary* locations = conversion->GetLocations();
2521 Location out = locations->Out();
2522 Location in = locations->InAt(0);
2523 Primitive::Type result_type = conversion->GetResultType();
2524 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002525 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002526 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002527 case Primitive::kPrimByte:
2528 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002529 case Primitive::kPrimBoolean:
2530 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002531 case Primitive::kPrimShort:
2532 case Primitive::kPrimInt:
2533 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002534 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002535 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002536 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain51d3fc42014-11-13 14:11:42 +00002537 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002538 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002539 Address(CpuRegister(RSP), in.GetStackIndex()));
2540 } else {
2541 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002542 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002543 Immediate(static_cast<int8_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2544 }
2545 break;
2546
2547 default:
2548 LOG(FATAL) << "Unexpected type conversion from " << input_type
2549 << " to " << result_type;
2550 }
2551 break;
2552
Roland Levillain01a8d712014-11-14 16:27:39 +00002553 case Primitive::kPrimShort:
2554 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002555 case Primitive::kPrimBoolean:
2556 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002557 case Primitive::kPrimByte:
2558 case Primitive::kPrimInt:
2559 case Primitive::kPrimChar:
2560 // Processing a Dex `int-to-short' instruction.
2561 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002562 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002563 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002564 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002565 Address(CpuRegister(RSP), in.GetStackIndex()));
2566 } else {
2567 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002568 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002569 Immediate(static_cast<int16_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2570 }
2571 break;
2572
2573 default:
2574 LOG(FATAL) << "Unexpected type conversion from " << input_type
2575 << " to " << result_type;
2576 }
2577 break;
2578
Roland Levillain946e1432014-11-11 17:35:19 +00002579 case Primitive::kPrimInt:
2580 switch (input_type) {
2581 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002582 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002583 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002584 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002585 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002586 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002587 Address(CpuRegister(RSP), in.GetStackIndex()));
2588 } else {
2589 DCHECK(in.IsConstant());
2590 DCHECK(in.GetConstant()->IsLongConstant());
2591 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002592 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002593 }
2594 break;
2595
Roland Levillain3f8f9362014-12-02 17:45:01 +00002596 case Primitive::kPrimFloat: {
2597 // Processing a Dex `float-to-int' instruction.
2598 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2599 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002600 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002601
2602 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002603 // if input >= (float)INT_MAX goto done
2604 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002605 __ j(kAboveEqual, &done);
2606 // if input == NaN goto nan
2607 __ j(kUnordered, &nan);
2608 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002609 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002610 __ jmp(&done);
2611 __ Bind(&nan);
2612 // output = 0
2613 __ xorl(output, output);
2614 __ Bind(&done);
2615 break;
2616 }
2617
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002618 case Primitive::kPrimDouble: {
2619 // Processing a Dex `double-to-int' instruction.
2620 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2621 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002622 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002623
2624 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002625 // if input >= (double)INT_MAX goto done
2626 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002627 __ j(kAboveEqual, &done);
2628 // if input == NaN goto nan
2629 __ j(kUnordered, &nan);
2630 // output = double-to-int-truncate(input)
2631 __ cvttsd2si(output, input);
2632 __ jmp(&done);
2633 __ Bind(&nan);
2634 // output = 0
2635 __ xorl(output, output);
2636 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002637 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002638 }
Roland Levillain946e1432014-11-11 17:35:19 +00002639
2640 default:
2641 LOG(FATAL) << "Unexpected type conversion from " << input_type
2642 << " to " << result_type;
2643 }
2644 break;
2645
Roland Levillaindff1f282014-11-05 14:15:05 +00002646 case Primitive::kPrimLong:
 2647      DCHECK(out.IsRegister());
 2648      switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002649 case Primitive::kPrimBoolean:
2650 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002651 case Primitive::kPrimByte:
2652 case Primitive::kPrimShort:
2653 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002654 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002655 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002656 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002657 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002658 break;
2659
Roland Levillain624279f2014-12-04 11:54:28 +00002660 case Primitive::kPrimFloat: {
2661 // Processing a Dex `float-to-long' instruction.
2662 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2663 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002664 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002665
Mark Mendell92e83bf2015-05-07 11:25:03 -04002666 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002667 // if input >= (float)LONG_MAX goto done
2668 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002669 __ j(kAboveEqual, &done);
2670 // if input == NaN goto nan
2671 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002672 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002673 __ cvttss2si(output, input, true);
2674 __ jmp(&done);
2675 __ Bind(&nan);
2676 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002677 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002678 __ Bind(&done);
2679 break;
2680 }
2681
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002682 case Primitive::kPrimDouble: {
2683 // Processing a Dex `double-to-long' instruction.
2684 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2685 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002686 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002687
Mark Mendell92e83bf2015-05-07 11:25:03 -04002688 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002689 // if input >= (double)LONG_MAX goto done
2690 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002691 __ j(kAboveEqual, &done);
2692 // if input == NaN goto nan
2693 __ j(kUnordered, &nan);
2694 // output = double-to-long-truncate(input)
2695 __ cvttsd2si(output, input, true);
2696 __ jmp(&done);
2697 __ Bind(&nan);
2698 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002699 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002700 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002701 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002702 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002703
2704 default:
2705 LOG(FATAL) << "Unexpected type conversion from " << input_type
2706 << " to " << result_type;
2707 }
2708 break;
2709
Roland Levillain981e4542014-11-14 11:47:14 +00002710 case Primitive::kPrimChar:
2711 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002712 case Primitive::kPrimBoolean:
2713 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002714 case Primitive::kPrimByte:
2715 case Primitive::kPrimShort:
2716 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002717 // Processing a Dex `int-to-char' instruction.
2718 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002719 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain981e4542014-11-14 11:47:14 +00002720 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002721 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002722 Address(CpuRegister(RSP), in.GetStackIndex()));
2723 } else {
2724 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002725 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002726 Immediate(static_cast<uint16_t>(
2727 in.GetConstant()->AsIntConstant()->GetValue())));
Roland Levillain981e4542014-11-14 11:47:14 +00002728 }
2729 break;
2730
2731 default:
2732 LOG(FATAL) << "Unexpected type conversion from " << input_type
2733 << " to " << result_type;
2734 }
2735 break;
2736
Roland Levillaindff1f282014-11-05 14:15:05 +00002737 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002738 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002739 case Primitive::kPrimBoolean:
2740 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002741 case Primitive::kPrimByte:
2742 case Primitive::kPrimShort:
2743 case Primitive::kPrimInt:
2744 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002745 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002746 if (in.IsRegister()) {
2747 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2748 } else if (in.IsConstant()) {
2749 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2750 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2751 if (v == 0) {
2752 __ xorps(dest, dest);
2753 } else {
2754 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2755 }
2756 } else {
2757 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2758 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2759 }
Roland Levillaincff13742014-11-17 14:32:17 +00002760 break;
2761
2762 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002763 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002764 if (in.IsRegister()) {
2765 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2766 } else if (in.IsConstant()) {
2767 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2768 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2769 if (v == 0) {
2770 __ xorps(dest, dest);
2771 } else {
2772 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2773 }
2774 } else {
2775 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2776 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2777 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002778 break;
2779
Roland Levillaincff13742014-11-17 14:32:17 +00002780 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002781 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002782 if (in.IsFpuRegister()) {
2783 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2784 } else if (in.IsConstant()) {
2785 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2786 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2787 if (bit_cast<int64_t, double>(v) == 0) {
2788 __ xorps(dest, dest);
2789 } else {
2790 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2791 }
2792 } else {
2793 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2794 Address(CpuRegister(RSP), in.GetStackIndex()));
2795 }
Roland Levillaincff13742014-11-17 14:32:17 +00002796 break;
2797
2798 default:
2799 LOG(FATAL) << "Unexpected type conversion from " << input_type
2800 << " to " << result_type;
 2801      }
2802 break;
2803
Roland Levillaindff1f282014-11-05 14:15:05 +00002804 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002805 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002806 case Primitive::kPrimBoolean:
2807 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002808 case Primitive::kPrimByte:
2809 case Primitive::kPrimShort:
2810 case Primitive::kPrimInt:
2811 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002812 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002813 if (in.IsRegister()) {
2814 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2815 } else if (in.IsConstant()) {
2816 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2817 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2818 if (v == 0) {
2819 __ xorpd(dest, dest);
2820 } else {
2821 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2822 }
2823 } else {
2824 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2825 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2826 }
Roland Levillaincff13742014-11-17 14:32:17 +00002827 break;
2828
2829 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002830 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002831 if (in.IsRegister()) {
2832 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2833 } else if (in.IsConstant()) {
2834 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2835 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2836 if (v == 0) {
2837 __ xorpd(dest, dest);
2838 } else {
2839 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2840 }
2841 } else {
2842 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2843 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2844 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002845 break;
2846
Roland Levillaincff13742014-11-17 14:32:17 +00002847 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002848 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002849 if (in.IsFpuRegister()) {
2850 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2851 } else if (in.IsConstant()) {
2852 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2853 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2854 if (bit_cast<int32_t, float>(v) == 0) {
2855 __ xorpd(dest, dest);
2856 } else {
2857 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2858 }
2859 } else {
2860 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2861 Address(CpuRegister(RSP), in.GetStackIndex()));
2862 }
Roland Levillaincff13742014-11-17 14:32:17 +00002863 break;
2864
2865 default:
2866 LOG(FATAL) << "Unexpected type conversion from " << input_type
2867 << " to " << result_type;
 2868      }
Roland Levillaindff1f282014-11-05 14:15:05 +00002869 break;
2870
2871 default:
2872 LOG(FATAL) << "Unexpected type conversion from " << input_type
2873 << " to " << result_type;
2874 }
2875}
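// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): the comiss/cvttss2si sequences above implement Java's
// saturating narrowing rules. A scalar C++ model of the float-to-int case (the
// float-to-long and double variants are analogous); INT32_MAX and INT32_MIN are
// assumed to come from <cstdint>:
namespace {
inline int32_t SketchFloatToInt(float input) {
  if (input != input) {
    return 0;                          // NaN converts to 0.
  }
  if (input >= static_cast<float>(INT32_MAX)) {
    return INT32_MAX;                  // Saturate on positive overflow.
  }
  if (input <= static_cast<float>(INT32_MIN)) {
    return INT32_MIN;                  // Saturate on negative overflow; the emitted
                                       // code gets this for free because cvttss2si
                                       // returns 0x80000000 for such inputs.
  }
  return static_cast<int32_t>(input);  // In range: truncate toward zero.
}
}  // namespace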
2876
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002877void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002878 LocationSummary* locations =
2879 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002880 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002881 case Primitive::kPrimInt: {
2882 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002883 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2884 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002885 break;
2886 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002887
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002888 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002889 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002890 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002891 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002892 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002893 break;
2894 }
2895
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002896 case Primitive::kPrimDouble:
2897 case Primitive::kPrimFloat: {
2898 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002899 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002900 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002901 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002902 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002903
2904 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002905 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002906 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002907}
2908
2909void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2910 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002911 Location first = locations->InAt(0);
2912 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002913 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002914
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002915 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002916 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002917 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002918 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2919 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002920 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2921 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002922 } else {
2923 __ leal(out.AsRegister<CpuRegister>(), Address(
2924 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2925 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002926 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002927 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2928 __ addl(out.AsRegister<CpuRegister>(),
2929 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2930 } else {
2931 __ leal(out.AsRegister<CpuRegister>(), Address(
2932 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2933 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002934 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002935 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002936 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002937 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002938 break;
2939 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002940
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002941 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002942 if (second.IsRegister()) {
2943 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2944 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002945 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2946 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002947 } else {
2948 __ leaq(out.AsRegister<CpuRegister>(), Address(
2949 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2950 }
2951 } else {
2952 DCHECK(second.IsConstant());
2953 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2954 int32_t int32_value = Low32Bits(value);
2955 DCHECK_EQ(int32_value, value);
2956 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2957 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2958 } else {
2959 __ leaq(out.AsRegister<CpuRegister>(), Address(
2960 first.AsRegister<CpuRegister>(), int32_value));
2961 }
2962 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002963 break;
2964 }
2965
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002966 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002967 if (second.IsFpuRegister()) {
2968 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2969 } else if (second.IsConstant()) {
2970 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002971 codegen_->LiteralFloatAddress(
2972 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002973 } else {
2974 DCHECK(second.IsStackSlot());
2975 __ addss(first.AsFpuRegister<XmmRegister>(),
2976 Address(CpuRegister(RSP), second.GetStackIndex()));
2977 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002978 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002979 }
2980
2981 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002982 if (second.IsFpuRegister()) {
2983 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2984 } else if (second.IsConstant()) {
2985 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002986 codegen_->LiteralDoubleAddress(
2987 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002988 } else {
2989 DCHECK(second.IsDoubleStackSlot());
2990 __ addsd(first.AsFpuRegister<XmmRegister>(),
2991 Address(CpuRegister(RSP), second.GetStackIndex()));
2992 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002993 break;
2994 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002995
2996 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002997 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002998 }
2999}
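// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): the integer add above avoids a register copy by
// falling back to lea when the output register aliases neither input. The
// selection logic, written out as a tiny decision helper with hypothetical names:
namespace {
enum class SketchAddForm { kAddSecondToOut, kAddFirstToOut, kLeaThreeOperand };

inline SketchAddForm SketchPickIntAdd(int out_reg, int first_reg, int second_reg) {
  if (out_reg == first_reg) {
    return SketchAddForm::kAddSecondToOut;  // addl out, second
  }
  if (out_reg == second_reg) {
    return SketchAddForm::kAddFirstToOut;   // addl out, first
  }
  return SketchAddForm::kLeaThreeOperand;   // leal out, [first + second * 1]
}
}  // namespace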
3000
3001void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003002 LocationSummary* locations =
3003 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003004 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003005 case Primitive::kPrimInt: {
3006 locations->SetInAt(0, Location::RequiresRegister());
3007 locations->SetInAt(1, Location::Any());
3008 locations->SetOut(Location::SameAsFirstInput());
3009 break;
3010 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003011 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003012 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003013 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003014 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003015 break;
3016 }
Calin Juravle11351682014-10-23 15:38:15 +01003017 case Primitive::kPrimFloat:
3018 case Primitive::kPrimDouble: {
3019 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003020 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003021 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003022 break;
Calin Juravle11351682014-10-23 15:38:15 +01003023 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003024 default:
Calin Juravle11351682014-10-23 15:38:15 +01003025 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003026 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003027}
3028
3029void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3030 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003031 Location first = locations->InAt(0);
3032 Location second = locations->InAt(1);
3033 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003034 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003035 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003036 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003037 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003038 } else if (second.IsConstant()) {
3039 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003040 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003041 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003042 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003043 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003044 break;
3045 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003046 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003047 if (second.IsConstant()) {
3048 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3049 DCHECK(IsInt<32>(value));
3050 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3051 } else {
3052 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3053 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003054 break;
3055 }
3056
Calin Juravle11351682014-10-23 15:38:15 +01003057 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003058 if (second.IsFpuRegister()) {
3059 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3060 } else if (second.IsConstant()) {
3061 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003062 codegen_->LiteralFloatAddress(
3063 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003064 } else {
3065 DCHECK(second.IsStackSlot());
3066 __ subss(first.AsFpuRegister<XmmRegister>(),
3067 Address(CpuRegister(RSP), second.GetStackIndex()));
3068 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003069 break;
Calin Juravle11351682014-10-23 15:38:15 +01003070 }
3071
3072 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003073 if (second.IsFpuRegister()) {
3074 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3075 } else if (second.IsConstant()) {
3076 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003077 codegen_->LiteralDoubleAddress(
3078 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003079 } else {
3080 DCHECK(second.IsDoubleStackSlot());
3081 __ subsd(first.AsFpuRegister<XmmRegister>(),
3082 Address(CpuRegister(RSP), second.GetStackIndex()));
3083 }
Calin Juravle11351682014-10-23 15:38:15 +01003084 break;
3085 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003086
3087 default:
Calin Juravle11351682014-10-23 15:38:15 +01003088 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003089 }
3090}
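// Illustrative sketch (added for exposition; not part of the original file and
// never referenced by it): unlike VisitAdd above, subtraction has no lea-style
// three-operand form (lea cannot subtract a register operand), so the locations
// force the output to alias the first input and the emitted code is always the
// destructive "first -= second" shape modelled here:
namespace {
inline int32_t SketchIntSub(int32_t first, int32_t second) {
  first -= second;  // subl first, second (register, immediate or stack operand).
  return first;     // The result lives where the first input was.
}
}  // namespace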
3091
Calin Juravle34bacdf2014-10-07 20:23:36 +01003092void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3093 LocationSummary* locations =
3094 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3095 switch (mul->GetResultType()) {
3096 case Primitive::kPrimInt: {
3097 locations->SetInAt(0, Location::RequiresRegister());
3098 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003099 if (mul->InputAt(1)->IsIntConstant()) {
3100 // Can use 3 operand multiply.
3101 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3102 } else {
3103 locations->SetOut(Location::SameAsFirstInput());
3104 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003105 break;
3106 }
3107 case Primitive::kPrimLong: {
3108 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003109 locations->SetInAt(1, Location::Any());
3110 if (mul->InputAt(1)->IsLongConstant() &&
3111 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003112 // Can use 3 operand multiply.
3113 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3114 } else {
3115 locations->SetOut(Location::SameAsFirstInput());
3116 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003117 break;
3118 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003119 case Primitive::kPrimFloat:
3120 case Primitive::kPrimDouble: {
3121 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003122 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003123 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003124 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003125 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003126
3127 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003128 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003129 }
3130}
3131
3132void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3133 LocationSummary* locations = mul->GetLocations();
3134 Location first = locations->InAt(0);
3135 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003136 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003137 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003138 case Primitive::kPrimInt:
3139 // The constant may have ended up in a register, so test explicitly to avoid
3140 // problems where the output may not be the same as the first operand.
3141 if (mul->InputAt(1)->IsIntConstant()) {
3142 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3143 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3144 } else if (second.IsRegister()) {
3145 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003146 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003147 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003148 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003149 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003150 __ imull(first.AsRegister<CpuRegister>(),
3151 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003152 }
3153 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003154 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003155 // The constant may have ended up in a register, so test explicitly to avoid
3156 // problems where the output may not be the same as the first operand.
3157 if (mul->InputAt(1)->IsLongConstant()) {
3158 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3159 if (IsInt<32>(value)) {
3160 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3161 Immediate(static_cast<int32_t>(value)));
3162 } else {
3163 // Have to use the constant area.
3164 DCHECK(first.Equals(out));
3165 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3166 }
3167 } else if (second.IsRegister()) {
3168 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003169 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003170 } else {
3171 DCHECK(second.IsDoubleStackSlot());
3172 DCHECK(first.Equals(out));
3173 __ imulq(first.AsRegister<CpuRegister>(),
3174 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003175 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003176 break;
3177 }
3178
Calin Juravleb5bfa962014-10-21 18:02:24 +01003179 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003180 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003181 if (second.IsFpuRegister()) {
3182 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3183 } else if (second.IsConstant()) {
3184 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003185 codegen_->LiteralFloatAddress(
3186 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003187 } else {
3188 DCHECK(second.IsStackSlot());
3189 __ mulss(first.AsFpuRegister<XmmRegister>(),
3190 Address(CpuRegister(RSP), second.GetStackIndex()));
3191 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003192 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003193 }
3194
3195 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003196 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003197 if (second.IsFpuRegister()) {
3198 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3199 } else if (second.IsConstant()) {
3200 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003201 codegen_->LiteralDoubleAddress(
3202 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003203 } else {
3204 DCHECK(second.IsDoubleStackSlot());
3205 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3206 Address(CpuRegister(RSP), second.GetStackIndex()));
3207 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003208 break;
3209 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003210
3211 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003212 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003213 }
3214}
3215
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003216void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3217 uint32_t stack_adjustment, bool is_float) {
3218 if (source.IsStackSlot()) {
3219 DCHECK(is_float);
3220 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3221 } else if (source.IsDoubleStackSlot()) {
3222 DCHECK(!is_float);
3223 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3224 } else {
3225    // Write the value to the temporary location on the stack and load it onto the FP stack.
3226 if (is_float) {
3227 Location stack_temp = Location::StackSlot(temp_offset);
3228 codegen_->Move(stack_temp, source);
3229 __ flds(Address(CpuRegister(RSP), temp_offset));
3230 } else {
3231 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3232 codegen_->Move(stack_temp, source);
3233 __ fldl(Address(CpuRegister(RSP), temp_offset));
3234 }
3235 }
3236}
3237
3238void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3239 Primitive::Type type = rem->GetResultType();
3240 bool is_float = type == Primitive::kPrimFloat;
3241 size_t elem_size = Primitive::ComponentSize(type);
3242 LocationSummary* locations = rem->GetLocations();
3243 Location first = locations->InAt(0);
3244 Location second = locations->InAt(1);
3245 Location out = locations->Out();
3246
3247 // Create stack space for 2 elements.
3248 // TODO: enhance register allocator to ask for stack temporaries.
3249 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3250
3251 // Load the values to the FP stack in reverse order, using temporaries if needed.
3252 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3253 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3254
3255 // Loop doing FPREM until we stabilize.
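  // At this point ST(0) holds `first` and ST(1) holds `second` (hence the reverse push order
  // above); fprem computes ST(0) = ST(0) REM ST(1). It is only a partial reduction (each
  // iteration reduces the exponent difference by at most 63), so widely differing operands
  // may need several passes, signaled by the C2 status flag staying set.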
Mark Mendell0c9497d2015-08-21 09:30:05 -04003256 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003257 __ Bind(&retry);
3258 __ fprem();
3259
3260 // Move FP status to AX.
3261 __ fstsw();
3262
3263 // And see if the argument reduction is complete. This is signaled by the
3264 // C2 FPU flag bit set to 0.
3265 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3266 __ j(kNotEqual, &retry);
3267
3268 // We have settled on the final value. Retrieve it into an XMM register.
3269 // Store FP top of stack to real stack.
3270 if (is_float) {
3271 __ fsts(Address(CpuRegister(RSP), 0));
3272 } else {
3273 __ fstl(Address(CpuRegister(RSP), 0));
3274 }
3275
3276 // Pop the 2 items from the FP stack.
3277 __ fucompp();
3278
3279 // Load the value from the stack into an XMM register.
3280 DCHECK(out.IsFpuRegister()) << out;
3281 if (is_float) {
3282 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3283 } else {
3284 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3285 }
3286
3287 // And remove the temporary stack space we allocated.
3288 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3289}
3290
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003291void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3292 DCHECK(instruction->IsDiv() || instruction->IsRem());
3293
3294 LocationSummary* locations = instruction->GetLocations();
3295 Location second = locations->InAt(1);
3296 DCHECK(second.IsConstant());
3297
3298 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3299 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003300 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003301
3302 DCHECK(imm == 1 || imm == -1);
3303
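  // No actual division is emitted: x % 1 == x % -1 == 0, and x / -1 is just -x (x / 1 is x).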
3304 switch (instruction->GetResultType()) {
3305 case Primitive::kPrimInt: {
3306 if (instruction->IsRem()) {
3307 __ xorl(output_register, output_register);
3308 } else {
3309 __ movl(output_register, input_register);
3310 if (imm == -1) {
3311 __ negl(output_register);
3312 }
3313 }
3314 break;
3315 }
3316
3317 case Primitive::kPrimLong: {
3318 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003319 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003320 } else {
3321 __ movq(output_register, input_register);
3322 if (imm == -1) {
3323 __ negq(output_register);
3324 }
3325 }
3326 break;
3327 }
3328
3329 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003330 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003331 }
3332}
3333
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003334void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003335 LocationSummary* locations = instruction->GetLocations();
3336 Location second = locations->InAt(1);
3337
3338 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3339 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3340
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003341 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003342
3343 DCHECK(IsPowerOfTwo(std::abs(imm)));
3344
3345 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3346
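  // The sequence below implements round-toward-zero division: add |imm| - 1 to the numerator
  // only when it is negative, then shift right arithmetically. Worked example for imm = 8
  // (shift = 3): n = -3 -> tmp = 4 -> 0; n = -9 -> tmp = -2 -> -1; n = 11 -> tmp = 18 but the
  // cmov replaces it with 11 -> 1. All of these match Java's truncating division.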
3347 if (instruction->GetResultType() == Primitive::kPrimInt) {
3348 __ leal(tmp, Address(numerator, std::abs(imm) - 1));
3349 __ testl(numerator, numerator);
3350 __ cmov(kGreaterEqual, tmp, numerator);
3351 int shift = CTZ(imm);
3352 __ sarl(tmp, Immediate(shift));
3353
3354 if (imm < 0) {
3355 __ negl(tmp);
3356 }
3357
3358 __ movl(output_register, tmp);
3359 } else {
3360 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3361 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3362
Mark Mendell92e83bf2015-05-07 11:25:03 -04003363 codegen_->Load64BitValue(rdx, std::abs(imm) - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003364 __ addq(rdx, numerator);
3365 __ testq(numerator, numerator);
3366 __ cmov(kGreaterEqual, rdx, numerator);
3367 int shift = CTZ(imm);
3368 __ sarq(rdx, Immediate(shift));
3369
3370 if (imm < 0) {
3371 __ negq(rdx);
3372 }
3373
3374 __ movq(output_register, rdx);
3375 }
3376}
3377
3378void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3379 DCHECK(instruction->IsDiv() || instruction->IsRem());
3380
3381 LocationSummary* locations = instruction->GetLocations();
3382 Location second = locations->InAt(1);
3383
3384 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3385 : locations->GetTemp(0).AsRegister<CpuRegister>();
3386 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3387 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3388 : locations->Out().AsRegister<CpuRegister>();
3389 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3390
3391 DCHECK_EQ(RAX, eax.AsRegister());
3392 DCHECK_EQ(RDX, edx.AsRegister());
3393 if (instruction->IsDiv()) {
3394 DCHECK_EQ(RAX, out.AsRegister());
3395 } else {
3396 DCHECK_EQ(RDX, out.AsRegister());
3397 }
3398
3399 int64_t magic;
3400 int shift;
3401
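  // Both branches follow the usual signed magic-number division (as in Hacker's Delight,
  // which CalculateMagicAndShiftForDivRem is assumed to implement). Roughly, for the 32-bit
  // case:
  //   q = (int32_t)(((int64_t)magic * n) >> 32);  // high half of the widening multiply
  //   if (imm > 0 && magic < 0) q += n;
  //   if (imm < 0 && magic > 0) q -= n;
  //   q >>= shift;
  //   q += (uint32_t)q >> 31;                     // round the quotient toward zero
  //   result = IsRem() ? n - q * imm : q;
  // The 64-bit branch is analogous; the 32-bit branch also special-cases n == 0 up front.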
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003402 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003403 if (instruction->GetResultType() == Primitive::kPrimInt) {
3404 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3405
3406 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3407
3408 __ movl(numerator, eax);
3409
Mark Mendell0c9497d2015-08-21 09:30:05 -04003410 NearLabel no_div;
3411 NearLabel end;
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003412 __ testl(eax, eax);
3413 __ j(kNotEqual, &no_div);
3414
3415 __ xorl(out, out);
3416 __ jmp(&end);
3417
3418 __ Bind(&no_div);
3419
3420 __ movl(eax, Immediate(magic));
3421 __ imull(numerator);
3422
3423 if (imm > 0 && magic < 0) {
3424 __ addl(edx, numerator);
3425 } else if (imm < 0 && magic > 0) {
3426 __ subl(edx, numerator);
3427 }
3428
3429 if (shift != 0) {
3430 __ sarl(edx, Immediate(shift));
3431 }
3432
3433 __ movl(eax, edx);
3434 __ shrl(edx, Immediate(31));
3435 __ addl(edx, eax);
3436
3437 if (instruction->IsRem()) {
3438 __ movl(eax, numerator);
3439 __ imull(edx, Immediate(imm));
3440 __ subl(eax, edx);
3441 __ movl(edx, eax);
3442 } else {
3443 __ movl(eax, edx);
3444 }
3445 __ Bind(&end);
3446 } else {
3447 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3448
3449 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3450
3451 CpuRegister rax = eax;
3452 CpuRegister rdx = edx;
3453
3454 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3455
3456 // Save the numerator.
3457 __ movq(numerator, rax);
3458
3459 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003460 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003461
3462 // RDX:RAX = magic * numerator
3463 __ imulq(numerator);
3464
3465 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003466 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003467 __ addq(rdx, numerator);
3468 } else if (imm < 0 && magic > 0) {
3469 // RDX -= numerator
3470 __ subq(rdx, numerator);
3471 }
3472
3473 // Shift if needed.
3474 if (shift != 0) {
3475 __ sarq(rdx, Immediate(shift));
3476 }
3477
3478 // RDX += 1 if RDX < 0
3479 __ movq(rax, rdx);
3480 __ shrq(rdx, Immediate(63));
3481 __ addq(rdx, rax);
3482
3483 if (instruction->IsRem()) {
3484 __ movq(rax, numerator);
3485
3486 if (IsInt<32>(imm)) {
3487 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3488 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003489 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003490 }
3491
3492 __ subq(rax, rdx);
3493 __ movq(rdx, rax);
3494 } else {
3495 __ movq(rax, rdx);
3496 }
3497 }
3498}
3499
Calin Juravlebacfec32014-11-14 15:54:36 +00003500void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3501 DCHECK(instruction->IsDiv() || instruction->IsRem());
3502 Primitive::Type type = instruction->GetResultType();
3503  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3504
3505 bool is_div = instruction->IsDiv();
3506 LocationSummary* locations = instruction->GetLocations();
3507
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003508 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3509 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003510
Roland Levillain271ab9c2014-11-27 15:23:57 +00003511 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003512 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003513
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003514 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003515 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003516
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003517 if (imm == 0) {
3518      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3519 } else if (imm == 1 || imm == -1) {
3520 DivRemOneOrMinusOne(instruction);
3521 } else if (instruction->IsDiv() && IsPowerOfTwo(std::abs(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003522 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003523 } else {
3524 DCHECK(imm <= -2 || imm >= 2);
3525 GenerateDivRemWithAnyConstant(instruction);
3526 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003527 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003528 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003529 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
3530 out.AsRegister(), type, is_div);
3531 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003532
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003533 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3534 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3535    // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3536 // so it's safe to just use negl instead of more complex comparisons.
3537 if (type == Primitive::kPrimInt) {
3538 __ cmpl(second_reg, Immediate(-1));
3539 __ j(kEqual, slow_path->GetEntryLabel());
3540 // edx:eax <- sign-extended of eax
3541 __ cdq();
3542 // eax = quotient, edx = remainder
3543 __ idivl(second_reg);
3544 } else {
3545 __ cmpq(second_reg, Immediate(-1));
3546 __ j(kEqual, slow_path->GetEntryLabel());
3547 // rdx:rax <- sign-extended of rax
3548 __ cqo();
3549 // rax = quotient, rdx = remainder
3550 __ idivq(second_reg);
3551 }
3552 __ Bind(slow_path->GetExitLabel());
3553 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003554}
3555
Calin Juravle7c4954d2014-10-28 16:57:40 +00003556void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3557 LocationSummary* locations =
3558 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3559 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003560 case Primitive::kPrimInt:
3561 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003562 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003563 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003564 locations->SetOut(Location::SameAsFirstInput());
3565 // Intel uses edx:eax as the dividend.
3566 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003567      // We need to save the numerator while we clobber RAX and RDX. Since imul forces its
3568      // result into RDX:RAX and RDX is already reserved above, we only request one extra
3569      // temporary (to hold the numerator) when the divisor is a constant.
3570 if (div->InputAt(1)->IsConstant()) {
3571 locations->AddTemp(Location::RequiresRegister());
3572 }
Calin Juravled0d48522014-11-04 16:40:20 +00003573 break;
3574 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003575
Calin Juravle7c4954d2014-10-28 16:57:40 +00003576 case Primitive::kPrimFloat:
3577 case Primitive::kPrimDouble: {
3578 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003579 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003580 locations->SetOut(Location::SameAsFirstInput());
3581 break;
3582 }
3583
3584 default:
3585 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3586 }
3587}
3588
3589void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3590 LocationSummary* locations = div->GetLocations();
3591 Location first = locations->InAt(0);
3592 Location second = locations->InAt(1);
3593 DCHECK(first.Equals(locations->Out()));
3594
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003595 Primitive::Type type = div->GetResultType();
3596 switch (type) {
3597 case Primitive::kPrimInt:
3598 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003599 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003600 break;
3601 }
3602
Calin Juravle7c4954d2014-10-28 16:57:40 +00003603 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003604 if (second.IsFpuRegister()) {
3605 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3606 } else if (second.IsConstant()) {
3607 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003608 codegen_->LiteralFloatAddress(
3609 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003610 } else {
3611 DCHECK(second.IsStackSlot());
3612 __ divss(first.AsFpuRegister<XmmRegister>(),
3613 Address(CpuRegister(RSP), second.GetStackIndex()));
3614 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003615 break;
3616 }
3617
3618 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003619 if (second.IsFpuRegister()) {
3620 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3621 } else if (second.IsConstant()) {
3622 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003623 codegen_->LiteralDoubleAddress(
3624 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003625 } else {
3626 DCHECK(second.IsDoubleStackSlot());
3627 __ divsd(first.AsFpuRegister<XmmRegister>(),
3628 Address(CpuRegister(RSP), second.GetStackIndex()));
3629 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003630 break;
3631 }
3632
3633 default:
3634 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3635 }
3636}
3637
Calin Juravlebacfec32014-11-14 15:54:36 +00003638void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003639 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003640 LocationSummary* locations =
3641 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003642
3643 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003644 case Primitive::kPrimInt:
3645 case Primitive::kPrimLong: {
3646 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003647 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003648 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3649 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003650      // We need to save the numerator while we clobber RAX and RDX. Since imul forces its
3651      // result into RDX:RAX and the remainder already lives in RDX (the output), we only
3652      // request one extra temporary (to hold the numerator) when the divisor is a constant.
3653 if (rem->InputAt(1)->IsConstant()) {
3654 locations->AddTemp(Location::RequiresRegister());
3655 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003656 break;
3657 }
3658
3659 case Primitive::kPrimFloat:
3660 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003661 locations->SetInAt(0, Location::Any());
3662 locations->SetInAt(1, Location::Any());
3663 locations->SetOut(Location::RequiresFpuRegister());
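      // RAX is also needed because GenerateRemFP reads the x87 status word with fstsw,
      // which implicitly targets AX.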
3664 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003665 break;
3666 }
3667
3668 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003669 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003670 }
3671}
3672
3673void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3674 Primitive::Type type = rem->GetResultType();
3675 switch (type) {
3676 case Primitive::kPrimInt:
3677 case Primitive::kPrimLong: {
3678 GenerateDivRemIntegral(rem);
3679 break;
3680 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003681 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003682 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003683 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003684 break;
3685 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003686 default:
3687 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3688 }
3689}
3690
Calin Juravled0d48522014-11-04 16:40:20 +00003691void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003692 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3693 ? LocationSummary::kCallOnSlowPath
3694 : LocationSummary::kNoCall;
3695 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003696 locations->SetInAt(0, Location::Any());
3697 if (instruction->HasUses()) {
3698 locations->SetOut(Location::SameAsFirstInput());
3699 }
3700}
3701
3702void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003703 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003704 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3705 codegen_->AddSlowPath(slow_path);
3706
3707 LocationSummary* locations = instruction->GetLocations();
3708 Location value = locations->InAt(0);
3709
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003710 switch (instruction->GetType()) {
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003711 case Primitive::kPrimByte:
3712 case Primitive::kPrimChar:
3713 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003714 case Primitive::kPrimInt: {
3715 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003716 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003717 __ j(kEqual, slow_path->GetEntryLabel());
3718 } else if (value.IsStackSlot()) {
3719 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3720 __ j(kEqual, slow_path->GetEntryLabel());
3721 } else {
3722 DCHECK(value.IsConstant()) << value;
3723 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3724 __ jmp(slow_path->GetEntryLabel());
3725 }
3726 }
3727 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003728 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003729 case Primitive::kPrimLong: {
3730 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003731 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003732 __ j(kEqual, slow_path->GetEntryLabel());
3733 } else if (value.IsDoubleStackSlot()) {
3734 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3735 __ j(kEqual, slow_path->GetEntryLabel());
3736 } else {
3737 DCHECK(value.IsConstant()) << value;
3738 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3739 __ jmp(slow_path->GetEntryLabel());
3740 }
3741 }
3742 break;
3743 }
3744 default:
3745 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003746 }
Calin Juravled0d48522014-11-04 16:40:20 +00003747}
3748
Calin Juravle9aec02f2014-11-18 23:06:35 +00003749void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3750 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3751
3752 LocationSummary* locations =
3753 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3754
3755 switch (op->GetResultType()) {
3756 case Primitive::kPrimInt:
3757 case Primitive::kPrimLong: {
3758 locations->SetInAt(0, Location::RequiresRegister());
3759 // The shift count needs to be in CL.
3760 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3761 locations->SetOut(Location::SameAsFirstInput());
3762 break;
3763 }
3764 default:
3765 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3766 }
3767}
3768
3769void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3770 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3771
3772 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003773 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003774 Location second = locations->InAt(1);
3775
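  // Shift distances follow Java semantics: only the low 5 bits (int) or 6 bits (long) of the
  // count are used, so e.g. an int shifted by 35 is shifted by 3. The constant paths below
  // mask explicitly; the register paths rely on shl/sar/shr applying the same masking to CL.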
3776 switch (op->GetResultType()) {
3777 case Primitive::kPrimInt: {
3778 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003779 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003780 if (op->IsShl()) {
3781 __ shll(first_reg, second_reg);
3782 } else if (op->IsShr()) {
3783 __ sarl(first_reg, second_reg);
3784 } else {
3785 __ shrl(first_reg, second_reg);
3786 }
3787 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003788 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003789 if (op->IsShl()) {
3790 __ shll(first_reg, imm);
3791 } else if (op->IsShr()) {
3792 __ sarl(first_reg, imm);
3793 } else {
3794 __ shrl(first_reg, imm);
3795 }
3796 }
3797 break;
3798 }
3799 case Primitive::kPrimLong: {
3800 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003801 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003802 if (op->IsShl()) {
3803 __ shlq(first_reg, second_reg);
3804 } else if (op->IsShr()) {
3805 __ sarq(first_reg, second_reg);
3806 } else {
3807 __ shrq(first_reg, second_reg);
3808 }
3809 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003810 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003811 if (op->IsShl()) {
3812 __ shlq(first_reg, imm);
3813 } else if (op->IsShr()) {
3814 __ sarq(first_reg, imm);
3815 } else {
3816 __ shrq(first_reg, imm);
3817 }
3818 }
3819 break;
3820 }
3821 default:
3822 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003823 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003824 }
3825}
3826
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003827void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3828 LocationSummary* locations =
3829 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3830
3831 switch (ror->GetResultType()) {
3832 case Primitive::kPrimInt:
3833 case Primitive::kPrimLong: {
3834 locations->SetInAt(0, Location::RequiresRegister());
3835 // The shift count needs to be in CL (unless it is a constant).
3836 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3837 locations->SetOut(Location::SameAsFirstInput());
3838 break;
3839 }
3840 default:
3841 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3842 UNREACHABLE();
3843 }
3844}
3845
3846void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3847 LocationSummary* locations = ror->GetLocations();
3848 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3849 Location second = locations->InAt(1);
3850
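  // As with the shifts above, the hardware masks the rotate count in CL to 5 bits (rorl) or
  // 6 bits (rorq), so only the constant paths below need explicit masking.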
3851 switch (ror->GetResultType()) {
3852 case Primitive::kPrimInt:
3853 if (second.IsRegister()) {
3854 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3855 __ rorl(first_reg, second_reg);
3856 } else {
3857 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
3858 __ rorl(first_reg, imm);
3859 }
3860 break;
3861 case Primitive::kPrimLong:
3862 if (second.IsRegister()) {
3863 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3864 __ rorq(first_reg, second_reg);
3865 } else {
3866 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
3867 __ rorq(first_reg, imm);
3868 }
3869 break;
3870 default:
3871 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3872 UNREACHABLE();
3873 }
3874}
3875
Calin Juravle9aec02f2014-11-18 23:06:35 +00003876void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3877 HandleShift(shl);
3878}
3879
3880void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3881 HandleShift(shl);
3882}
3883
3884void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3885 HandleShift(shr);
3886}
3887
3888void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3889 HandleShift(shr);
3890}
3891
3892void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3893 HandleShift(ushr);
3894}
3895
3896void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3897 HandleShift(ushr);
3898}
3899
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003900void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003901 LocationSummary* locations =
3902 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003903 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffray729645a2015-11-19 13:29:02 +00003904 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3905 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003906 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003907}
3908
3909void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003910  // Note: if heap poisoning is enabled, the entry point takes care
3911 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003912 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3913 instruction,
3914 instruction->GetDexPc(),
3915 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003916 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003917
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01003918 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003919}
3920
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003921void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3922 LocationSummary* locations =
3923 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3924 InvokeRuntimeCallingConvention calling_convention;
3925 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003926 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003927 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003928 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003929}
3930
3931void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3932 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003933 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3934 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003935  // Note: if heap poisoning is enabled, the entry point takes care
3936 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003937 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3938 instruction,
3939 instruction->GetDexPc(),
3940 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003941 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003942
3943 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003944}
3945
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003946void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003947 LocationSummary* locations =
3948 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003949 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3950 if (location.IsStackSlot()) {
3951 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3952 } else if (location.IsDoubleStackSlot()) {
3953 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3954 }
3955 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003956}
3957
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003958void InstructionCodeGeneratorX86_64::VisitParameterValue(
3959 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003960 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003961}
3962
3963void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3964 LocationSummary* locations =
3965 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3966 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3967}
3968
3969void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3970 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3971 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003972}
3973
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003974void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003975 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003976 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003977 locations->SetInAt(0, Location::RequiresRegister());
3978 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003979}
3980
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003981void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3982 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003983 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3984 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003985 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003986 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003987 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003988 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003989 break;
3990
3991 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003992 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003993 break;
3994
3995 default:
3996 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3997 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003998}
3999
David Brazdil66d126e2015-04-03 16:02:44 +01004000void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4001 LocationSummary* locations =
4002 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4003 locations->SetInAt(0, Location::RequiresRegister());
4004 locations->SetOut(Location::SameAsFirstInput());
4005}
4006
4007void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004008 LocationSummary* locations = bool_not->GetLocations();
4009 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4010 locations->Out().AsRegister<CpuRegister>().AsRegister());
4011 Location out = locations->Out();
4012 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4013}
4014
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004015void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004016 LocationSummary* locations =
4017 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004018 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4019 locations->SetInAt(i, Location::Any());
4020 }
4021 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004022}
4023
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004024void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004025 LOG(FATAL) << "Unimplemented";
4026}
4027
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004028void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004029 /*
4030 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004031 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004032 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4033 */
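  // For example, HandleFieldSet emits kAnyStore (a no-op here) before a volatile store and
  // kAnyAny (mfence) after it, while HandleFieldGet emits kLoadAny (also a no-op) after a
  // volatile load.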
4034 switch (kind) {
4035 case MemBarrierKind::kAnyAny: {
4036 __ mfence();
4037 break;
4038 }
4039 case MemBarrierKind::kAnyStore:
4040 case MemBarrierKind::kLoadAny:
4041 case MemBarrierKind::kStoreStore: {
4042 // nop
4043 break;
4044 }
4045 default:
4046      LOG(FATAL) << "Unexpected memory barrier " << kind;
4047 }
4048}
4049
4050void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4051 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4052
Roland Levillain0d5a2812015-11-13 10:07:31 +00004053 bool object_field_get_with_read_barrier =
4054 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004055 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004056 new (GetGraph()->GetArena()) LocationSummary(instruction,
4057 object_field_get_with_read_barrier ?
4058 LocationSummary::kCallOnSlowPath :
4059 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004060 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004061 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4062 locations->SetOut(Location::RequiresFpuRegister());
4063 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004064 // The output overlaps for an object field get when read barriers
4065 // are enabled: we do not want the move to overwrite the object's
4066 // location, as we need it to emit the read barrier.
4067 locations->SetOut(
4068 Location::RequiresRegister(),
4069 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004070 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004071 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4072 // We need a temporary register for the read barrier marking slow
4073 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4074 locations->AddTemp(Location::RequiresRegister());
4075 }
Calin Juravle52c48962014-12-16 17:02:57 +00004076}
4077
4078void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4079 const FieldInfo& field_info) {
4080 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4081
4082 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004083 Location base_loc = locations->InAt(0);
4084 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004085 Location out = locations->Out();
4086 bool is_volatile = field_info.IsVolatile();
4087 Primitive::Type field_type = field_info.GetFieldType();
4088 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4089
4090 switch (field_type) {
4091 case Primitive::kPrimBoolean: {
4092 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4093 break;
4094 }
4095
4096 case Primitive::kPrimByte: {
4097 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4098 break;
4099 }
4100
4101 case Primitive::kPrimShort: {
4102 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4103 break;
4104 }
4105
4106 case Primitive::kPrimChar: {
4107 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4108 break;
4109 }
4110
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004111 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004112 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4113 break;
4114 }
4115
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004116 case Primitive::kPrimNot: {
4117 // /* HeapReference<Object> */ out = *(base + offset)
4118 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4119 Location temp_loc = locations->GetTemp(0);
4120 // Note that a potential implicit null check is handled in this
4121        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4122 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4123 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4124 if (is_volatile) {
4125 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4126 }
4127 } else {
4128 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4129 codegen_->MaybeRecordImplicitNullCheck(instruction);
4130 if (is_volatile) {
4131 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4132 }
4133 // If read barriers are enabled, emit read barriers other than
4134 // Baker's using a slow path (and also unpoison the loaded
4135 // reference, if heap poisoning is enabled).
4136 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4137 }
4138 break;
4139 }
4140
Calin Juravle52c48962014-12-16 17:02:57 +00004141 case Primitive::kPrimLong: {
4142 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4143 break;
4144 }
4145
4146 case Primitive::kPrimFloat: {
4147 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4148 break;
4149 }
4150
4151 case Primitive::kPrimDouble: {
4152 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4153 break;
4154 }
4155
4156 case Primitive::kPrimVoid:
4157 LOG(FATAL) << "Unreachable type " << field_type;
4158 UNREACHABLE();
4159 }
4160
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004161 if (field_type == Primitive::kPrimNot) {
4162 // Potential implicit null checks, in the case of reference
4163 // fields, are handled in the previous switch statement.
4164 } else {
4165 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004166 }
Roland Levillain4d027112015-07-01 15:41:14 +01004167
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004168 if (is_volatile) {
4169 if (field_type == Primitive::kPrimNot) {
4170 // Memory barriers, in the case of references, are also handled
4171 // in the previous switch statement.
4172 } else {
4173 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4174 }
Roland Levillain4d027112015-07-01 15:41:14 +01004175 }
Calin Juravle52c48962014-12-16 17:02:57 +00004176}
4177
4178void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4179 const FieldInfo& field_info) {
4180 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4181
4182 LocationSummary* locations =
4183 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004184 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004185 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004186 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004187 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004188
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004189 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004190 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004191 if (is_volatile) {
4192 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4193 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4194 } else {
4195 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4196 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004197 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004198 if (is_volatile) {
4199 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4200 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4201 } else {
4202 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4203 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004204 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004205 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004206 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004207 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004208 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004209 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4210 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004211 locations->AddTemp(Location::RequiresRegister());
4212 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004213}
4214
Calin Juravle52c48962014-12-16 17:02:57 +00004215void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004216 const FieldInfo& field_info,
4217 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004218 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4219
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004220 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004221 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4222 Location value = locations->InAt(1);
4223 bool is_volatile = field_info.IsVolatile();
4224 Primitive::Type field_type = field_info.GetFieldType();
4225 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4226
4227 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004228 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004229 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004230
Mark Mendellea5af682015-10-22 17:35:49 -04004231 bool maybe_record_implicit_null_check_done = false;
4232
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004233 switch (field_type) {
4234 case Primitive::kPrimBoolean:
4235 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004236 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004237 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004238 __ movb(Address(base, offset), Immediate(v));
4239 } else {
4240 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4241 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004242 break;
4243 }
4244
4245 case Primitive::kPrimShort:
4246 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004247 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004248 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004249 __ movw(Address(base, offset), Immediate(v));
4250 } else {
4251 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4252 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004253 break;
4254 }
4255
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004256 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004257 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004258 if (value.IsConstant()) {
4259 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004260 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4261 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4262 // Note: if heap poisoning is enabled, no need to poison
4263 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004264 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004265 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004266 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4267 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4268 __ movl(temp, value.AsRegister<CpuRegister>());
4269 __ PoisonHeapReference(temp);
4270 __ movl(Address(base, offset), temp);
4271 } else {
4272 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4273 }
Mark Mendell40741f32015-04-20 22:10:34 -04004274 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004275 break;
4276 }
4277
4278 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004279 if (value.IsConstant()) {
4280 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004281 codegen_->MoveInt64ToAddress(Address(base, offset),
4282 Address(base, offset + sizeof(int32_t)),
4283 v,
4284 instruction);
4285 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004286 } else {
4287 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4288 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004289 break;
4290 }
4291
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004292 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004293 if (value.IsConstant()) {
4294 int32_t v =
4295 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4296 __ movl(Address(base, offset), Immediate(v));
4297 } else {
4298 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4299 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004300 break;
4301 }
4302
4303 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004304 if (value.IsConstant()) {
4305 int64_t v =
4306 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4307 codegen_->MoveInt64ToAddress(Address(base, offset),
4308 Address(base, offset + sizeof(int32_t)),
4309 v,
4310 instruction);
4311 maybe_record_implicit_null_check_done = true;
4312 } else {
4313 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4314 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004315 break;
4316 }
4317
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004318 case Primitive::kPrimVoid:
4319 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004320 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004321 }
Calin Juravle52c48962014-12-16 17:02:57 +00004322
Mark Mendellea5af682015-10-22 17:35:49 -04004323 if (!maybe_record_implicit_null_check_done) {
4324 codegen_->MaybeRecordImplicitNullCheck(instruction);
4325 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004326
4327 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4328 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4329 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004330 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004331 }
4332
Calin Juravle52c48962014-12-16 17:02:57 +00004333 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004334 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004335 }
4336}
4337
4338void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4339 HandleFieldSet(instruction, instruction->GetFieldInfo());
4340}
4341
4342void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004343 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004344}
4345
4346void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004347 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004348}
4349
4350void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004351 HandleFieldGet(instruction, instruction->GetFieldInfo());
4352}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004353
Calin Juravle52c48962014-12-16 17:02:57 +00004354void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4355 HandleFieldGet(instruction);
4356}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004357
Calin Juravle52c48962014-12-16 17:02:57 +00004358void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4359 HandleFieldGet(instruction, instruction->GetFieldInfo());
4360}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004361
Calin Juravle52c48962014-12-16 17:02:57 +00004362void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4363 HandleFieldSet(instruction, instruction->GetFieldInfo());
4364}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004365
Calin Juravle52c48962014-12-16 17:02:57 +00004366void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004367 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004368}
4369
Calin Juravlee460d1d2015-09-29 04:52:17 +01004370void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4371 HUnresolvedInstanceFieldGet* instruction) {
4372 FieldAccessCallingConventionX86_64 calling_convention;
4373 codegen_->CreateUnresolvedFieldLocationSummary(
4374 instruction, instruction->GetFieldType(), calling_convention);
4375}
4376
4377void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4378 HUnresolvedInstanceFieldGet* instruction) {
4379 FieldAccessCallingConventionX86_64 calling_convention;
4380 codegen_->GenerateUnresolvedFieldAccess(instruction,
4381 instruction->GetFieldType(),
4382 instruction->GetFieldIndex(),
4383 instruction->GetDexPc(),
4384 calling_convention);
4385}
4386
4387void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4388 HUnresolvedInstanceFieldSet* instruction) {
4389 FieldAccessCallingConventionX86_64 calling_convention;
4390 codegen_->CreateUnresolvedFieldLocationSummary(
4391 instruction, instruction->GetFieldType(), calling_convention);
4392}
4393
4394void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4395 HUnresolvedInstanceFieldSet* instruction) {
4396 FieldAccessCallingConventionX86_64 calling_convention;
4397 codegen_->GenerateUnresolvedFieldAccess(instruction,
4398 instruction->GetFieldType(),
4399 instruction->GetFieldIndex(),
4400 instruction->GetDexPc(),
4401 calling_convention);
4402}
4403
4404void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4405 HUnresolvedStaticFieldGet* instruction) {
4406 FieldAccessCallingConventionX86_64 calling_convention;
4407 codegen_->CreateUnresolvedFieldLocationSummary(
4408 instruction, instruction->GetFieldType(), calling_convention);
4409}
4410
4411void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4412 HUnresolvedStaticFieldGet* instruction) {
4413 FieldAccessCallingConventionX86_64 calling_convention;
4414 codegen_->GenerateUnresolvedFieldAccess(instruction,
4415 instruction->GetFieldType(),
4416 instruction->GetFieldIndex(),
4417 instruction->GetDexPc(),
4418 calling_convention);
4419}
4420
4421void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4422 HUnresolvedStaticFieldSet* instruction) {
4423 FieldAccessCallingConventionX86_64 calling_convention;
4424 codegen_->CreateUnresolvedFieldLocationSummary(
4425 instruction, instruction->GetFieldType(), calling_convention);
4426}
4427
4428void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4429 HUnresolvedStaticFieldSet* instruction) {
4430 FieldAccessCallingConventionX86_64 calling_convention;
4431 codegen_->GenerateUnresolvedFieldAccess(instruction,
4432 instruction->GetFieldType(),
4433 instruction->GetFieldIndex(),
4434 instruction->GetDexPc(),
4435 calling_convention);
4436}
4437
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004438void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004439 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4440 ? LocationSummary::kCallOnSlowPath
4441 : LocationSummary::kNoCall;
4442 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4443 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004444 ? Location::RequiresRegister()
4445 : Location::Any();
4446 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004447 if (instruction->HasUses()) {
4448 locations->SetOut(Location::SameAsFirstInput());
4449 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004450}
4451
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004452void InstructionCodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004453 if (codegen_->CanMoveNullCheckToUser(instruction)) {
4454 return;
4455 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004456 LocationSummary* locations = instruction->GetLocations();
4457 Location obj = locations->InAt(0);
4458
4459 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
4460 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4461}
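
// Illustrative sketch, not from the upstream sources: what the single
// `testl RAX, [obj]` emitted above does semantically. The loaded value is
// irrelevant; only the memory access matters. If `obj` is null the access
// faults, and the runtime's fault handler turns the signal into a
// NullPointerException, so no explicit compare-and-branch is needed.
// The helper name and parameter are hypothetical.
static inline void ExampleImplicitNullCheck(const int32_t* obj) {
  volatile int32_t unused = *obj;  // Faults when obj == nullptr; result discarded.
  (void)unused;
}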
4462
4463void InstructionCodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004464 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004465 codegen_->AddSlowPath(slow_path);
4466
4467 LocationSummary* locations = instruction->GetLocations();
4468 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004469
4470 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004471 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004472 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004473 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004474 } else {
4475 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004476 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004477 __ jmp(slow_path->GetEntryLabel());
4478 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004479 }
4480 __ j(kEqual, slow_path->GetEntryLabel());
4481}
4482
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004483void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004484 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004485 GenerateImplicitNullCheck(instruction);
4486 } else {
4487 GenerateExplicitNullCheck(instruction);
4488 }
4489}
4490
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004491void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004492 bool object_array_get_with_read_barrier =
4493 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004494 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004495 new (GetGraph()->GetArena()) LocationSummary(instruction,
4496 object_array_get_with_read_barrier ?
4497 LocationSummary::kCallOnSlowPath :
4498 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004499 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004500 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004501 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4502 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4503 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004504 // The output overlaps for an object array get when read barriers
4505 // are enabled: we do not want the move to overwrite the array's
4506 // location, as we need it to emit the read barrier.
4507 locations->SetOut(
4508 Location::RequiresRegister(),
4509 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004510 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004511 // We need a temporary register for the read barrier marking slow
4512 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4513 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4514 locations->AddTemp(Location::RequiresRegister());
4515 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004516}
4517
4518void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4519 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004520 Location obj_loc = locations->InAt(0);
4521 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004522 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004523 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004524
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004525 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004526 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004527 case Primitive::kPrimBoolean: {
4528 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004529 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004530 if (index.IsConstant()) {
4531 __ movzxb(out, Address(obj,
4532 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4533 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004534 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004535 }
4536 break;
4537 }
4538
4539 case Primitive::kPrimByte: {
4540 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004541 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004542 if (index.IsConstant()) {
4543 __ movsxb(out, Address(obj,
4544 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4545 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004546 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004547 }
4548 break;
4549 }
4550
4551 case Primitive::kPrimShort: {
4552 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004553 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004554 if (index.IsConstant()) {
4555 __ movsxw(out, Address(obj,
4556 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4557 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004558 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004559 }
4560 break;
4561 }
4562
4563 case Primitive::kPrimChar: {
4564 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004565 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004566 if (index.IsConstant()) {
4567 __ movzxw(out, Address(obj,
4568 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4569 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004570 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004571 }
4572 break;
4573 }
4574
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004575 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004576 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004577 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004578 if (index.IsConstant()) {
4579 __ movl(out, Address(obj,
4580 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4581 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004582 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004583 }
4584 break;
4585 }
4586
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004587 case Primitive::kPrimNot: {
4588 static_assert(
4589 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4590 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4591 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4592 // /* HeapReference<Object> */ out =
4593 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4594 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4595 Location temp = locations->GetTemp(0);
4596 // Note that a potential implicit null check is handled in this
 4597        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4598 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4599 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4600 } else {
4601 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4602 if (index.IsConstant()) {
4603 uint32_t offset =
4604 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4605 __ movl(out, Address(obj, offset));
4606 codegen_->MaybeRecordImplicitNullCheck(instruction);
4607 // If read barriers are enabled, emit read barriers other than
4608 // Baker's using a slow path (and also unpoison the loaded
4609 // reference, if heap poisoning is enabled).
4610 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4611 } else {
4612 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4613 codegen_->MaybeRecordImplicitNullCheck(instruction);
4614 // If read barriers are enabled, emit read barriers other than
4615 // Baker's using a slow path (and also unpoison the loaded
4616 // reference, if heap poisoning is enabled).
4617 codegen_->MaybeGenerateReadBarrierSlow(
4618 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4619 }
4620 }
4621 break;
4622 }
4623
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004624 case Primitive::kPrimLong: {
4625 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004626 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004627 if (index.IsConstant()) {
4628 __ movq(out, Address(obj,
4629 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4630 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004631 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004632 }
4633 break;
4634 }
4635
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004636 case Primitive::kPrimFloat: {
4637 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004638 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004639 if (index.IsConstant()) {
4640 __ movss(out, Address(obj,
4641 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4642 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004643 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004644 }
4645 break;
4646 }
4647
4648 case Primitive::kPrimDouble: {
4649 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004650 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004651 if (index.IsConstant()) {
4652 __ movsd(out, Address(obj,
4653 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4654 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004655 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004656 }
4657 break;
4658 }
4659
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004660 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004661 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004662 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004663 }
Roland Levillain4d027112015-07-01 15:41:14 +01004664
4665 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004666 // Potential implicit null checks, in the case of reference
4667 // arrays, are handled in the previous switch statement.
4668 } else {
4669 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004670 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004671}
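
// Illustrative sketch, not from the upstream sources: the addressing math used
// by the array loads above. An element of a primitive type of size 1 << scale
// lives at data_offset + (index << scale) from the array base, which is what
// Address(obj, index_reg, TIMES_n, data_offset) encodes; for a constant index
// the shift is folded into the displacement. Names are hypothetical and the
// exact data_offset depends on the array header layout.
static inline size_t ExampleArrayElementOffset(size_t data_offset, size_t index, size_t scale) {
  return data_offset + (index << scale);
}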
4672
4673void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004674 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004675
4676 bool needs_write_barrier =
4677 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004678 bool may_need_runtime_call = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004679 bool object_array_set_with_read_barrier =
4680 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004681
Nicolas Geoffray39468442014-09-02 15:17:15 +01004682 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004683 instruction,
Roland Levillain0d5a2812015-11-13 10:07:31 +00004684 (may_need_runtime_call || object_array_set_with_read_barrier) ?
4685 LocationSummary::kCallOnSlowPath :
4686 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004687
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004688 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004689 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4690 if (Primitive::IsFloatingPointType(value_type)) {
4691 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004692 } else {
4693 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4694 }
4695
4696 if (needs_write_barrier) {
4697 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004698
4699 // This first temporary register is possibly used for heap
4700 // reference poisoning and/or read barrier emission too.
4701 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004702 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004703 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004704}
4705
4706void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4707 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004708 Location array_loc = locations->InAt(0);
4709 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004710 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004711 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004712 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004713 bool may_need_runtime_call = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004714 bool needs_write_barrier =
4715 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004716 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4717 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4718 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004719
4720 switch (value_type) {
4721 case Primitive::kPrimBoolean:
4722 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004723 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4724 Address address = index.IsConstant()
4725 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4726 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4727 if (value.IsRegister()) {
4728 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004729 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004730 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004731 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004732 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004733 break;
4734 }
4735
4736 case Primitive::kPrimShort:
4737 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004738 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4739 Address address = index.IsConstant()
4740 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4741 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4742 if (value.IsRegister()) {
4743 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004745 DCHECK(value.IsConstant()) << value;
4746 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004747 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004748 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004749 break;
4750 }
4751
4752 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004753 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4754 Address address = index.IsConstant()
4755 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4756 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004757
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004758 if (!value.IsRegister()) {
4759 // Just setting null.
4760 DCHECK(instruction->InputAt(2)->IsNullConstant());
4761 DCHECK(value.IsConstant()) << value;
4762 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004763 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004764 DCHECK(!needs_write_barrier);
4765 DCHECK(!may_need_runtime_call);
4766 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004767 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004768
4769 DCHECK(needs_write_barrier);
4770 CpuRegister register_value = value.AsRegister<CpuRegister>();
4771 NearLabel done, not_null, do_put;
4772 SlowPathCode* slow_path = nullptr;
4773 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4774 if (may_need_runtime_call) {
4775 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4776 codegen_->AddSlowPath(slow_path);
4777 if (instruction->GetValueCanBeNull()) {
4778 __ testl(register_value, register_value);
4779 __ j(kNotEqual, &not_null);
4780 __ movl(address, Immediate(0));
4781 codegen_->MaybeRecordImplicitNullCheck(instruction);
4782 __ jmp(&done);
4783 __ Bind(&not_null);
4784 }
4785
Roland Levillain0d5a2812015-11-13 10:07:31 +00004786 if (kEmitCompilerReadBarrier) {
4787 // When read barriers are enabled, the type checking
4788 // instrumentation requires two read barriers:
4789 //
4790 // __ movl(temp2, temp);
4791 // // /* HeapReference<Class> */ temp = temp->component_type_
4792 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004793 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004794 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4795 //
4796 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4797 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004798 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004799 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4800 //
4801 // __ cmpl(temp, temp2);
4802 //
4803 // However, the second read barrier may trash `temp`, as it
4804 // is a temporary register, and as such would not be saved
4805 // along with live registers before calling the runtime (nor
4806 // restored afterwards). So in this case, we bail out and
4807 // delegate the work to the array set slow path.
4808 //
4809 // TODO: Extend the register allocator to support a new
4810 // "(locally) live temp" location so as to avoid always
4811 // going into the slow path when read barriers are enabled.
4812 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004813 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004814 // /* HeapReference<Class> */ temp = array->klass_
4815 __ movl(temp, Address(array, class_offset));
4816 codegen_->MaybeRecordImplicitNullCheck(instruction);
4817 __ MaybeUnpoisonHeapReference(temp);
4818
4819 // /* HeapReference<Class> */ temp = temp->component_type_
4820 __ movl(temp, Address(temp, component_offset));
4821 // If heap poisoning is enabled, no need to unpoison `temp`
4822 // nor the object reference in `register_value->klass`, as
4823 // we are comparing two poisoned references.
4824 __ cmpl(temp, Address(register_value, class_offset));
4825
4826 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4827 __ j(kEqual, &do_put);
4828 // If heap poisoning is enabled, the `temp` reference has
4829 // not been unpoisoned yet; unpoison it now.
4830 __ MaybeUnpoisonHeapReference(temp);
4831
4832 // /* HeapReference<Class> */ temp = temp->super_class_
4833 __ movl(temp, Address(temp, super_offset));
4834 // If heap poisoning is enabled, no need to unpoison
4835 // `temp`, as we are comparing against null below.
4836 __ testl(temp, temp);
4837 __ j(kNotEqual, slow_path->GetEntryLabel());
4838 __ Bind(&do_put);
4839 } else {
4840 __ j(kNotEqual, slow_path->GetEntryLabel());
4841 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004842 }
4843 }
4844
4845 if (kPoisonHeapReferences) {
4846 __ movl(temp, register_value);
4847 __ PoisonHeapReference(temp);
4848 __ movl(address, temp);
4849 } else {
4850 __ movl(address, register_value);
4851 }
4852 if (!may_need_runtime_call) {
4853 codegen_->MaybeRecordImplicitNullCheck(instruction);
4854 }
4855
4856 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4857 codegen_->MarkGCCard(
4858 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4859 __ Bind(&done);
4860
4861 if (slow_path != nullptr) {
4862 __ Bind(slow_path->GetExitLabel());
4863 }
4864
4865 break;
4866 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004867
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004868 case Primitive::kPrimInt: {
4869 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4870 Address address = index.IsConstant()
4871 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4872 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4873 if (value.IsRegister()) {
4874 __ movl(address, value.AsRegister<CpuRegister>());
4875 } else {
4876 DCHECK(value.IsConstant()) << value;
4877 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4878 __ movl(address, Immediate(v));
4879 }
4880 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004881 break;
4882 }
4883
4884 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004885 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4886 Address address = index.IsConstant()
4887 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4888 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4889 if (value.IsRegister()) {
4890 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004891 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004892 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004893 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004894 Address address_high = index.IsConstant()
4895 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4896 offset + sizeof(int32_t))
4897 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4898 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004899 }
4900 break;
4901 }
4902
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004903 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004904 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4905 Address address = index.IsConstant()
4906 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4907 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004908 if (value.IsFpuRegister()) {
4909 __ movss(address, value.AsFpuRegister<XmmRegister>());
4910 } else {
4911 DCHECK(value.IsConstant());
4912 int32_t v =
4913 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4914 __ movl(address, Immediate(v));
4915 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004916 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004917 break;
4918 }
4919
4920 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004921 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4922 Address address = index.IsConstant()
4923 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4924 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004925 if (value.IsFpuRegister()) {
4926 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4927 codegen_->MaybeRecordImplicitNullCheck(instruction);
4928 } else {
4929 int64_t v =
4930 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4931 Address address_high = index.IsConstant()
4932 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4933 offset + sizeof(int32_t))
4934 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4935 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4936 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004937 break;
4938 }
4939
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004940 case Primitive::kPrimVoid:
4941 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004942 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004943 }
4944}
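
// Illustrative sketch, not from the upstream sources: the dynamic type check
// emitted above (non read barrier path) before storing a non-null reference
// into an object array. The store can proceed directly when the array's
// component type equals the value's class, or when the array is statically
// Object[] and the component type's super class is null (i.e. the component
// type is java.lang.Object); otherwise the code falls back to the
// ArraySetSlowPath, which performs the full check and may throw
// ArrayStoreException. The struct and function names are hypothetical.
struct ExampleClass { ExampleClass* super_class; ExampleClass* component_type; };
struct ExampleObject { ExampleClass* klass; };
static inline bool ExampleCanStoreWithoutSlowPath(ExampleObject* array,
                                                  ExampleObject* value,
                                                  bool static_type_is_object_array) {
  ExampleClass* component = array->klass->component_type;
  if (component == value->klass) {
    return true;  // Exact match: j(kEqual, &do_put).
  }
  // Statically Object[] and the component type has no super class => component is Object.
  return static_type_is_object_array && component->super_class == nullptr;
}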
4945
4946void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004947 LocationSummary* locations =
4948 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004949 locations->SetInAt(0, Location::RequiresRegister());
4950 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004951}
4952
4953void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4954 LocationSummary* locations = instruction->GetLocations();
4955 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004956 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4957 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004958 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004959 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004960}
4961
4962void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004963 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4964 ? LocationSummary::kCallOnSlowPath
4965 : LocationSummary::kNoCall;
4966 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004967 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04004968 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004969 if (instruction->HasUses()) {
4970 locations->SetOut(Location::SameAsFirstInput());
4971 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004972}
4973
4974void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4975 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004976 Location index_loc = locations->InAt(0);
4977 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07004978 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004979 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004980
Mark Mendell99dbd682015-04-22 16:18:52 -04004981 if (length_loc.IsConstant()) {
4982 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4983 if (index_loc.IsConstant()) {
 4984        // BCE will remove the bounds check if we are guaranteed to pass.
4985 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4986 if (index < 0 || index >= length) {
4987 codegen_->AddSlowPath(slow_path);
4988 __ jmp(slow_path->GetEntryLabel());
4989 } else {
4990 // Some optimization after BCE may have generated this, and we should not
 4991          // Nothing to do: some optimization after BCE may have generated this, and the
 4992          // constant index is statically known to be within the constant bounds.
4993 return;
4994 }
4995
 4996        // The length is the constant operand, so the compare operands are swapped and the jump condition is reversed.
4997 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
4998 __ cmpl(index_reg, Immediate(length));
4999 codegen_->AddSlowPath(slow_path);
5000 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005001 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005002 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5003 if (index_loc.IsConstant()) {
5004 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5005 __ cmpl(length, Immediate(value));
5006 } else {
5007 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5008 }
5009 codegen_->AddSlowPath(slow_path);
5010 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005011 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005012}
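
// Illustrative sketch, not from the upstream sources: the bounds check above is
// a single unsigned comparison. Reinterpreting the signed index as unsigned
// makes negative values look like very large ones, so one test (j kAboveEqual
// when the index register is compared against the length, kBelowEqual when the
// length register is compared against the index) covers both "index < 0" and
// "index >= length".
static inline bool ExampleIndexOutOfBounds(int32_t index, int32_t length) {
  return static_cast<uint32_t>(index) >= static_cast<uint32_t>(length);
}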
5013
5014void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5015 CpuRegister card,
5016 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005017 CpuRegister value,
5018 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005019 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005020 if (value_can_be_null) {
5021 __ testl(value, value);
5022 __ j(kEqual, &is_null);
5023 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005024 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5025 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005026 __ movq(temp, object);
5027 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005028 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005029 if (value_can_be_null) {
5030 __ Bind(&is_null);
5031 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005032}
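
// Illustrative sketch, not from the upstream sources: the card marking sequence
// above computes card_table_base + (object_address >> kCardShift) and stores a
// single byte there. The byte written is the low 8 bits of the `card` register
// (the biased card table base), which the runtime arranges to equal the
// "dirty card" value, so no extra immediate load is needed. The shift value
// below is a stand-in; the real one is gc::accounting::CardTable::kCardShift.
static inline void ExampleMarkCard(uint8_t* card_table_base, uintptr_t object_address) {
  constexpr unsigned kExampleCardShift = 10;  // Assumed card size of 1 << 10 bytes.
  uint8_t dirty_value = static_cast<uint8_t>(reinterpret_cast<uintptr_t>(card_table_base));
  card_table_base[object_address >> kExampleCardShift] = dirty_value;
}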
5033
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005034void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
5035 temp->SetLocations(nullptr);
5036}
5037
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005038void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005039 // Nothing to do, this is driven by the code generator.
5040}
5041
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005042void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005043 LOG(FATAL) << "Unimplemented";
5044}
5045
5046void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005047 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5048}
5049
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005050void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5051 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5052}
5053
5054void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005055 HBasicBlock* block = instruction->GetBlock();
5056 if (block->GetLoopInformation() != nullptr) {
5057 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5058 // The back edge will generate the suspend check.
5059 return;
5060 }
5061 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5062 // The goto will generate the suspend check.
5063 return;
5064 }
5065 GenerateSuspendCheck(instruction, nullptr);
5066}
5067
5068void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5069 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005070 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005071 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5072 if (slow_path == nullptr) {
5073 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5074 instruction->SetSlowPath(slow_path);
5075 codegen_->AddSlowPath(slow_path);
5076 if (successor != nullptr) {
5077 DCHECK(successor->IsLoopHeader());
5078 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5079 }
5080 } else {
5081 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5082 }
5083
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005084 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5085 /* no_rip */ true),
5086 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005087 if (successor == nullptr) {
5088 __ j(kNotEqual, slow_path->GetEntryLabel());
5089 __ Bind(slow_path->GetReturnLabel());
5090 } else {
5091 __ j(kEqual, codegen_->GetLabelOf(successor));
5092 __ jmp(slow_path->GetEntryLabel());
5093 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005094}
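
// Illustrative sketch, not from the upstream sources: the suspend check polls a
// 16-bit flags field in the current Thread, addressed GS-relative on x86-64
// (the cmpw above). Zero means "no request pending"; any other value diverts
// to the SuspendCheckSlowPath, which calls into the runtime so the thread can
// be suspended for GC, debugging, etc. Names are hypothetical.
static inline bool ExampleSuspendRequested(const volatile uint16_t* thread_flags) {
  return *thread_flags != 0;
}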
5095
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005096X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5097 return codegen_->GetAssembler();
5098}
5099
5100void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005101 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005102 Location source = move->GetSource();
5103 Location destination = move->GetDestination();
5104
5105 if (source.IsRegister()) {
5106 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005107 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005108 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005109 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005110 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005111 } else {
5112 DCHECK(destination.IsDoubleStackSlot());
5113 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005114 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005115 }
5116 } else if (source.IsStackSlot()) {
5117 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005118 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005119 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005120 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005121 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005122 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005123 } else {
5124 DCHECK(destination.IsStackSlot());
5125 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5126 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5127 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005128 } else if (source.IsDoubleStackSlot()) {
5129 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005130 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005131 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005132 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005133 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5134 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005135 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005136 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005137 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5138 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5139 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005140 } else if (source.IsConstant()) {
5141 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005142 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5143 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005144 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005145 if (value == 0) {
5146 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5147 } else {
5148 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5149 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005150 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005151 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005152 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005153 }
5154 } else if (constant->IsLongConstant()) {
5155 int64_t value = constant->AsLongConstant()->GetValue();
5156 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005157 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005158 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005159 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005160 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005161 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005162 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005163 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005164 int32_t value = bit_cast<int32_t, float>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005165 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005166 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5167 if (value == 0) {
 5168            // Easy FP 0.0: clear the register.
5169 __ xorps(dest, dest);
5170 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005171 __ movss(dest, codegen_->LiteralFloatAddress(fp_value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005172 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005173 } else {
5174 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell92e83bf2015-05-07 11:25:03 -04005175 Immediate imm(value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005176 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5177 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005178 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005179 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005180 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005181 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005182 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005183 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5184 if (value == 0) {
5185 __ xorpd(dest, dest);
5186 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005187 __ movsd(dest, codegen_->LiteralDoubleAddress(fp_value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005188 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005189 } else {
5190 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005191 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005192 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005193 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005194 } else if (source.IsFpuRegister()) {
5195 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005196 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005197 } else if (destination.IsStackSlot()) {
5198 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005199 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005200 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005201 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005202 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005203 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005204 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005205 }
5206}
5207
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005208void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005209 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005210 __ movl(Address(CpuRegister(RSP), mem), reg);
5211 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005212}
5213
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005214void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005215 ScratchRegisterScope ensure_scratch(
5216 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5217
5218 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5219 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5220 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5221 Address(CpuRegister(RSP), mem2 + stack_offset));
5222 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5223 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5224 CpuRegister(ensure_scratch.GetRegister()));
5225}
5226
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005227void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5228 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5229 __ movq(Address(CpuRegister(RSP), mem), reg);
5230 __ movq(reg, CpuRegister(TMP));
5231}
5232
5233void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5234 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005235 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005236
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005237 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5238 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5239 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5240 Address(CpuRegister(RSP), mem2 + stack_offset));
5241 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5242 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5243 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005244}
5245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005246void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5247 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5248 __ movss(Address(CpuRegister(RSP), mem), reg);
5249 __ movd(reg, CpuRegister(TMP));
5250}
5251
5252void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5253 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5254 __ movsd(Address(CpuRegister(RSP), mem), reg);
5255 __ movd(reg, CpuRegister(TMP));
5256}
5257
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005258void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005259 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005260 Location source = move->GetSource();
5261 Location destination = move->GetDestination();
5262
5263 if (source.IsRegister() && destination.IsRegister()) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005264 __ xchgq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005265 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005266 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005267 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005268 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005269 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005270 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5271 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005272 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005273 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005274 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005275 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5276 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005277 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5279 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5280 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005281 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005282 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005283 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005284 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005286 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005287 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005288 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005289 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291 }
5292}
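
// Illustrative sketch, not from the upstream sources: the Exchange32/Exchange64
// helpers used by EmitSwap above swap a register (or XMM register) with a stack
// slot using only the reserved TMP register: load the memory operand into TMP,
// store the register to memory, then move TMP into the register. Names are
// hypothetical.
static inline void ExampleSwapRegisterWithStackSlot(int64_t& reg, int64_t& stack_slot) {
  int64_t tmp = stack_slot;  // movq(TMP, Address(RSP, mem))
  stack_slot = reg;          // movq(Address(RSP, mem), reg)
  reg = tmp;                 // movq(reg, TMP)
}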
5293
5294
5295void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5296 __ pushq(CpuRegister(reg));
5297}
5298
5299
5300void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5301 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005302}
5303
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005304void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005305 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005306 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5307 Immediate(mirror::Class::kStatusInitialized));
5308 __ j(kLess, slow_path->GetEntryLabel());
5309 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005310 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005311}
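
// Illustrative sketch, not from the upstream sources: the class initialization
// check above compares the class status word against kStatusInitialized and
// takes the slow path when the class is not yet initialized; because the
// status values are ordered, a single signed less-than compare suffices, and
// the x86-64 memory model makes an explicit fence unnecessary on the fast
// path. Names are hypothetical.
static inline bool ExampleNeedsClassInitialization(int32_t status, int32_t initialized_status) {
  return status < initialized_status;  // j(kLess, slow_path) above.
}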
5312
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005313void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005314 InvokeRuntimeCallingConvention calling_convention;
5315 CodeGenerator::CreateLoadClassLocationSummary(
5316 cls,
5317 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005318 Location::RegisterLocation(RAX),
5319 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005320}
5321
5322void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005323 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005324 if (cls->NeedsAccessCheck()) {
5325 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5326 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5327 cls,
5328 cls->GetDexPc(),
5329 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005330 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005331 return;
5332 }
5333
Roland Levillain0d5a2812015-11-13 10:07:31 +00005334 Location out_loc = locations->Out();
5335 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005336 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005337
Calin Juravle580b6092015-10-06 17:35:58 +01005338 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005339 DCHECK(!cls->CanCallRuntime());
5340 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005341 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5342 GenerateGcRootFieldLoad(
5343 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005344 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005345 // /* GcRoot<mirror::Class>[] */ out =
5346 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5347 __ movq(out, Address(current_method,
5348 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005349 // /* GcRoot<mirror::Class> */ out = out[type_index]
5350 GenerateGcRootFieldLoad(cls, out_loc, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Roland Levillain4d027112015-07-01 15:41:14 +01005351
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005352 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5353 DCHECK(cls->CanCallRuntime());
5354 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5355 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5356 codegen_->AddSlowPath(slow_path);
5357 if (!cls->IsInDexCache()) {
5358 __ testl(out, out);
5359 __ j(kEqual, slow_path->GetEntryLabel());
5360 }
5361 if (cls->MustGenerateClinitCheck()) {
5362 GenerateClassInitializationCheck(slow_path, out);
5363 } else {
5364 __ Bind(slow_path->GetExitLabel());
5365 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005366 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005367 }
5368}
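
// Illustrative sketch, not from the upstream sources: the non-referrer path
// above chases current_method->dex_cache_resolved_types_[type_index]; each
// entry is loaded as a 32-bit GC root (so it may need a read barrier), and a
// null result sends the code to LoadClassSlowPath to resolve the class. Names
// are hypothetical and the entry width is an assumption based on the 32-bit
// root loads emitted above.
static inline uint32_t ExampleLoadResolvedTypeRoot(const uint32_t* resolved_types,
                                                   size_t type_index) {
  return resolved_types[type_index];  // Compressed reference; zero means "not in the dex cache".
}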
5369
5370void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5371 LocationSummary* locations =
5372 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5373 locations->SetInAt(0, Location::RequiresRegister());
5374 if (check->HasUses()) {
5375 locations->SetOut(Location::SameAsFirstInput());
5376 }
5377}
5378
5379void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005380 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005381 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005382 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005383 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005384 GenerateClassInitializationCheck(slow_path,
5385 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005386}
5387
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005388void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005389 LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
5390 ? LocationSummary::kCallOnSlowPath
5391 : LocationSummary::kNoCall;
5392 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005393 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005394 locations->SetOut(Location::RequiresRegister());
5395}
5396
5397void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005398 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005399 Location out_loc = locations->Out();
5400 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005401 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005402
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005403 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5404 GenerateGcRootFieldLoad(
5405 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005406 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5407 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005408 // /* GcRoot<mirror::String> */ out = out[string_index]
5409 GenerateGcRootFieldLoad(
5410 load, out_loc, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005411
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005412 if (!load->IsInDexCache()) {
5413 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5414 codegen_->AddSlowPath(slow_path);
5415 __ testl(out, out);
5416 __ j(kEqual, slow_path->GetEntryLabel());
5417 __ Bind(slow_path->GetExitLabel());
5418 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005419}
5420
David Brazdilcb1c0552015-08-04 16:22:25 +01005421static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005422 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5423 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005424}
5425
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005426void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5427 LocationSummary* locations =
5428 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5429 locations->SetOut(Location::RequiresRegister());
5430}
5431
5432void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005433 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5434}
5435
5436void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5437 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5438}
5439
5440void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5441 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005442}
5443
5444void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5445 LocationSummary* locations =
5446 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5447 InvokeRuntimeCallingConvention calling_convention;
5448 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5449}
5450
5451void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005452 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5453 instruction,
5454 instruction->GetDexPc(),
5455 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005456 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005457}
5458
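5458 // Return whether a type check of the given kind needs an extra temporary
5458 // register. A temporary is only ever required when read barriers are enabled.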
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005459static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5460 return kEmitCompilerReadBarrier &&
5461 (kUseBakerReadBarrier ||
5462 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5463 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5464 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5465}
5466
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005467void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005468 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005469 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5470 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005471 case TypeCheckKind::kExactCheck:
5472 case TypeCheckKind::kAbstractClassCheck:
5473 case TypeCheckKind::kClassHierarchyCheck:
5474 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005475 call_kind =
5476 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005477 break;
5478 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005479 case TypeCheckKind::kUnresolvedCheck:
5480 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005481 call_kind = LocationSummary::kCallOnSlowPath;
5482 break;
5483 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005484
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005485 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005486 locations->SetInAt(0, Location::RequiresRegister());
5487 locations->SetInAt(1, Location::Any());
5488 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5489 locations->SetOut(Location::RequiresRegister());
5490 // When read barriers are enabled, we need a temporary register for
5491 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005492 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005493 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005494 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005495}
5496
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005497void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005498 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005499 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005500 Location obj_loc = locations->InAt(0);
5501 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005502 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005503 Location out_loc = locations->Out();
5504 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005505 Location temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5506 locations->GetTemp(0) :
5507 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005508 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005509 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5510 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5511 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005512 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005513 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005514
5515 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005516 // Avoid null check if we know obj is not null.
5517 if (instruction->MustDoNullCheck()) {
5518 __ testl(obj, obj);
5519 __ j(kEqual, &zero);
5520 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005521
Roland Levillain0d5a2812015-11-13 10:07:31 +00005522 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005523 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005524
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005525 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005526 case TypeCheckKind::kExactCheck: {
5527 if (cls.IsRegister()) {
5528 __ cmpl(out, cls.AsRegister<CpuRegister>());
5529 } else {
5530 DCHECK(cls.IsStackSlot()) << cls;
5531 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5532 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005533 if (zero.IsLinked()) {
5534 // Classes must be equal for the instanceof to succeed.
5535 __ j(kNotEqual, &zero);
5536 __ movl(out, Immediate(1));
5537 __ jmp(&done);
5538 } else {
5539 __ setcc(kEqual, out);
5540 // setcc only sets the low byte.
5541 __ andl(out, Immediate(1));
5542 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005543 break;
5544 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005545
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005546 case TypeCheckKind::kAbstractClassCheck: {
5547 // If the class is abstract, we eagerly fetch the super class of the
5548 // object to avoid doing a comparison we know will fail.
5549 NearLabel loop, success;
5550 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005551 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005552 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005553 __ testl(out, out);
5554 // If `out` is null, we use it for the result, and jump to `done`.
5555 __ j(kEqual, &done);
5556 if (cls.IsRegister()) {
5557 __ cmpl(out, cls.AsRegister<CpuRegister>());
5558 } else {
5559 DCHECK(cls.IsStackSlot()) << cls;
5560 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5561 }
5562 __ j(kNotEqual, &loop);
5563 __ movl(out, Immediate(1));
5564 if (zero.IsLinked()) {
5565 __ jmp(&done);
5566 }
5567 break;
5568 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005569
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005570 case TypeCheckKind::kClassHierarchyCheck: {
5571 // Walk over the class hierarchy to find a match.
5572 NearLabel loop, success;
5573 __ Bind(&loop);
5574 if (cls.IsRegister()) {
5575 __ cmpl(out, cls.AsRegister<CpuRegister>());
5576 } else {
5577 DCHECK(cls.IsStackSlot()) << cls;
5578 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5579 }
5580 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005581 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005582 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005583 __ testl(out, out);
5584 __ j(kNotEqual, &loop);
5585 // If `out` is null, we use it for the result, and jump to `done`.
5586 __ jmp(&done);
5587 __ Bind(&success);
5588 __ movl(out, Immediate(1));
5589 if (zero.IsLinked()) {
5590 __ jmp(&done);
5591 }
5592 break;
5593 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005594
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005595 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005596 // Do an exact check.
5597 NearLabel exact_check;
5598 if (cls.IsRegister()) {
5599 __ cmpl(out, cls.AsRegister<CpuRegister>());
5600 } else {
5601 DCHECK(cls.IsStackSlot()) << cls;
5602 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5603 }
5604 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005605 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005606 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005607 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005608 __ testl(out, out);
5609 // If `out` is null, we use it for the result, and jump to `done`.
5610 __ j(kEqual, &done);
5611 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5612 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005613 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005614 __ movl(out, Immediate(1));
5615 __ jmp(&done);
5616 break;
5617 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005618
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005619 case TypeCheckKind::kArrayCheck: {
5620 if (cls.IsRegister()) {
5621 __ cmpl(out, cls.AsRegister<CpuRegister>());
5622 } else {
5623 DCHECK(cls.IsStackSlot()) << cls;
5624 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5625 }
5626 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005627 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5628 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005629 codegen_->AddSlowPath(slow_path);
5630 __ j(kNotEqual, slow_path->GetEntryLabel());
5631 __ movl(out, Immediate(1));
5632 if (zero.IsLinked()) {
5633 __ jmp(&done);
5634 }
5635 break;
5636 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005637
Calin Juravle98893e12015-10-02 21:05:03 +01005638 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005639 case TypeCheckKind::kInterfaceCheck: {
5640 // Note that we indeed only call on slow path, but we always go
5641 // into the slow path for the unresolved & interface check
5642 // cases.
5643 //
5644 // We cannot directly call the InstanceofNonTrivial runtime
5645 // entry point without resorting to a type checking slow path
5646 // here (i.e. by calling InvokeRuntime directly), as it would
5647 // require to assign fixed registers for the inputs of this
5648 // HInstanceOf instruction (following the runtime calling
5649 // convention), which might be cluttered by the potential first
5650 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005651 //
5652 // TODO: Introduce a new runtime entry point taking the object
5653 // to test (instead of its class) as argument, and let it deal
5654 // with the read barrier issues. This will let us refactor this
5655 // case of the `switch` code as it was previously (with a direct
5656 // call to the runtime not using a type checking slow path).
5657 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005658 DCHECK(locations->OnlyCallsOnSlowPath());
5659 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5660 /* is_fatal */ false);
5661 codegen_->AddSlowPath(slow_path);
5662 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005663 if (zero.IsLinked()) {
5664 __ jmp(&done);
5665 }
5666 break;
5667 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005668 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005669
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005670 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005671 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005672 __ xorl(out, out);
5673 }
5674
5675 if (done.IsLinked()) {
5676 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005677 }
5678
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005679 if (slow_path != nullptr) {
5680 __ Bind(slow_path->GetExitLabel());
5681 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005682}
5683
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005684void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005685 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5686 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005687 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5688 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005689 case TypeCheckKind::kExactCheck:
5690 case TypeCheckKind::kAbstractClassCheck:
5691 case TypeCheckKind::kClassHierarchyCheck:
5692 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005693 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5694 LocationSummary::kCallOnSlowPath :
5695 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005696 break;
5697 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005698 case TypeCheckKind::kUnresolvedCheck:
5699 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005700 call_kind = LocationSummary::kCallOnSlowPath;
5701 break;
5702 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005703 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5704 locations->SetInAt(0, Location::RequiresRegister());
5705 locations->SetInAt(1, Location::Any());
5706 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5707 locations->AddTemp(Location::RequiresRegister());
5708 // When read barriers are enabled, we need an additional temporary
5709 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005710 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005711 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005712 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005713}
5714
5715void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005716 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005717 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005718 Location obj_loc = locations->InAt(0);
5719 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005720 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005721 Location temp_loc = locations->GetTemp(0);
5722 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005723 Location temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5724 locations->GetTemp(1) :
5725 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005726 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5727 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5728 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5729 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005730
Roland Levillain0d5a2812015-11-13 10:07:31 +00005731 bool is_type_check_slow_path_fatal =
5732 (type_check_kind == TypeCheckKind::kExactCheck ||
5733 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5734 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5735 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5736 !instruction->CanThrowIntoCatchBlock();
5737 SlowPathCode* type_check_slow_path =
5738 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5739 is_type_check_slow_path_fatal);
5740 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005741
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005742 Label done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005743 // Avoid null check if we know obj is not null.
5744 if (instruction->MustDoNullCheck()) {
5745 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005746 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005747 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748
Roland Levillain0d5a2812015-11-13 10:07:31 +00005749 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005750 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005751
Roland Levillain0d5a2812015-11-13 10:07:31 +00005752 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005753 case TypeCheckKind::kExactCheck:
5754 case TypeCheckKind::kArrayCheck: {
5755 if (cls.IsRegister()) {
5756 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5757 } else {
5758 DCHECK(cls.IsStackSlot()) << cls;
5759 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5760 }
5761 // Jump to slow path for throwing the exception or doing a
5762 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005763 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005764 break;
5765 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005766
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005767 case TypeCheckKind::kAbstractClassCheck: {
5768 // If the class is abstract, we eagerly fetch the super class of the
5769 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005770 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005771 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005772 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005773 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774
5775 // If the class reference currently in `temp` is not null, jump
5776 // to the `compare_classes` label to compare it with the checked
5777 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005778 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005779 __ j(kNotEqual, &compare_classes);
5780 // Otherwise, jump to the slow path to throw the exception.
5781 //
5782 // But before, move back the object's class into `temp` before
5783 // going into the slow path, as it has been overwritten in the
5784 // meantime.
5785 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005786 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005787 __ jmp(type_check_slow_path->GetEntryLabel());
5788
5789 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005790 if (cls.IsRegister()) {
5791 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5792 } else {
5793 DCHECK(cls.IsStackSlot()) << cls;
5794 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5795 }
5796 __ j(kNotEqual, &loop);
5797 break;
5798 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005799
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005800 case TypeCheckKind::kClassHierarchyCheck: {
5801 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005802 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005803 __ Bind(&loop);
5804 if (cls.IsRegister()) {
5805 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5806 } else {
5807 DCHECK(cls.IsStackSlot()) << cls;
5808 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5809 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005810 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811
Roland Levillain0d5a2812015-11-13 10:07:31 +00005812 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005813 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005814
5815 // If the class reference currently in `temp` is not null, jump
5816 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005817 __ testl(temp, temp);
5818 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005819 // Otherwise, jump to the slow path to throw the exception.
5820 //
5821 // But before, move back the object's class into `temp` before
5822 // going into the slow path, as it has been overwritten in the
5823 // meantime.
5824 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005825 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005826 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005827 break;
5828 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005829
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005830 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005831 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005832 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005833 if (cls.IsRegister()) {
5834 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5835 } else {
5836 DCHECK(cls.IsStackSlot()) << cls;
5837 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5838 }
5839 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
5841 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005842 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005843 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005844
5845 // If the component type is not null (i.e. the object is indeed
5846 // an array), jump to label `check_non_primitive_component_type`
5847 // to further check that this component type is not a primitive
5848 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005849 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005850 __ j(kNotEqual, &check_non_primitive_component_type);
5851 // Otherwise, jump to the slow path to throw the exception.
5852 //
5853 // But before, move back the object's class into `temp` before
5854 // going into the slow path, as it has been overwritten in the
5855 // meantime.
5856 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005857 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005858 __ jmp(type_check_slow_path->GetEntryLabel());
5859
5860 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005861 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005862 __ j(kEqual, &done);
5863 // Same comment as above regarding `temp` and the slow path.
5864 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005865 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005866 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005867 break;
5868 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005869
Calin Juravle98893e12015-10-02 21:05:03 +01005870 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005871 case TypeCheckKind::kInterfaceCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005872 // We always go into the type check slow path for the unresolved &
5873 // interface check cases.
5874 //
5875 // We cannot directly call the CheckCast runtime entry point
5876 // without resorting to a type checking slow path here (i.e. by
5877 // calling InvokeRuntime directly), as it would require to
5878 // assign fixed registers for the inputs of this HInstanceOf
5879 // instruction (following the runtime calling convention), which
5880 // might be cluttered by the potential first read barrier
5881 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005882 //
5883 // TODO: Introduce a new runtime entry point taking the object
5884 // to test (instead of its class) as argument, and let it deal
5885 // with the read barrier issues. This will let us refactor this
5886 // case of the `switch` code as it was previously (with a direct
5887 // call to the runtime not using a type checking slow path).
5888 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005889 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005890 break;
5891 }
5892 __ Bind(&done);
5893
Roland Levillain0d5a2812015-11-13 10:07:31 +00005894 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005895}
5896
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005897void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5898 LocationSummary* locations =
5899 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5900 InvokeRuntimeCallingConvention calling_convention;
5901 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5902}
5903
5904void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005905 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
5906 : QUICK_ENTRY_POINT(pUnlockObject),
5907 instruction,
5908 instruction->GetDexPc(),
5909 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005910 if (instruction->IsEnter()) {
5911 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5912 } else {
5913 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5914 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005915}
5916
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005917void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
5918void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
5919void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
5920
5921void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5922 LocationSummary* locations =
5923 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5924 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5925 || instruction->GetResultType() == Primitive::kPrimLong);
5926 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005927 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005928 locations->SetOut(Location::SameAsFirstInput());
5929}
5930
5931void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
5932 HandleBitwiseOperation(instruction);
5933}
5934
5935void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
5936 HandleBitwiseOperation(instruction);
5937}
5938
5939void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
5940 HandleBitwiseOperation(instruction);
5941}
5942
5943void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5944 LocationSummary* locations = instruction->GetLocations();
5945 Location first = locations->InAt(0);
5946 Location second = locations->InAt(1);
5947 DCHECK(first.Equals(locations->Out()));
5948
5949 if (instruction->GetResultType() == Primitive::kPrimInt) {
5950 if (second.IsRegister()) {
5951 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005952 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005953 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005954 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005955 } else {
5956 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005957 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005958 }
5959 } else if (second.IsConstant()) {
5960 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
5961 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005962 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005963 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005964 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005965 } else {
5966 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005967 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005968 }
5969 } else {
5970 Address address(CpuRegister(RSP), second.GetStackIndex());
5971 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005972 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005973 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005974 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005975 } else {
5976 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005977 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005978 }
5979 }
5980 } else {
5981 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005982 CpuRegister first_reg = first.AsRegister<CpuRegister>();
5983 bool second_is_constant = false;
5984 int64_t value = 0;
5985 if (second.IsConstant()) {
5986 second_is_constant = true;
5987 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005988 }
Mark Mendell40741f32015-04-20 22:10:34 -04005989 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005990
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005991 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005992 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04005993 if (is_int32_value) {
5994 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
5995 } else {
5996 __ andq(first_reg, codegen_->LiteralInt64Address(value));
5997 }
5998 } else if (second.IsDoubleStackSlot()) {
5999 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006000 } else {
6001 __ andq(first_reg, second.AsRegister<CpuRegister>());
6002 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006003 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006004 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006005 if (is_int32_value) {
6006 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6007 } else {
6008 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6009 }
6010 } else if (second.IsDoubleStackSlot()) {
6011 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006012 } else {
6013 __ orq(first_reg, second.AsRegister<CpuRegister>());
6014 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006015 } else {
6016 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006017 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006018 if (is_int32_value) {
6019 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6020 } else {
6021 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6022 }
6023 } else if (second.IsDoubleStackSlot()) {
6024 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006025 } else {
6026 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6027 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006028 }
6029 }
6030}
6031
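6031 // out <- *(out + offset): load a heap reference from the object whose
6031 // reference is currently held in `out`, going through a read barrier when
6031 // read barriers are enabled. `temp` is only used by the read barrier paths.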
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006032void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6033 Location out,
6034 uint32_t offset,
6035 Location temp) {
6036 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6037 if (kEmitCompilerReadBarrier) {
6038 if (kUseBakerReadBarrier) {
6039 // Load with fast path based Baker's read barrier.
6040 // /* HeapReference<Object> */ out = *(out + offset)
6041 codegen_->GenerateFieldLoadWithBakerReadBarrier(
6042 instruction, out, out_reg, offset, temp, /* needs_null_check */ false);
6043 } else {
6044 // Load with slow path based read barrier.
6045 // Save the value of `out` into `temp` before overwriting it
6046 // in the following move operation, as we will need it for the
6047 // read barrier below.
6048 __ movl(temp.AsRegister<CpuRegister>(), out_reg);
6049 // /* HeapReference<Object> */ out = *(out + offset)
6050 __ movl(out_reg, Address(out_reg, offset));
6051 codegen_->GenerateReadBarrierSlow(instruction, out, out, temp, offset);
6052 }
6053 } else {
6054 // Plain load with no read barrier.
6055 // /* HeapReference<Object> */ out = *(out + offset)
6056 __ movl(out_reg, Address(out_reg, offset));
6057 __ MaybeUnpoisonHeapReference(out_reg);
6058 }
6059}
6060
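6060 // out <- *(obj + offset): load a heap reference from `obj` into `out`, going
6060 // through a read barrier when read barriers are enabled. `temp` is only used
6060 // by the Baker read barrier path.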
6061void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6062 Location out,
6063 Location obj,
6064 uint32_t offset,
6065 Location temp) {
6066 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6067 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6068 if (kEmitCompilerReadBarrier) {
6069 if (kUseBakerReadBarrier) {
6070 // Load with fast path based Baker's read barrier.
6071 // /* HeapReference<Object> */ out = *(obj + offset)
6072 codegen_->GenerateFieldLoadWithBakerReadBarrier(
6073 instruction, out, obj_reg, offset, temp, /* needs_null_check */ false);
6074 } else {
6075 // Load with slow path based read barrier.
6076 // /* HeapReference<Object> */ out = *(obj + offset)
6077 __ movl(out_reg, Address(obj_reg, offset));
6078 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6079 }
6080 } else {
6081 // Plain load with no read barrier.
6082 // /* HeapReference<Object> */ out = *(obj + offset)
6083 __ movl(out_reg, Address(obj_reg, offset));
6084 __ MaybeUnpoisonHeapReference(out_reg);
6085 }
6086}
6087
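6087 // root <- *(obj + offset): load a GC root, going through the appropriate read
6087 // barrier (Baker fast path with a mark slow path, or a slow-path root read
6087 // barrier) when read barriers are enabled.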
6088void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6089 Location root,
6090 CpuRegister obj,
6091 uint32_t offset) {
6092 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6093 if (kEmitCompilerReadBarrier) {
6094 if (kUseBakerReadBarrier) {
6095 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6096 // Baker's read barrier are used:
6097 //
6098 // root = obj.field;
6099 // if (Thread::Current()->GetIsGcMarking()) {
6100 // root = ReadBarrier::Mark(root)
6101 // }
6102
6103 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6104 __ movl(root_reg, Address(obj, offset));
6105 static_assert(
6106 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6107 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6108 "have different sizes.");
6109 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6110 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6111 "have different sizes.");
6112
6113 // Slow path used to mark the GC root `root`.
6114 SlowPathCode* slow_path =
6115 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6116 codegen_->AddSlowPath(slow_path);
6117
6118 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6119 /* no_rip */ true),
6120 Immediate(0));
6121 __ j(kNotEqual, slow_path->GetEntryLabel());
6122 __ Bind(slow_path->GetExitLabel());
6123 } else {
6124 // GC root loaded through a slow path for read barriers other
6125 // than Baker's.
6126 // /* GcRoot<mirror::Object>* */ root = obj + offset
6127 __ leaq(root_reg, Address(obj, offset));
6128 // /* mirror::Object* */ root = root->Read()
6129 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6130 }
6131 } else {
6132 // Plain GC root load with no read barrier.
6133 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6134 __ movl(root_reg, Address(obj, offset));
6135 }
6136}
6137
6138void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6139 Location ref,
6140 CpuRegister obj,
6141 uint32_t offset,
6142 Location temp,
6143 bool needs_null_check) {
6144 DCHECK(kEmitCompilerReadBarrier);
6145 DCHECK(kUseBakerReadBarrier);
6146
6147 // /* HeapReference<Object> */ ref = *(obj + offset)
6148 Address src(obj, offset);
6149 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6150}
6151
6152void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6153 Location ref,
6154 CpuRegister obj,
6155 uint32_t data_offset,
6156 Location index,
6157 Location temp,
6158 bool needs_null_check) {
6159 DCHECK(kEmitCompilerReadBarrier);
6160 DCHECK(kUseBakerReadBarrier);
6161
6162 // /* HeapReference<Object> */ ref =
6163 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6164 Address src = index.IsConstant() ?
6165 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6166 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6167 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6168}
6169
6170void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6171 Location ref,
6172 CpuRegister obj,
6173 const Address& src,
6174 Location temp,
6175 bool needs_null_check) {
6176 DCHECK(kEmitCompilerReadBarrier);
6177 DCHECK(kUseBakerReadBarrier);
6178
6179 // In slow path based read barriers, the read barrier call is
6180 // inserted after the original load. However, in fast path based
6181 // Baker's read barriers, we need to perform the load of
6182 // mirror::Object::monitor_ *before* the original reference load.
6183 // This load-load ordering is required by the read barrier.
6184 // The fast path/slow path (for Baker's algorithm) should look like:
6185 //
6186 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6187 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6188 // HeapReference<Object> ref = *src; // Original reference load.
6189 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6190 // if (is_gray) {
6191 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6192 // }
6193 //
6194 // Note: the original implementation in ReadBarrier::Barrier is
6195 // slightly more complex as:
6196 // - it implements the load-load fence using a data dependency on
6197 // the high-bits of rb_state, which are expected to be all zeroes;
6198 // - it performs additional checks that we do not do here for
6199 // performance reasons.
6200
6201 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6202 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6203 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6204
6205 // /* int32_t */ monitor = obj->monitor_
6206 __ movl(temp_reg, Address(obj, monitor_offset));
6207 if (needs_null_check) {
6208 MaybeRecordImplicitNullCheck(instruction);
6209 }
6210 // /* LockWord */ lock_word = LockWord(monitor)
6211 static_assert(sizeof(LockWord) == sizeof(int32_t),
6212 "art::LockWord and int32_t have different sizes.");
6213 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6214 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6215 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6216 static_assert(
6217 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6218 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6219
6220 // Load fence to prevent load-load reordering.
6221 // Note that this is a no-op, thanks to the x86-64 memory model.
6222 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6223
6224 // The actual reference load.
6225 // /* HeapReference<Object> */ ref = *src
6226 __ movl(ref_reg, src);
6227
6228 // Object* ref = ref_addr->AsMirrorPtr()
6229 __ MaybeUnpoisonHeapReference(ref_reg);
6230
6231 // Slow path used to mark the object `ref` when it is gray.
6232 SlowPathCode* slow_path =
6233 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6234 AddSlowPath(slow_path);
6235
6236 // if (rb_state == ReadBarrier::gray_ptr_)
6237 // ref = ReadBarrier::Mark(ref);
6238 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6239 __ j(kEqual, slow_path->GetEntryLabel());
6240 __ Bind(slow_path->GetExitLabel());
6241}
6242
6243void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6244 Location out,
6245 Location ref,
6246 Location obj,
6247 uint32_t offset,
6248 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006249 DCHECK(kEmitCompilerReadBarrier);
6250
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006251 // Insert a slow path based read barrier *after* the reference load.
6252 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006253 // If heap poisoning is enabled, the unpoisoning of the loaded
6254 // reference will be carried out by the runtime within the slow
6255 // path.
6256 //
6257 // Note that `ref` currently does not get unpoisoned (when heap
6258 // poisoning is enabled), which is alright as the `ref` argument is
6259 // not used by the artReadBarrierSlow entry point.
6260 //
6261 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6262 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6263 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6264 AddSlowPath(slow_path);
6265
Roland Levillain0d5a2812015-11-13 10:07:31 +00006266 __ jmp(slow_path->GetEntryLabel());
6267 __ Bind(slow_path->GetExitLabel());
6268}
6269
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006270void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6271 Location out,
6272 Location ref,
6273 Location obj,
6274 uint32_t offset,
6275 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006276 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006277 // Baker's read barriers shall be handled by the fast path
6278 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6279 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006280 // If heap poisoning is enabled, unpoisoning will be taken care of
6281 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006282 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006283 } else if (kPoisonHeapReferences) {
6284 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6285 }
6286}
6287
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006288void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6289 Location out,
6290 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006291 DCHECK(kEmitCompilerReadBarrier);
6292
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006293 // Insert a slow path based read barrier *after* the GC root load.
6294 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006295 // Note that GC roots are not affected by heap poisoning, so we do
6296 // not need to do anything special for this here.
6297 SlowPathCode* slow_path =
6298 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6299 AddSlowPath(slow_path);
6300
Roland Levillain0d5a2812015-11-13 10:07:31 +00006301 __ jmp(slow_path->GetEntryLabel());
6302 __ Bind(slow_path->GetExitLabel());
6303}
6304
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006305void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006306 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006307 LOG(FATAL) << "Unreachable";
6308}
6309
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006310void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006311 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006312 LOG(FATAL) << "Unreachable";
6313}
6314
Nicolas Geoffray2e7cd752015-07-10 11:38:52 +01006315void LocationsBuilderX86_64::VisitFakeString(HFakeString* instruction) {
6316 DCHECK(codegen_->IsBaseline());
6317 LocationSummary* locations =
6318 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6319 locations->SetOut(Location::ConstantLocation(GetGraph()->GetNullConstant()));
6320}
6321
6322void InstructionCodeGeneratorX86_64::VisitFakeString(HFakeString* instruction ATTRIBUTE_UNUSED) {
6323 DCHECK(codegen_->IsBaseline());
6324 // Will be generated at use site.
6325}
6326
Mark Mendellfe57faa2015-09-18 09:26:15 -04006327// Simple implementation of packed switch - generate cascaded compare/jumps.
6328void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6329 LocationSummary* locations =
6330 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6331 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006332 locations->AddTemp(Location::RequiresRegister());
6333 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006334}
6335
6336void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6337 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu59f054d2015-12-07 17:17:03 +08006338 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006339 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006340 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6341 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6342 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Zheng Xu59f054d2015-12-07 17:17:03 +08006343 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6344
6345 // Should we generate smaller inline compare/jumps?
6346 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6347 // Figure out the correct compare values and jump conditions.
6348 // Handle the first compare/branch as a special case because it might
6349 // jump to the default case.
6350 DCHECK_GT(num_entries, 2u);
6351 Condition first_condition;
6352 uint32_t index;
6353 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6354 if (lower_bound != 0) {
6355 first_condition = kLess;
6356 __ cmpl(value_reg_in, Immediate(lower_bound));
6357 __ j(first_condition, codegen_->GetLabelOf(default_block));
6358 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6359
6360 index = 1;
6361 } else {
6362 // Handle all the compare/jumps below.
6363 first_condition = kBelow;
6364 index = 0;
6365 }
6366
6367 // Handle the rest of the compare/jumps.
6368 for (; index + 1 < num_entries; index += 2) {
6369 int32_t compare_to_value = lower_bound + index + 1;
6370 __ cmpl(value_reg_in, Immediate(compare_to_value));
6371 // Jump to successors[index] if value < case_value[index].
6372 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6373 // Jump to successors[index + 1] if value == case_value[index + 1].
6374 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6375 }
6376
6377 if (index != num_entries) {
6378 // There are an odd number of entries. Handle the last one.
6379 DCHECK_EQ(index + 1, num_entries);
6380 __ cmpl(value_reg_in, Immediate(lower_bound + index));
6381 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6382 }
6383
6384 // And the default for any other value.
6385 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6386 __ jmp(codegen_->GetLabelOf(default_block));
6387 }
6388 return;
6389 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006390
6391 // Remove the bias, if needed.
6392 Register value_reg_out = value_reg_in.AsRegister();
6393 if (lower_bound != 0) {
6394 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6395 value_reg_out = temp_reg.AsRegister();
6396 }
6397 CpuRegister value_reg(value_reg_out);
6398
6399 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006400 __ cmpl(value_reg, Immediate(num_entries - 1));
6401 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006402
Mark Mendell9c86b482015-09-18 13:36:07 -04006403 // We are in the range of the table.
6404 // Load the address of the jump table in the constant area.
6405 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006406
Mark Mendell9c86b482015-09-18 13:36:07 -04006407 // Load the (signed) offset from the jump table.
6408 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6409
6410 // Add the offset to the address of the table base.
6411 __ addq(temp_reg, base_reg);
6412
6413 // And jump.
6414 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006415}
6416
Mark Mendell92e83bf2015-05-07 11:25:03 -04006417void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6418 if (value == 0) {
6419 __ xorl(dest, dest);
6420 } else if (value > 0 && IsInt<32>(value)) {
6421 // We can use a 32 bit move, as it will zero-extend and is one byte shorter.
6422 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6423 } else {
6424 __ movq(dest, Immediate(value));
6425 }
6426}
6427
Mark Mendellcfa410b2015-05-25 16:02:44 -04006428void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6429 DCHECK(dest.IsDoubleStackSlot());
6430 if (IsInt<32>(value)) {
6431 // Can move directly as an int32 constant.
6432 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6433 Immediate(static_cast<int32_t>(value)));
6434 } else {
6435 Load64BitValue(CpuRegister(TMP), value);
6436 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6437 }
6438}
6439
Mark Mendell9c86b482015-09-18 13:36:07 -04006440/**
6441 * Class to handle late fixup of offsets into constant area.
6442 */
6443class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6444 public:
6445 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6446 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6447
6448 protected:
6449 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6450
6451 CodeGeneratorX86_64* codegen_;
6452
6453 private:
6454 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6455 // Patch the correct offset for the instruction. We use the address of the
6456 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6457 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6458 int32_t relative_position = constant_offset - pos;
6459
6460 // Patch in the right value.
6461 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6462 }
6463
6464 // Location in constant area that the fixup refers to.
6465 size_t offset_into_constant_area_;
6466};
6467
6468/**
6469 * Class to handle late fixup of offsets to a jump table that will be created in the
6470 * constant area.
6471 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
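    // The constant area (and thus the table) is emitted right after the generated
    // code, so the table position is the current code size plus the table's offset
    // within the constant area.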
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the target offsets.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
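    // These 4-byte entries are the ones read back with movsxd by the packed-switch
    // code above and added to the table base before the indirect jump.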
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};

void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

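// The helpers below hand out RIP-relative addresses into the constant area. The
// 32-bit displacement is not known until Finalize() places the area, so each one is
// backed by a RIPFixup that patches the displacement late.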
Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address::RIP(fixup);
}

// TODO: trg as memory.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
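  // Nothing to do if the call already left the result where the caller wants it.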
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}

Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // Remember the fixup so that Finalize() can populate the table in the constant area.
  fixups_to_jump_tables_.push_back(table_fixup);
  return Address::RIP(table_fixup);
}

void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a 32-bit signed immediate. Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
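    // Only the first store can fault on a null object, so the implicit null
    // check is recorded against it.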
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}

#undef __

}  // namespace x86_64
}  // namespace art